Searched refs:ar (Results 1 – 183 of 183) sorted by relevance

/linux-4.1.27/drivers/net/wireless/ath/ath10k/
Dcore.c105 static void ath10k_send_suspend_complete(struct ath10k *ar) in ath10k_send_suspend_complete() argument
107 ath10k_dbg(ar, ATH10K_DBG_BOOT, "boot suspend complete\n"); in ath10k_send_suspend_complete()
109 complete(&ar->target_suspend); in ath10k_send_suspend_complete()
112 static int ath10k_init_configure_target(struct ath10k *ar) in ath10k_init_configure_target() argument
118 ret = ath10k_bmi_write32(ar, hi_app_host_interest, in ath10k_init_configure_target()
121 ath10k_err(ar, "settings HTC version failed\n"); in ath10k_init_configure_target()
126 ret = ath10k_bmi_read32(ar, hi_option_flag, &param_host); in ath10k_init_configure_target()
128 ath10k_err(ar, "setting firmware mode (1/2) failed\n"); in ath10k_init_configure_target()
145 ret = ath10k_bmi_write32(ar, hi_option_flag, param_host); in ath10k_init_configure_target()
147 ath10k_err(ar, "setting firmware mode (2/2) failed\n"); in ath10k_init_configure_target()
[all …]
Dwmi-ops.h25 void (*rx)(struct ath10k *ar, struct sk_buff *skb);
28 int (*pull_scan)(struct ath10k *ar, struct sk_buff *skb,
30 int (*pull_mgmt_rx)(struct ath10k *ar, struct sk_buff *skb,
32 int (*pull_ch_info)(struct ath10k *ar, struct sk_buff *skb,
34 int (*pull_vdev_start)(struct ath10k *ar, struct sk_buff *skb,
36 int (*pull_peer_kick)(struct ath10k *ar, struct sk_buff *skb,
38 int (*pull_swba)(struct ath10k *ar, struct sk_buff *skb,
40 int (*pull_phyerr)(struct ath10k *ar, struct sk_buff *skb,
42 int (*pull_svc_rdy)(struct ath10k *ar, struct sk_buff *skb,
44 int (*pull_rdy)(struct ath10k *ar, struct sk_buff *skb,
[all …]
Dhif.h34 int (*tx_completion)(struct ath10k *ar,
36 int (*rx_completion)(struct ath10k *ar,
42 int (*tx_sg)(struct ath10k *ar, u8 pipe_id,
46 int (*diag_read)(struct ath10k *ar, u32 address, void *buf,
49 int (*diag_write)(struct ath10k *ar, u32 address, const void *data,
56 int (*exchange_bmi_msg)(struct ath10k *ar,
61 int (*start)(struct ath10k *ar);
65 void (*stop)(struct ath10k *ar);
67 int (*map_service_to_pipe)(struct ath10k *ar, u16 service_id,
71 void (*get_default_pipe)(struct ath10k *ar, u8 *ul_pipe, u8 *dl_pipe);
[all …]
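
Note: the hif.h entries above are bus-abstraction callbacks; callers normally reach them through thin inline wrappers. The sketch below assumes the conventional ar->hif.ops member path and uses a made-up wrapper name — neither is quoted from this header.

	static inline int example_hif_start(struct ath10k *ar)
	{
		/* dispatch to whichever bus backend (e.g. PCI) filled in the ops table */
		return ar->hif.ops->start(ar);
	}
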
Dtestmode.c41 bool ath10k_tm_event_wmi(struct ath10k *ar, u32 cmd_id, struct sk_buff *skb) in ath10k_tm_event_wmi() argument
47 ath10k_dbg(ar, ATH10K_DBG_TESTMODE, in ath10k_tm_event_wmi()
51 ath10k_dbg_dump(ar, ATH10K_DBG_TESTMODE, NULL, "", skb->data, skb->len); in ath10k_tm_event_wmi()
53 spin_lock_bh(&ar->data_lock); in ath10k_tm_event_wmi()
55 if (!ar->testmode.utf_monitor) { in ath10k_tm_event_wmi()
66 nl_skb = cfg80211_testmode_alloc_event_skb(ar->hw->wiphy, in ath10k_tm_event_wmi()
70 ath10k_warn(ar, in ath10k_tm_event_wmi()
77 ath10k_warn(ar, in ath10k_tm_event_wmi()
86 ath10k_warn(ar, in ath10k_tm_event_wmi()
95 ath10k_warn(ar, in ath10k_tm_event_wmi()
[all …]
Dmac.c42 struct ath10k *ar = arvif->ar; in ath10k_send_key() local
51 lockdep_assert_held(&arvif->ar->conf_mutex); in ath10k_send_key()
83 ath10k_warn(ar, "cipher %d is not supported\n", key->cipher); in ath10k_send_key()
92 return ath10k_wmi_vdev_install_key(arvif->ar, &arg); in ath10k_send_key()
100 struct ath10k *ar = arvif->ar; in ath10k_install_key() local
103 lockdep_assert_held(&ar->conf_mutex); in ath10k_install_key()
105 reinit_completion(&ar->install_key_done); in ath10k_install_key()
111 ret = wait_for_completion_timeout(&ar->install_key_done, 3*HZ); in ath10k_install_key()
121 struct ath10k *ar = arvif->ar; in ath10k_install_peer_wep_keys() local
127 lockdep_assert_held(&ar->conf_mutex); in ath10k_install_peer_wep_keys()
[all …]
Dpci.c82 static void ath10k_pci_buffer_cleanup(struct ath10k *ar);
83 static int ath10k_pci_cold_reset(struct ath10k *ar);
84 static int ath10k_pci_warm_reset(struct ath10k *ar);
85 static int ath10k_pci_wait_for_target_init(struct ath10k *ar);
86 static int ath10k_pci_init_irq(struct ath10k *ar);
87 static int ath10k_pci_deinit_irq(struct ath10k *ar);
88 static int ath10k_pci_request_irq(struct ath10k *ar);
89 static void ath10k_pci_free_irq(struct ath10k *ar);
333 static bool ath10k_pci_irq_pending(struct ath10k *ar) in ath10k_pci_irq_pending() argument
338 cause = ath10k_pci_read32(ar, SOC_CORE_BASE_ADDRESS + in ath10k_pci_irq_pending()
[all …]
Dthermal.c26 static int ath10k_thermal_get_active_vifs(struct ath10k *ar, in ath10k_thermal_get_active_vifs() argument
32 lockdep_assert_held(&ar->conf_mutex); in ath10k_thermal_get_active_vifs()
34 list_for_each_entry(arvif, &ar->arvifs, list) { in ath10k_thermal_get_active_vifs()
60 struct ath10k *ar = cdev->devdata; in ath10k_thermal_get_cur_dutycycle() local
62 mutex_lock(&ar->conf_mutex); in ath10k_thermal_get_cur_dutycycle()
63 *state = ar->thermal.duty_cycle; in ath10k_thermal_get_cur_dutycycle()
64 mutex_unlock(&ar->conf_mutex); in ath10k_thermal_get_cur_dutycycle()
72 struct ath10k *ar = cdev->devdata; in ath10k_thermal_set_cur_dutycycle() local
76 mutex_lock(&ar->conf_mutex); in ath10k_thermal_set_cur_dutycycle()
77 if (ar->state != ATH10K_STATE_ON) { in ath10k_thermal_set_cur_dutycycle()
[all …]
Dce.c62 static inline void ath10k_ce_dest_ring_write_index_set(struct ath10k *ar, in ath10k_ce_dest_ring_write_index_set() argument
66 ath10k_pci_write32(ar, ce_ctrl_addr + DST_WR_INDEX_ADDRESS, n); in ath10k_ce_dest_ring_write_index_set()
69 static inline u32 ath10k_ce_dest_ring_write_index_get(struct ath10k *ar, in ath10k_ce_dest_ring_write_index_get() argument
72 return ath10k_pci_read32(ar, ce_ctrl_addr + DST_WR_INDEX_ADDRESS); in ath10k_ce_dest_ring_write_index_get()
75 static inline void ath10k_ce_src_ring_write_index_set(struct ath10k *ar, in ath10k_ce_src_ring_write_index_set() argument
79 ath10k_pci_write32(ar, ce_ctrl_addr + SR_WR_INDEX_ADDRESS, n); in ath10k_ce_src_ring_write_index_set()
82 static inline u32 ath10k_ce_src_ring_write_index_get(struct ath10k *ar, in ath10k_ce_src_ring_write_index_get() argument
85 return ath10k_pci_read32(ar, ce_ctrl_addr + SR_WR_INDEX_ADDRESS); in ath10k_ce_src_ring_write_index_get()
88 static inline u32 ath10k_ce_src_ring_read_index_get(struct ath10k *ar, in ath10k_ce_src_ring_read_index_get() argument
91 return ath10k_pci_read32(ar, ce_ctrl_addr + CURRENT_SRRI_ADDRESS); in ath10k_ce_src_ring_read_index_get()
[all …]
Dtxrx.c24 static void ath10k_report_offchan_tx(struct ath10k *ar, struct sk_buff *skb) in ath10k_report_offchan_tx() argument
33 spin_lock_bh(&ar->data_lock); in ath10k_report_offchan_tx()
34 if (ar->offchan_tx_skb != skb) { in ath10k_report_offchan_tx()
35 ath10k_warn(ar, "completed old offchannel frame\n"); in ath10k_report_offchan_tx()
39 complete(&ar->offchan_tx_completed); in ath10k_report_offchan_tx()
40 ar->offchan_tx_skb = NULL; /* just for sanity */ in ath10k_report_offchan_tx()
42 ath10k_dbg(ar, ATH10K_DBG_HTT, "completed offchannel skb %p\n", skb); in ath10k_report_offchan_tx()
44 spin_unlock_bh(&ar->data_lock); in ath10k_report_offchan_tx()
50 struct ath10k *ar = htt->ar; in ath10k_txrx_tx_unref() local
51 struct device *dev = ar->dev; in ath10k_txrx_tx_unref()
[all …]
Ddebug.c110 void ath10k_info(struct ath10k *ar, const char *fmt, ...) in ath10k_info() argument
119 dev_info(ar->dev, "%pV", &vaf); in ath10k_info()
120 trace_ath10k_log_info(ar, &vaf); in ath10k_info()
125 void ath10k_print_driver_info(struct ath10k *ar) in ath10k_print_driver_info() argument
127 ath10k_info(ar, "%s (0x%08x, 0x%08x) fw %s api %d htt %d.%d wmi %d cal %s max_sta %d\n", in ath10k_print_driver_info()
128 ar->hw_params.name, in ath10k_print_driver_info()
129 ar->target_version, in ath10k_print_driver_info()
130 ar->chip_id, in ath10k_print_driver_info()
131 ar->hw->wiphy->fw_version, in ath10k_print_driver_info()
132 ar->fw_api, in ath10k_print_driver_info()
[all …]
Dtrace.h52 TP_PROTO(struct ath10k *ar, struct va_format *vaf),
53 TP_ARGS(ar, vaf),
55 __string(device, dev_name(ar->dev))
56 __string(driver, dev_driver_string(ar->dev))
60 __assign_str(device, dev_name(ar->dev));
61 __assign_str(driver, dev_driver_string(ar->dev));
76 TP_PROTO(struct ath10k *ar, struct va_format *vaf),
77 TP_ARGS(ar, vaf)
81 TP_PROTO(struct ath10k *ar, struct va_format *vaf),
82 TP_ARGS(ar, vaf)
[all …]
Dbmi.c23 void ath10k_bmi_start(struct ath10k *ar) in ath10k_bmi_start() argument
25 ath10k_dbg(ar, ATH10K_DBG_BMI, "bmi start\n"); in ath10k_bmi_start()
27 ar->bmi.done_sent = false; in ath10k_bmi_start()
30 int ath10k_bmi_done(struct ath10k *ar) in ath10k_bmi_done() argument
36 ath10k_dbg(ar, ATH10K_DBG_BMI, "bmi done\n"); in ath10k_bmi_done()
38 if (ar->bmi.done_sent) { in ath10k_bmi_done()
39 ath10k_dbg(ar, ATH10K_DBG_BMI, "bmi skipped\n"); in ath10k_bmi_done()
43 ar->bmi.done_sent = true; in ath10k_bmi_done()
46 ret = ath10k_hif_exchange_bmi_msg(ar, &cmd, cmdlen, NULL, NULL); in ath10k_bmi_done()
48 ath10k_warn(ar, "unable to write to the device: %d\n", ret); in ath10k_bmi_done()
[all …]
Dspectral.c22 static void send_fft_sample(struct ath10k *ar, in send_fft_sample() argument
27 if (!ar->spectral.rfs_chan_spec_scan) in send_fft_sample()
32 relay_write(ar->spectral.rfs_chan_spec_scan, fft_sample_tlv, length); in send_fft_sample()
59 int ath10k_spectral_process_fft(struct ath10k *ar, in ath10k_spectral_process_fft() argument
151 send_fft_sample(ar, &fft_sample->tlv); in ath10k_spectral_process_fft()
156 static struct ath10k_vif *ath10k_get_spectral_vdev(struct ath10k *ar) in ath10k_get_spectral_vdev() argument
160 lockdep_assert_held(&ar->conf_mutex); in ath10k_get_spectral_vdev()
162 if (list_empty(&ar->arvifs)) in ath10k_get_spectral_vdev()
166 list_for_each_entry(arvif, &ar->arvifs, list) in ath10k_get_spectral_vdev()
171 return list_first_entry(&ar->arvifs, typeof(*arvif), list); in ath10k_get_spectral_vdev()
[all …]
Ddebug.h59 __printf(2, 3) void ath10k_info(struct ath10k *ar, const char *fmt, ...);
60 __printf(2, 3) void ath10k_err(struct ath10k *ar, const char *fmt, ...);
61 __printf(2, 3) void ath10k_warn(struct ath10k *ar, const char *fmt, ...);
62 void ath10k_print_driver_info(struct ath10k *ar);
65 int ath10k_debug_start(struct ath10k *ar);
66 void ath10k_debug_stop(struct ath10k *ar);
67 int ath10k_debug_create(struct ath10k *ar);
68 void ath10k_debug_destroy(struct ath10k *ar);
69 int ath10k_debug_register(struct ath10k *ar);
70 void ath10k_debug_unregister(struct ath10k *ar);
[all …]
Dwmi.c885 int ath10k_wmi_wait_for_service_ready(struct ath10k *ar) in ath10k_wmi_wait_for_service_ready() argument
889 ret = wait_for_completion_timeout(&ar->wmi.service_ready, in ath10k_wmi_wait_for_service_ready()
894 int ath10k_wmi_wait_for_unified_ready(struct ath10k *ar) in ath10k_wmi_wait_for_unified_ready() argument
898 ret = wait_for_completion_timeout(&ar->wmi.unified_ready, in ath10k_wmi_wait_for_unified_ready()
903 struct sk_buff *ath10k_wmi_alloc_skb(struct ath10k *ar, u32 len) in ath10k_wmi_alloc_skb() argument
908 skb = ath10k_htc_alloc_skb(ar, WMI_SKB_HEADROOM + round_len); in ath10k_wmi_alloc_skb()
914 ath10k_warn(ar, "Unaligned WMI skb\n"); in ath10k_wmi_alloc_skb()
922 static void ath10k_wmi_htc_tx_complete(struct ath10k *ar, struct sk_buff *skb) in ath10k_wmi_htc_tx_complete() argument
927 int ath10k_wmi_cmd_send_nowait(struct ath10k *ar, struct sk_buff *skb, in ath10k_wmi_cmd_send_nowait() argument
944 ret = ath10k_htc_send(&ar->htc, ar->wmi.eid, skb); in ath10k_wmi_cmd_send_nowait()
[all …]
Dhtc.c33 ath10k_hif_send_complete_check(ep->htc->ar, ep->ul_pipe_id, force); in ath10k_htc_send_complete_check()
36 static void ath10k_htc_control_tx_complete(struct ath10k *ar, in ath10k_htc_control_tx_complete() argument
42 static struct sk_buff *ath10k_htc_build_tx_ctrl_skb(void *ar) in ath10k_htc_build_tx_ctrl_skb() argument
57 ath10k_dbg(ar, ATH10K_DBG_HTC, "%s: skb %p\n", __func__, skb); in ath10k_htc_build_tx_ctrl_skb()
66 dma_unmap_single(htc->ar->dev, skb_cb->paddr, skb->len, DMA_TO_DEVICE); in ath10k_htc_restore_tx_skb()
73 struct ath10k *ar = ep->htc->ar; in ath10k_htc_notify_tx_completion() local
75 ath10k_dbg(ar, ATH10K_DBG_HTC, "%s: ep %d skb %p\n", __func__, in ath10k_htc_notify_tx_completion()
81 ath10k_warn(ar, "no tx handler for eid %d\n", ep->eid); in ath10k_htc_notify_tx_completion()
86 ep->ep_ops.ep_tx_complete(ep->htc->ar, skb); in ath10k_htc_notify_tx_completion()
92 struct ath10k *ar = ep->htc->ar; in ath10k_htc_ep_need_credit_update() local
[all …]
Ddebugfs_sta.c27 struct ath10k *ar = arsta->arvif->ar; in ath10k_dbg_sta_read_aggr_mode() local
31 mutex_lock(&ar->conf_mutex); in ath10k_dbg_sta_read_aggr_mode()
35 mutex_unlock(&ar->conf_mutex); in ath10k_dbg_sta_read_aggr_mode()
46 struct ath10k *ar = arsta->arvif->ar; in ath10k_dbg_sta_write_aggr_mode() local
56 mutex_lock(&ar->conf_mutex); in ath10k_dbg_sta_write_aggr_mode()
57 if ((ar->state != ATH10K_STATE_ON) || in ath10k_dbg_sta_write_aggr_mode()
63 ret = ath10k_wmi_addba_clear_resp(ar, arsta->arvif->vdev_id, sta->addr); in ath10k_dbg_sta_write_aggr_mode()
65 ath10k_warn(ar, "failed to clear addba session ret: %d\n", ret); in ath10k_dbg_sta_write_aggr_mode()
71 mutex_unlock(&ar->conf_mutex); in ath10k_dbg_sta_write_aggr_mode()
89 struct ath10k *ar = arsta->arvif->ar; in ath10k_dbg_sta_write_addba() local
[all …]
Dhtt_rx.c38 ath10k_htt_rx_find_skb_paddr(struct ath10k *ar, u32 paddr) in ath10k_htt_rx_find_skb_paddr() argument
42 hash_for_each_possible(ar->htt.rx_ring.skb_table, rxcb, hlist, paddr) in ath10k_htt_rx_find_skb_paddr()
60 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
73 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
118 paddr = dma_map_single(htt->ar->dev, skb->data, in __ath10k_htt_rx_ring_fill_n()
122 if (unlikely(dma_mapping_error(htt->ar->dev, paddr))) { in __ath10k_htt_rx_ring_fill_n()
207 int ath10k_htt_rx_ring_refill(struct ath10k *ar) in ath10k_htt_rx_ring_refill() argument
209 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_ring_refill()
235 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
241 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
[all …]
Dwmi-tlv.c68 ath10k_wmi_tlv_iter(struct ath10k *ar, const void *ptr, size_t len, in ath10k_wmi_tlv_iter() argument
69 int (*iter)(struct ath10k *ar, u16 tag, u16 len, in ath10k_wmi_tlv_iter() argument
80 ath10k_dbg(ar, ATH10K_DBG_WMI, in ath10k_wmi_tlv_iter()
93 ath10k_dbg(ar, ATH10K_DBG_WMI, in ath10k_wmi_tlv_iter()
102 ath10k_dbg(ar, ATH10K_DBG_WMI, in ath10k_wmi_tlv_iter()
109 ret = iter(ar, tlv_tag, tlv_len, ptr, data); in ath10k_wmi_tlv_iter()
120 static int ath10k_wmi_tlv_iter_parse(struct ath10k *ar, u16 tag, u16 len, in ath10k_wmi_tlv_iter_parse() argument
131 static int ath10k_wmi_tlv_parse(struct ath10k *ar, const void **tb, in ath10k_wmi_tlv_parse() argument
134 return ath10k_wmi_tlv_iter(ar, ptr, len, ath10k_wmi_tlv_iter_parse, in ath10k_wmi_tlv_parse()
139 ath10k_wmi_tlv_parse_alloc(struct ath10k *ar, const void *ptr, in ath10k_wmi_tlv_parse_alloc() argument
[all …]
Dpci.h163 struct ath10k *ar; member
190 static inline struct ath10k_pci *ath10k_pci_priv(struct ath10k *ar) in ath10k_pci_priv() argument
192 return (struct ath10k_pci *)ar->drv_priv; in ath10k_pci_priv()
211 #define TARG_CPU_SPACE_TO_CE_SPACE(ar, pci_addr, addr) \ argument
230 static inline void ath10k_pci_write32(struct ath10k *ar, u32 offset, in ath10k_pci_write32() argument
233 struct ath10k_pci *ar_pci = ath10k_pci_priv(ar); in ath10k_pci_write32()
238 static inline u32 ath10k_pci_read32(struct ath10k *ar, u32 offset) in ath10k_pci_read32() argument
240 struct ath10k_pci *ar_pci = ath10k_pci_priv(ar); in ath10k_pci_read32()
245 static inline u32 ath10k_pci_soc_read32(struct ath10k *ar, u32 addr) in ath10k_pci_soc_read32() argument
247 return ath10k_pci_read32(ar, RTC_SOC_BASE_ADDRESS + addr); in ath10k_pci_soc_read32()
[all …]
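
Note: ath10k_pci_read32() and ath10k_pci_write32() above are the register accessors the rest of the PCI code builds on; a typical read-modify-write composed from them looks like the sketch below. The helper name and the mask/bits parameters are illustrative, not taken from pci.h.

	static void example_pci_rmw32(struct ath10k *ar, u32 offset, u32 mask, u32 bits)
	{
		u32 val;

		val = ath10k_pci_read32(ar, offset);	/* current register contents */
		val &= ~mask;				/* clear the field of interest */
		val |= bits;				/* install the new value */
		ath10k_pci_write32(ar, offset, val);	/* write it back over PCI */
	}
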
Dhtt_tx.c29 ieee80211_wake_queues(htt->ar->hw); in __ath10k_htt_tx_dec_pending()
52 ieee80211_stop_queues(htt->ar->hw); in ath10k_htt_tx_inc_pending()
61 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_msdu_id() local
68 ath10k_dbg(ar, ATH10K_DBG_HTT, "htt tx alloc msdu_id %d\n", ret); in ath10k_htt_tx_alloc_msdu_id()
75 struct ath10k *ar = htt->ar; in ath10k_htt_tx_free_msdu_id() local
79 ath10k_dbg(ar, ATH10K_DBG_HTT, "htt tx free msdu_id %hu\n", msdu_id); in ath10k_htt_tx_free_msdu_id()
86 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc() local
88 ath10k_dbg(ar, ATH10K_DBG_BOOT, "htt tx max num pending tx %d\n", in ath10k_htt_tx_alloc()
94 htt->tx_pool = dma_pool_create("ath10k htt tx pool", htt->ar->dev, in ath10k_htt_tx_alloc()
106 struct ath10k *ar = ctx; in ath10k_htt_tx_clean_up_pending() local
[all …]
Dmac.h27 struct ath10k *ar; member
32 void ath10k_mac_destroy(struct ath10k *ar);
33 int ath10k_mac_register(struct ath10k *ar);
34 void ath10k_mac_unregister(struct ath10k *ar);
35 struct ath10k_vif *ath10k_get_arvif(struct ath10k *ar, u32 vdev_id);
36 void __ath10k_scan_finish(struct ath10k *ar);
37 void ath10k_scan_finish(struct ath10k *ar);
39 void ath10k_offchan_tx_purge(struct ath10k *ar);
41 void ath10k_mgmt_over_wmi_tx_purge(struct ath10k *ar);
43 void ath10k_halt(struct ath10k *ar);
[all …]
Dhtt.c40 status = ath10k_htc_connect_service(&htt->ar->htc, &conn_req, in ath10k_htt_connect()
51 int ath10k_htt_init(struct ath10k *ar) in ath10k_htt_init() argument
53 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_init()
55 htt->ar = ar; in ath10k_htt_init()
76 struct ath10k *ar = htt->ar; in ath10k_htt_verify_version() local
78 ath10k_dbg(ar, ATH10K_DBG_BOOT, "htt target version %d.%d\n", in ath10k_htt_verify_version()
83 ath10k_err(ar, "unsupported htt major version %d. supported versions are 2 and 3\n", in ath10k_htt_verify_version()
93 struct ath10k *ar = htt->ar; in ath10k_htt_setup() local
105 ath10k_warn(ar, "htt version request timed out\n"); in ath10k_htt_setup()
Dspectral.h49 int ath10k_spectral_process_fft(struct ath10k *ar,
53 int ath10k_spectral_start(struct ath10k *ar);
55 int ath10k_spectral_create(struct ath10k *ar);
56 void ath10k_spectral_destroy(struct ath10k *ar);
61 ath10k_spectral_process_fft(struct ath10k *ar, in ath10k_spectral_process_fft() argument
69 static inline int ath10k_spectral_start(struct ath10k *ar) in ath10k_spectral_start() argument
79 static inline int ath10k_spectral_create(struct ath10k *ar) in ath10k_spectral_create() argument
84 static inline void ath10k_spectral_destroy(struct ath10k *ar) in ath10k_spectral_destroy() argument
Dbmi.h186 void ath10k_bmi_start(struct ath10k *ar);
187 int ath10k_bmi_done(struct ath10k *ar);
188 int ath10k_bmi_get_target_info(struct ath10k *ar,
190 int ath10k_bmi_read_memory(struct ath10k *ar, u32 address,
192 int ath10k_bmi_write_memory(struct ath10k *ar, u32 address,
195 #define ath10k_bmi_read32(ar, item, val) \ argument
202 ret = ath10k_bmi_read_memory(ar, addr, (u8 *)&tmp, 4); \
208 #define ath10k_bmi_write32(ar, item, val) \ argument
215 ret = ath10k_bmi_write_memory(ar, address, \
220 int ath10k_bmi_execute(struct ath10k *ar, u32 address, u32 param, u32 *result);
[all …]
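
Note: the ath10k_bmi_read32()/ath10k_bmi_write32() macros above wrap the BMI memory accessors for single host-interest items; core.c (first result) uses them in exactly the pattern sketched below. Only the helper name and the extra_flags parameter are invented here.

	static int example_update_option_flag(struct ath10k *ar, u32 extra_flags)
	{
		u32 param;
		int ret;

		ret = ath10k_bmi_read32(ar, hi_option_flag, &param);	/* fetch current flags */
		if (ret)
			return ret;

		param |= extra_flags;					/* OR in the new flags */

		return ath10k_bmi_write32(ar, hi_option_flag, param);	/* push them back to the target */
	}
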
Dhw.h150 #define QCA_REV_988X(ar) ((ar)->hw_rev == ATH10K_HW_QCA988X) argument
151 #define QCA_REV_6174(ar) ((ar)->hw_rev == ATH10K_HW_QCA6174) argument
297 #define RTC_STATE_COLD_RESET_MASK ar->regs->rtc_state_cold_reset_mask
306 #define RTC_SOC_BASE_ADDRESS ar->regs->rtc_soc_base_address
307 #define RTC_WMAC_BASE_ADDRESS ar->regs->rtc_wmac_base_address
311 #define SOC_CORE_BASE_ADDRESS ar->regs->soc_core_base_address
320 #define CE_WRAPPER_BASE_ADDRESS ar->regs->ce_wrapper_base_address
321 #define CE0_BASE_ADDRESS ar->regs->ce0_base_address
322 #define CE1_BASE_ADDRESS ar->regs->ce1_base_address
323 #define CE2_BASE_ADDRESS ar->regs->ce2_base_address
[all …]
Dthermal.h39 int ath10k_thermal_register(struct ath10k *ar);
40 void ath10k_thermal_unregister(struct ath10k *ar);
41 void ath10k_thermal_event_temperature(struct ath10k *ar, int temperature);
43 static inline int ath10k_thermal_register(struct ath10k *ar) in ath10k_thermal_register() argument
48 static inline void ath10k_thermal_unregister(struct ath10k *ar) in ath10k_thermal_unregister() argument
52 static inline void ath10k_thermal_event_temperature(struct ath10k *ar, in ath10k_thermal_event_temperature() argument
Dce.h110 struct ath10k *ar; member
203 int ath10k_ce_init_pipe(struct ath10k *ar, unsigned int ce_id,
205 void ath10k_ce_deinit_pipe(struct ath10k *ar, unsigned int ce_id);
206 int ath10k_ce_alloc_pipe(struct ath10k *ar, int ce_id,
210 void ath10k_ce_free_pipe(struct ath10k *ar, int ce_id);
241 void ath10k_ce_per_engine_service_any(struct ath10k *ar);
242 void ath10k_ce_per_engine_service(struct ath10k *ar, unsigned int ce_id);
243 int ath10k_ce_disable_interrupts(struct ath10k *ar);
244 void ath10k_ce_enable_interrupts(struct ath10k *ar);
397 static inline u32 ath10k_ce_base_address(struct ath10k *ar, unsigned int ce_id) in ath10k_ce_base_address() argument
[all …]
Dtestmode.h21 void ath10k_testmode_destroy(struct ath10k *ar);
23 bool ath10k_tm_event_wmi(struct ath10k *ar, u32 cmd_id, struct sk_buff *skb);
29 static inline void ath10k_testmode_destroy(struct ath10k *ar) in ath10k_testmode_destroy() argument
33 static inline bool ath10k_tm_event_wmi(struct ath10k *ar, u32 cmd_id, in ath10k_tm_event_wmi() argument
Dtxrx.h25 struct ath10k_peer *ath10k_peer_find(struct ath10k *ar, int vdev_id,
27 struct ath10k_peer *ath10k_peer_find_by_id(struct ath10k *ar, int peer_id);
28 int ath10k_wait_for_peer_created(struct ath10k *ar, int vdev_id,
30 int ath10k_wait_for_peer_deleted(struct ath10k *ar, int vdev_id,
Dwmi.h4870 int ath10k_wmi_attach(struct ath10k *ar);
4871 void ath10k_wmi_detach(struct ath10k *ar);
4872 int ath10k_wmi_wait_for_service_ready(struct ath10k *ar);
4873 int ath10k_wmi_wait_for_unified_ready(struct ath10k *ar);
4875 struct sk_buff *ath10k_wmi_alloc_skb(struct ath10k *ar, u32 len);
4876 int ath10k_wmi_connect(struct ath10k *ar);
4878 struct sk_buff *ath10k_wmi_alloc_skb(struct ath10k *ar, u32 len);
4879 int ath10k_wmi_cmd_send(struct ath10k *ar, struct sk_buff *skb, u32 cmd_id);
4880 int ath10k_wmi_cmd_send_nowait(struct ath10k *ar, struct sk_buff *skb,
4882 void ath10k_wmi_start_scan_init(struct ath10k *ar, struct wmi_start_scan_arg *);
[all …]
Dcore.h305 struct ath10k *ar; member
698 void ath10k_core_destroy(struct ath10k *ar);
700 int ath10k_core_start(struct ath10k *ar, enum ath10k_firmware_mode mode);
701 int ath10k_wait_for_suspend(struct ath10k *ar, u32 suspend_opt);
702 void ath10k_core_stop(struct ath10k *ar);
703 int ath10k_core_register(struct ath10k *ar, u32 chip_id);
704 void ath10k_core_unregister(struct ath10k *ar);
Dhtc.h272 void (*target_send_suspend_complete)(struct ath10k *ar);
331 struct ath10k *ar; member
349 int ath10k_htc_init(struct ath10k *ar);
357 struct sk_buff *ath10k_htc_alloc_skb(struct ath10k *ar, int size);
Dhtt.h1245 struct ath10k *ar; member
1406 int ath10k_htt_init(struct ath10k *ar);
1413 int ath10k_htt_rx_ring_refill(struct ath10k *ar);
1416 void ath10k_htt_htc_tx_complete(struct ath10k *ar, struct sk_buff *skb);
1417 void ath10k_htt_t2h_msg_handler(struct ath10k *ar, struct sk_buff *skb);
Dwmi-tlv.h1457 void ath10k_wmi_tlv_attach(struct ath10k *ar);
/linux-4.1.27/drivers/net/wireless/ath/ath6kl/
Dcore.c52 void ath6kl_core_tx_complete(struct ath6kl *ar, struct sk_buff *skb) in ath6kl_core_tx_complete() argument
54 ath6kl_htc_tx_complete(ar, skb); in ath6kl_core_tx_complete()
58 void ath6kl_core_rx_complete(struct ath6kl *ar, struct sk_buff *skb, u8 pipe) in ath6kl_core_rx_complete() argument
60 ath6kl_htc_rx_complete(ar, skb, pipe); in ath6kl_core_rx_complete()
64 int ath6kl_core_init(struct ath6kl *ar, enum ath6kl_htc_type htc_type) in ath6kl_core_init() argument
72 ath6kl_htc_mbox_attach(ar); in ath6kl_core_init()
75 ath6kl_htc_pipe_attach(ar); in ath6kl_core_init()
82 ar->ath6kl_wq = create_singlethread_workqueue("ath6kl"); in ath6kl_core_init()
83 if (!ar->ath6kl_wq) in ath6kl_core_init()
86 ret = ath6kl_bmi_init(ar); in ath6kl_core_init()
[all …]
Drecovery.c23 struct ath6kl *ar = container_of(work, struct ath6kl, in ath6kl_recovery_work() local
26 ar->state = ATH6KL_STATE_RECOVERY; in ath6kl_recovery_work()
28 del_timer_sync(&ar->fw_recovery.hb_timer); in ath6kl_recovery_work()
30 ath6kl_init_hw_restart(ar); in ath6kl_recovery_work()
32 ar->state = ATH6KL_STATE_ON; in ath6kl_recovery_work()
33 clear_bit(WMI_CTRL_EP_FULL, &ar->flag); in ath6kl_recovery_work()
35 ar->fw_recovery.err_reason = 0; in ath6kl_recovery_work()
37 if (ar->fw_recovery.hb_poll) in ath6kl_recovery_work()
38 mod_timer(&ar->fw_recovery.hb_timer, jiffies + in ath6kl_recovery_work()
39 msecs_to_jiffies(ar->fw_recovery.hb_poll)); in ath6kl_recovery_work()
[all …]
Dinit.c250 static int ath6kl_set_host_app_area(struct ath6kl *ar) in ath6kl_set_host_app_area() argument
257 address = ath6kl_get_hi_item_addr(ar, HI_ITEM(hi_app_host_interest)); in ath6kl_set_host_app_area()
258 address = TARG_VTOP(ar->target_type, address); in ath6kl_set_host_app_area()
260 if (ath6kl_diag_read32(ar, address, &data)) in ath6kl_set_host_app_area()
263 address = TARG_VTOP(ar->target_type, data); in ath6kl_set_host_app_area()
265 if (ath6kl_diag_write(ar, address, (u8 *) &host_app_area, in ath6kl_set_host_app_area()
272 static inline void set_ac2_ep_map(struct ath6kl *ar, in set_ac2_ep_map() argument
276 ar->ac2ep_map[ac] = ep; in set_ac2_ep_map()
277 ar->ep2ac_map[ep] = ac; in set_ac2_ep_map()
281 static int ath6kl_connectservice(struct ath6kl *ar, in ath6kl_connectservice() argument
[all …]
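
Note: ath6kl_set_host_app_area() above shows the standard ath6kl host-interest access pattern — resolve the item address, translate the target virtual address, then read through the diagnostic window. Isolated, it reduces to the sketch below; the function name and error handling are illustrative only.

	static int example_read_host_interest(struct ath6kl *ar, u32 *data)
	{
		u32 address;

		address = ath6kl_get_hi_item_addr(ar, HI_ITEM(hi_app_host_interest));
		address = TARG_VTOP(ar->target_type, address);	/* target virtual -> physical */

		if (ath6kl_diag_read32(ar, address, data))	/* 32-bit diagnostic-window read */
			return -EIO;

		return 0;
	}
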
Dhif-ops.h24 static inline int hif_read_write_sync(struct ath6kl *ar, u32 addr, u8 *buf, in hif_read_write_sync() argument
32 return ar->hif_ops->read_write_sync(ar, addr, buf, len, request); in hif_read_write_sync()
35 static inline int hif_write_async(struct ath6kl *ar, u32 address, u8 *buffer, in hif_write_async() argument
43 return ar->hif_ops->write_async(ar, address, buffer, length, in hif_write_async()
46 static inline void ath6kl_hif_irq_enable(struct ath6kl *ar) in ath6kl_hif_irq_enable() argument
50 return ar->hif_ops->irq_enable(ar); in ath6kl_hif_irq_enable()
53 static inline void ath6kl_hif_irq_disable(struct ath6kl *ar) in ath6kl_hif_irq_disable() argument
57 return ar->hif_ops->irq_disable(ar); in ath6kl_hif_irq_disable()
60 static inline struct hif_scatter_req *hif_scatter_req_get(struct ath6kl *ar) in hif_scatter_req_get() argument
62 return ar->hif_ops->scatter_req_get(ar); in hif_scatter_req_get()
[all …]
Dbmi.c23 int ath6kl_bmi_done(struct ath6kl *ar) in ath6kl_bmi_done() argument
28 if (ar->bmi.done_sent) { in ath6kl_bmi_done()
33 ar->bmi.done_sent = true; in ath6kl_bmi_done()
35 ret = ath6kl_hif_bmi_write(ar, (u8 *)&cid, sizeof(cid)); in ath6kl_bmi_done()
44 int ath6kl_bmi_get_target_info(struct ath6kl *ar, in ath6kl_bmi_get_target_info() argument
50 if (ar->bmi.done_sent) { in ath6kl_bmi_get_target_info()
55 ret = ath6kl_hif_bmi_write(ar, (u8 *)&cid, sizeof(cid)); in ath6kl_bmi_get_target_info()
61 if (ar->hif_type == ATH6KL_HIF_TYPE_USB) { in ath6kl_bmi_get_target_info()
62 ret = ath6kl_hif_bmi_read(ar, (u8 *)targ_info, in ath6kl_bmi_get_target_info()
65 ret = ath6kl_hif_bmi_read(ar, (u8 *)&targ_info->version, in ath6kl_bmi_get_target_info()
[all …]
Dmain.c28 struct ath6kl *ar = vif->ar; in ath6kl_find_sta() local
38 if (memcmp(node_addr, ar->sta_list[i].mac, ETH_ALEN) == 0) { in ath6kl_find_sta()
39 conn = &ar->sta_list[i]; in ath6kl_find_sta()
47 struct ath6kl_sta *ath6kl_find_sta_by_aid(struct ath6kl *ar, u8 aid) in ath6kl_find_sta_by_aid() argument
53 if (ar->sta_list[ctr].aid == aid) { in ath6kl_find_sta_by_aid()
54 conn = &ar->sta_list[ctr]; in ath6kl_find_sta_by_aid()
65 struct ath6kl *ar = vif->ar; in ath6kl_add_new_sta() local
71 sta = &ar->sta_list[free_slot]; in ath6kl_add_new_sta()
81 ar->sta_list_index = ar->sta_list_index | (1 << free_slot); in ath6kl_add_new_sta()
82 ar->ap_stats.sta[free_slot].aid = cpu_to_le32(aid); in ath6kl_add_new_sta()
[all …]
Dcfg80211.c148 struct ath6kl *ar = vif->ar; in __ath6kl_cfg80211_sscan_stop() local
155 if (ar->state == ATH6KL_STATE_RECOVERY) in __ath6kl_cfg80211_sscan_stop()
158 ath6kl_wmi_enable_sched_scan_cmd(ar->wmi, vif->fw_vif_idx, false); in __ath6kl_cfg80211_sscan_stop()
165 struct ath6kl *ar = vif->ar; in ath6kl_cfg80211_sscan_disable() local
173 cfg80211_sched_scan_stopped(ar->wiphy); in ath6kl_cfg80211_sscan_disable()
287 struct ath6kl *ar = vif->ar; in ath6kl_cfg80211_ready() local
289 if (!test_bit(WMI_READY, &ar->flag)) { in ath6kl_cfg80211_ready()
325 struct ath6kl *ar = vif->ar; in ath6kl_set_assoc_req_ies() local
335 ar->connect_ctrl_flags &= ~CONNECT_WPS_FLAG; in ath6kl_set_assoc_req_ies()
356 ar->connect_ctrl_flags |= CONNECT_WPS_FLAG; in ath6kl_set_assoc_req_ies()
[all …]
Dtxrx.c45 struct ath6kl *ar = ath6kl_priv(dev); in ath6kl_ibss_map_epid() local
57 for (i = 0; i < ar->node_num; i++) { in ath6kl_ibss_map_epid()
58 if (memcmp(eth_hdr->h_dest, ar->node_map[i].mac_addr, in ath6kl_ibss_map_epid()
61 ar->node_map[i].tx_pend++; in ath6kl_ibss_map_epid()
62 return ar->node_map[i].ep_id; in ath6kl_ibss_map_epid()
65 if ((ep_map == -1) && !ar->node_map[i].tx_pend) in ath6kl_ibss_map_epid()
70 ep_map = ar->node_num; in ath6kl_ibss_map_epid()
71 ar->node_num++; in ath6kl_ibss_map_epid()
72 if (ar->node_num > MAX_NODE_NUM) in ath6kl_ibss_map_epid()
76 memcpy(ar->node_map[ep_map].mac_addr, eth_hdr->h_dest, ETH_ALEN); in ath6kl_ibss_map_epid()
[all …]
Dhtc-ops.h23 static inline void *ath6kl_htc_create(struct ath6kl *ar) in ath6kl_htc_create() argument
25 return ar->htc_ops->create(ar); in ath6kl_htc_create()
30 return target->dev->ar->htc_ops->wait_target(target); in ath6kl_htc_wait_target()
35 return target->dev->ar->htc_ops->start(target); in ath6kl_htc_start()
42 return target->dev->ar->htc_ops->conn_service(target, req, resp); in ath6kl_htc_conn_service()
48 return target->dev->ar->htc_ops->tx(target, packet); in ath6kl_htc_tx()
53 return target->dev->ar->htc_ops->stop(target); in ath6kl_htc_stop()
58 return target->dev->ar->htc_ops->cleanup(target); in ath6kl_htc_cleanup()
65 return target->dev->ar->htc_ops->flush_txep(target, endpoint, tag); in ath6kl_htc_flush_txep()
70 return target->dev->ar->htc_ops->flush_rx_buf(target); in ath6kl_htc_flush_rx_buf()
[all …]
Dbmi.h226 #define ath6kl_bmi_write_hi32(ar, item, val) \ argument
231 addr = ath6kl_get_hi_item_addr(ar, HI_ITEM(item)); \
233 ath6kl_bmi_write(ar, addr, (u8 *) &v, sizeof(v)); \
236 #define ath6kl_bmi_read_hi32(ar, item, val) \ argument
243 addr = ath6kl_get_hi_item_addr(ar, HI_ITEM(item)); \
244 ret = ath6kl_bmi_read(ar, addr, (u8 *) &tmp, 4); \
250 int ath6kl_bmi_init(struct ath6kl *ar);
251 void ath6kl_bmi_cleanup(struct ath6kl *ar);
252 void ath6kl_bmi_reset(struct ath6kl *ar);
254 int ath6kl_bmi_done(struct ath6kl *ar);
[all …]
Ddebug.c191 if (dev->ar->mbox_info.gmbox_addr != 0) { in ath6kl_dump_registers()
264 void ath6kl_debug_war(struct ath6kl *ar, enum ath6kl_war war) in ath6kl_debug_war() argument
268 ar->debug.war_stats.invalid_rate++; in ath6kl_debug_war()
276 struct ath6kl *ar = file->private_data; in read_file_war_stats() local
291 "Invalid rates", ar->debug.war_stats.invalid_rate); in read_file_war_stats()
309 void ath6kl_debug_fwlog_event(struct ath6kl *ar, const void *buf, size_t len) in ath6kl_debug_fwlog_event() argument
332 spin_lock(&ar->debug.fwlog_queue.lock); in ath6kl_debug_fwlog_event()
334 __skb_queue_tail(&ar->debug.fwlog_queue, skb); in ath6kl_debug_fwlog_event()
335 complete(&ar->debug.fwlog_completion); in ath6kl_debug_fwlog_event()
338 while (skb_queue_len(&ar->debug.fwlog_queue) > in ath6kl_debug_fwlog_event()
[all …]
Dsdio.c45 struct ath6kl *ar; member
78 static inline struct ath6kl_sdio *ath6kl_sdio_priv(struct ath6kl *ar) in ath6kl_sdio_priv() argument
80 return ar->hif_priv; in ath6kl_sdio_priv()
94 static void ath6kl_sdio_set_mbox_info(struct ath6kl *ar) in ath6kl_sdio_set_mbox_info() argument
96 struct ath6kl_mbox_info *mbox_info = &ar->mbox_info; in ath6kl_sdio_set_mbox_info()
337 scat_req->complete(ar_sdio->ar->htc_target, scat_req); in ath6kl_sdio_scat_rw()
401 hif_scatter_req_add(ar_sdio->ar, s_req); in ath6kl_sdio_alloc_prep_scat_req()
407 static int ath6kl_sdio_read_write_sync(struct ath6kl *ar, u32 addr, u8 *buf, in ath6kl_sdio_read_write_sync() argument
410 struct ath6kl_sdio *ar_sdio = ath6kl_sdio_priv(ar); in ath6kl_sdio_read_write_sync()
451 status = ath6kl_sdio_read_write_sync(ar_sdio->ar, req->address, in __ath6kl_sdio_write_async()
[all …]
Dhif.h231 struct ath6kl *ar; member
235 int (*read_write_sync)(struct ath6kl *ar, u32 addr, u8 *buf,
237 int (*write_async)(struct ath6kl *ar, u32 address, u8 *buffer,
240 void (*irq_enable)(struct ath6kl *ar);
241 void (*irq_disable)(struct ath6kl *ar);
243 struct hif_scatter_req *(*scatter_req_get)(struct ath6kl *ar);
244 void (*scatter_req_add)(struct ath6kl *ar,
246 int (*enable_scatter)(struct ath6kl *ar);
247 int (*scat_req_rw) (struct ath6kl *ar,
249 void (*cleanup_scatter)(struct ath6kl *ar);
[all …]
Ddebug.h73 void ath6kl_debug_fwlog_event(struct ath6kl *ar, const void *buf, size_t len);
74 void ath6kl_debug_war(struct ath6kl *ar, enum ath6kl_war war);
75 int ath6kl_debug_roam_tbl_event(struct ath6kl *ar, const void *buf,
77 void ath6kl_debug_set_keepalive(struct ath6kl *ar, u8 keepalive);
78 void ath6kl_debug_set_disconnect_timeout(struct ath6kl *ar, u8 timeout);
79 void ath6kl_debug_init(struct ath6kl *ar);
80 int ath6kl_debug_init_fs(struct ath6kl *ar);
81 void ath6kl_debug_cleanup(struct ath6kl *ar);
105 static inline void ath6kl_debug_fwlog_event(struct ath6kl *ar, in ath6kl_debug_fwlog_event() argument
110 static inline void ath6kl_debug_war(struct ath6kl *ar, enum ath6kl_war war) in ath6kl_debug_war() argument
[all …]
Dhif.c70 static void ath6kl_hif_dump_fw_crash(struct ath6kl *ar) in ath6kl_hif_dump_fw_crash() argument
76 if (ar->target_type != TARGET_TYPE_AR6003) in ath6kl_hif_dump_fw_crash()
80 address = ath6kl_get_hi_item_addr(ar, HI_ITEM(hi_failure_state)); in ath6kl_hif_dump_fw_crash()
81 address = TARG_VTOP(ar->target_type, address); in ath6kl_hif_dump_fw_crash()
84 ret = ath6kl_diag_read32(ar, address, &regdump_addr); in ath6kl_hif_dump_fw_crash()
94 regdump_addr = TARG_VTOP(ar->target_type, regdump_addr); in ath6kl_hif_dump_fw_crash()
97 ret = ath6kl_diag_read(ar, regdump_addr, (u8 *)&regdump_val[0], in ath6kl_hif_dump_fw_crash()
105 ath6kl_info("hw 0x%x fw %s\n", ar->wiphy->hw_version, in ath6kl_hif_dump_fw_crash()
106 ar->wiphy->fw_version); in ath6kl_hif_dump_fw_crash()
131 ret = hif_read_write_sync(dev->ar, COUNT_DEC_ADDRESS, in ath6kl_hif_proc_dbg_intr()
[all …]
Dusb.c73 struct ath6kl *ar; member
81 struct ath6kl *ar; member
168 static inline struct ath6kl_usb *ath6kl_usb_priv(struct ath6kl *ar) in ath6kl_usb_priv() argument
170 return ar->hif_priv; in ath6kl_usb_priv()
586 ath6kl_core_tx_complete(ar_usb->ar, skb); in ath6kl_usb_io_comp_work()
590 ath6kl_core_rx_complete(ar_usb->ar, skb, in ath6kl_usb_io_comp_work()
668 ath6kl_stop_txrx(ar_usb->ar); in ath6kl_usb_device_detached()
672 ath6kl_core_cleanup(ar_usb->ar); in ath6kl_usb_device_detached()
677 static void hif_start(struct ath6kl *ar) in hif_start() argument
679 struct ath6kl_usb *device = ath6kl_usb_priv(ar); in hif_start()
[all …]
Dcore.h603 struct ath6kl *ar; member
876 return ((struct ath6kl_vif *) netdev_priv(dev))->ar; in ath6kl_priv()
879 static inline u32 ath6kl_get_hi_item_addr(struct ath6kl *ar, in ath6kl_get_hi_item_addr() argument
884 if (ar->target_type == TARGET_TYPE_AR6003) in ath6kl_get_hi_item_addr()
886 else if (ar->target_type == TARGET_TYPE_AR6004) in ath6kl_get_hi_item_addr()
892 int ath6kl_configure_target(struct ath6kl *ar);
896 void ath6kl_cookie_init(struct ath6kl *ar);
897 void ath6kl_cookie_cleanup(struct ath6kl *ar);
903 void ath6kl_stop_txrx(struct ath6kl *ar);
904 void ath6kl_cleanup_amsdu_rxbufs(struct ath6kl *ar);
[all …]
Dcfg80211.h27 struct wireless_dev *ath6kl_interface_add(struct ath6kl *ar, const char *name,
49 int ath6kl_cfg80211_suspend(struct ath6kl *ar,
53 int ath6kl_cfg80211_resume(struct ath6kl *ar);
58 void ath6kl_cfg80211_stop_all(struct ath6kl *ar);
60 int ath6kl_cfg80211_init(struct ath6kl *ar);
61 void ath6kl_cfg80211_cleanup(struct ath6kl *ar);
64 void ath6kl_cfg80211_destroy(struct ath6kl *ar);
Dtestmode.c46 void ath6kl_tm_rx_event(struct ath6kl *ar, void *buf, size_t buf_len) in ath6kl_tm_rx_event() argument
53 skb = cfg80211_testmode_alloc_event_skb(ar->wiphy, buf_len, GFP_KERNEL); in ath6kl_tm_rx_event()
72 struct ath6kl *ar = wiphy_priv(wiphy); in ath6kl_tm_cmd() local
93 ath6kl_wmi_test_cmd(ar->wmi, buf, buf_len); in ath6kl_tm_cmd()
Dtestmode.h22 void ath6kl_tm_rx_event(struct ath6kl *ar, void *buf, size_t buf_len);
28 static inline void ath6kl_tm_rx_event(struct ath6kl *ar, void *buf, in ath6kl_tm_rx_event() argument
Dhtc_pipe.c255 status = ath6kl_hif_pipe_send(target->dev->ar, in htc_issue_packets()
307 struct ath6kl *ar = target->dev->ar; in htc_try_send() local
413 ath6kl_hif_pipe_get_free_queue_number(ar, in htc_try_send()
488 ath6kl_hif_pipe_get_free_queue_number(ar, pipeid); in htc_try_send()
743 static int ath6kl_htc_pipe_tx_complete(struct ath6kl *ar, struct sk_buff *skb) in ath6kl_htc_pipe_tx_complete() argument
745 struct htc_target *target = ar->htc_target; in ath6kl_htc_pipe_tx_complete()
952 static int ath6kl_htc_pipe_rx_complete(struct ath6kl *ar, struct sk_buff *skb, in ath6kl_htc_pipe_rx_complete() argument
955 struct htc_target *target = ar->htc_target; in ath6kl_htc_pipe_rx_complete()
1232 struct ath6kl *ar = target->dev->ar; in ath6kl_htc_pipe_conn_service() local
1391 status = ath6kl_hif_pipe_map_service(ar, ep->svc_id, in ath6kl_htc_pipe_conn_service()
[all …]
Dhtc.h549 void* (*create)(struct ath6kl *ar);
570 int (*tx_complete)(struct ath6kl *ar, struct sk_buff *skb);
571 int (*rx_complete)(struct ath6kl *ar, struct sk_buff *skb, u8 pipe);
674 void ath6kl_htc_pipe_attach(struct ath6kl *ar);
675 void ath6kl_htc_mbox_attach(struct ath6kl *ar);
Dhtc_mbox.c501 hif_scatter_req_add(target->dev->ar, scat_req); in htc_async_tx_scat_complete()
524 target->dev->ar->mbox_info.htc_addr, in ath6kl_htc_tx_issue()
528 status = hif_read_write_sync(target->dev->ar, in ath6kl_htc_tx_issue()
529 target->dev->ar->mbox_info.htc_addr, in ath6kl_htc_tx_issue()
536 status = hif_write_async(target->dev->ar, in ath6kl_htc_tx_issue()
537 target->dev->ar->mbox_info.htc_addr, in ath6kl_htc_tx_issue()
769 ac = target->dev->ar->ep2ac_map[endpoint->eid]; in ath6kl_htc_tx_bundle()
780 scat_req = hif_scatter_req_get(target->dev->ar); in ath6kl_htc_tx_bundle()
825 hif_scatter_req_add(target->dev->ar, scat_req); in ath6kl_htc_tx_bundle()
886 ac = target->dev->ar->ep2ac_map[endpoint->eid]; in ath6kl_htc_tx_from_queue()
[all …]
Dwmi.c136 struct ath6kl_vif *ath6kl_get_vif_by_index(struct ath6kl *ar, u8 if_idx) in ath6kl_get_vif_by_index() argument
140 if (WARN_ON(if_idx > (ar->vif_max - 1))) in ath6kl_get_vif_by_index()
144 spin_lock_bh(&ar->list_lock); in ath6kl_get_vif_by_index()
145 list_for_each_entry(vif, &ar->vif_list, list) { in ath6kl_get_vif_by_index()
151 spin_unlock_bh(&ar->list_lock); in ath6kl_get_vif_by_index()
510 struct ath6kl *ar = wmi->parent_dev; in ath6kl_wmi_remain_on_chnl_event_rx() local
521 chan = ieee80211_get_channel(ar->wiphy, freq); in ath6kl_wmi_remain_on_chnl_event_rx()
543 struct ath6kl *ar = wmi->parent_dev; in ath6kl_wmi_cancel_remain_on_chnl_event_rx() local
555 chan = ieee80211_get_channel(ar->wiphy, freq); in ath6kl_wmi_cancel_remain_on_chnl_event_rx()
1085 cfg80211_sched_scan_results(vif->ar->wiphy); in ath6kl_wmi_sscan_timer()
[all …]
Dwmi.h2726 struct ath6kl_vif *ath6kl_get_vif_by_index(struct ath6kl *ar, u8 if_idx);
/linux-4.1.27/drivers/net/wireless/ath/carl9170/
Dusb.c131 static void carl9170_usb_submit_data_urb(struct ar9170 *ar) in carl9170_usb_submit_data_urb() argument
136 if (atomic_inc_return(&ar->tx_anch_urbs) > AR9170_NUM_TX_URBS) in carl9170_usb_submit_data_urb()
139 urb = usb_get_from_anchor(&ar->tx_wait); in carl9170_usb_submit_data_urb()
143 usb_anchor_urb(urb, &ar->tx_anch); in carl9170_usb_submit_data_urb()
148 dev_err(&ar->udev->dev, "tx submit failed (%d)\n", in carl9170_usb_submit_data_urb()
153 usb_anchor_urb(urb, &ar->tx_err); in carl9170_usb_submit_data_urb()
162 atomic_dec(&ar->tx_anch_urbs); in carl9170_usb_submit_data_urb()
167 struct ar9170 *ar = usb_get_intfdata(usb_ifnum_to_if(urb->dev, 0)); in carl9170_usb_tx_data_complete() local
169 if (WARN_ON_ONCE(!ar)) { in carl9170_usb_tx_data_complete()
174 atomic_dec(&ar->tx_anch_urbs); in carl9170_usb_tx_data_complete()
[all …]
Dmain.c184 static void carl9170_ampdu_gc(struct ar9170 *ar) in carl9170_ampdu_gc() argument
190 list_for_each_entry_rcu(tid_info, &ar->tx_ampdu_list, list) { in carl9170_ampdu_gc()
191 spin_lock_bh(&ar->tx_ampdu_list_lock); in carl9170_ampdu_gc()
195 ar->tx_ampdu_list_len--; in carl9170_ampdu_gc()
198 spin_unlock_bh(&ar->tx_ampdu_list_lock); in carl9170_ampdu_gc()
201 rcu_assign_pointer(ar->tx_ampdu_iter, tid_info); in carl9170_ampdu_gc()
212 carl9170_tx_status(ar, skb, false); in carl9170_ampdu_gc()
219 static void carl9170_flush(struct ar9170 *ar, bool drop_queued) in carl9170_flush() argument
229 for (i = 0; i < ar->hw->queues; i++) { in carl9170_flush()
232 while ((skb = skb_dequeue(&ar->tx_pending[i]))) { in carl9170_flush()
[all …]
Dled.c43 int carl9170_led_set_state(struct ar9170 *ar, const u32 led_state) in carl9170_led_set_state() argument
45 return carl9170_write_reg(ar, AR9170_GPIO_REG_PORT_DATA, led_state); in carl9170_led_set_state()
48 int carl9170_led_init(struct ar9170 *ar) in carl9170_led_init() argument
54 err = carl9170_write_reg(ar, AR9170_GPIO_REG_PORT_TYPE, 3); in carl9170_led_init()
59 err = carl9170_led_set_state(ar, 0); in carl9170_led_init()
68 struct ar9170 *ar = container_of(work, struct ar9170, led_work.work); in carl9170_led_update() local
73 if (!IS_ACCEPTING_CMD(ar)) in carl9170_led_update()
76 mutex_lock(&ar->mutex); in carl9170_led_update()
78 if (ar->leds[i].registered) { in carl9170_led_update()
79 if (ar->leds[i].last_state || in carl9170_led_update()
[all …]
Dfw.c33 static const void *carl9170_fw_find_desc(struct ar9170 *ar, const u8 descid[4], in carl9170_fw_find_desc() argument
38 carl9170fw_for_each_hdr(iter, ar->fw.desc) { in carl9170_fw_find_desc()
52 static int carl9170_fw_verify_descs(struct ar9170 *ar, in carl9170_fw_verify_descs() argument
95 static void carl9170_fw_info(struct ar9170 *ar) in carl9170_fw_info() argument
101 dev_info(&ar->udev->dev, "driver API: %s 2%03d-%02d-%02d [%d-%d]\n", in carl9170_fw_info()
106 motd_desc = carl9170_fw_find_desc(ar, MOTD_MAGIC, in carl9170_fw_info()
115 dev_info(&ar->udev->dev, "firmware API: %.*s 2%03d-%02d-%02d\n", in carl9170_fw_info()
121 strlcpy(ar->hw->wiphy->fw_version, motd_desc->release, in carl9170_fw_info()
122 sizeof(ar->hw->wiphy->fw_version)); in carl9170_fw_info()
144 static int carl9170_fw_checksum(struct ar9170 *ar, const __u8 *data, in carl9170_fw_checksum() argument
[all …]
Drx.c49 static void carl9170_dbg_message(struct ar9170 *ar, const char *buf, u32 len) in carl9170_dbg_message() argument
56 ar->fw.err_counter++; in carl9170_dbg_message()
57 if (ar->fw.err_counter > 3) { in carl9170_dbg_message()
64 ar->fw.bug_counter++; in carl9170_dbg_message()
70 wiphy_info(ar->hw->wiphy, "FW: %.*s\n", len, buf); in carl9170_dbg_message()
73 carl9170_restart(ar, reason); in carl9170_dbg_message()
76 static void carl9170_handle_ps(struct ar9170 *ar, struct carl9170_rsp *rsp) in carl9170_handle_ps() argument
84 if (ar->ps.state != new_ps) { in carl9170_handle_ps()
86 ar->ps.sleep_ms = jiffies_to_msecs(jiffies - in carl9170_handle_ps()
87 ar->ps.last_action); in carl9170_handle_ps()
[all …]
Dmac.c44 int carl9170_set_dyn_sifs_ack(struct ar9170 *ar) in carl9170_set_dyn_sifs_ack() argument
48 if (conf_is_ht40(&ar->hw->conf)) in carl9170_set_dyn_sifs_ack()
51 if (ar->hw->conf.chandef.chan->band == IEEE80211_BAND_2GHZ) in carl9170_set_dyn_sifs_ack()
57 return carl9170_write_reg(ar, AR9170_MAC_REG_DYNAMIC_SIFS_ACK, val); in carl9170_set_dyn_sifs_ack()
60 int carl9170_set_rts_cts_rate(struct ar9170 *ar) in carl9170_set_rts_cts_rate() argument
64 if (conf_is_ht(&ar->hw->conf)) { in carl9170_set_rts_cts_rate()
69 if (ar->hw->conf.chandef.chan->band == IEEE80211_BAND_2GHZ) { in carl9170_set_rts_cts_rate()
80 return carl9170_write_reg(ar, AR9170_MAC_REG_RTS_CTS_RATE, in carl9170_set_rts_cts_rate()
84 int carl9170_set_slot_time(struct ar9170 *ar) in carl9170_set_slot_time() argument
90 vif = carl9170_get_main_vif(ar); in carl9170_set_slot_time()
[all …]
Ddebug.c54 char *(*read)(struct ar9170 *ar, char *buf, size_t bufsize,
66 struct ar9170 *ar; in carl9170_debugfs_read() local
74 ar = file->private_data; in carl9170_debugfs_read()
76 if (!ar) in carl9170_debugfs_read()
89 mutex_lock(&ar->mutex); in carl9170_debugfs_read()
90 if (!CHK_DEV_STATE(ar, dfops->req_dev_state)) { in carl9170_debugfs_read()
96 res_buf = dfops->read(ar, buf, dfops->read_bufsize, &ret); in carl9170_debugfs_read()
108 mutex_unlock(&ar->mutex); in carl9170_debugfs_read()
116 struct ar9170 *ar; in carl9170_debugfs_write() local
126 ar = file->private_data; in carl9170_debugfs_write()
[all …]
Dtx.c48 static inline unsigned int __carl9170_get_queue(struct ar9170 *ar, in __carl9170_get_queue() argument
64 static inline unsigned int carl9170_get_queue(struct ar9170 *ar, in carl9170_get_queue() argument
67 return __carl9170_get_queue(ar, skb_get_queue_mapping(skb)); in carl9170_get_queue()
70 static bool is_mem_full(struct ar9170 *ar) in is_mem_full() argument
72 return (DIV_ROUND_UP(IEEE80211_MAX_FRAME_LEN, ar->fw.mem_block_size) > in is_mem_full()
73 atomic_read(&ar->mem_free_blocks)); in is_mem_full()
76 static void carl9170_tx_accounting(struct ar9170 *ar, struct sk_buff *skb) in carl9170_tx_accounting() argument
81 atomic_inc(&ar->tx_total_queued); in carl9170_tx_accounting()
84 spin_lock_bh(&ar->tx_stats_lock); in carl9170_tx_accounting()
92 ar->tx_stats[queue].len++; in carl9170_tx_accounting()
[all …]
Dcmd.c43 int carl9170_write_reg(struct ar9170 *ar, const u32 reg, const u32 val) in carl9170_write_reg() argument
51 err = carl9170_exec_cmd(ar, CARL9170_CMD_WREG, sizeof(buf), in carl9170_write_reg()
55 wiphy_err(ar->hw->wiphy, "writing reg %#x " in carl9170_write_reg()
62 int carl9170_read_mreg(struct ar9170 *ar, const int nregs, in carl9170_read_mreg() argument
76 err = carl9170_exec_cmd(ar, CARL9170_CMD_RREG, in carl9170_read_mreg()
81 wiphy_err(ar->hw->wiphy, "reading regs failed (%d)\n", in carl9170_read_mreg()
94 int carl9170_read_reg(struct ar9170 *ar, u32 reg, u32 *val) in carl9170_read_reg() argument
96 return carl9170_read_mreg(ar, 1, &reg, val); in carl9170_read_reg()
99 int carl9170_echo_test(struct ar9170 *ar, const u32 v) in carl9170_echo_test() argument
104 err = carl9170_exec_cmd(ar, CARL9170_CMD_ECHO, in carl9170_echo_test()
[all …]
Dphy.c44 static int carl9170_init_power_cal(struct ar9170 *ar) in carl9170_init_power_cal() argument
46 carl9170_regwrite_begin(ar); in carl9170_init_power_cal()
432 static int carl9170_init_phy_from_eeprom(struct ar9170 *ar, in carl9170_init_phy_from_eeprom() argument
440 struct ar9170_eeprom_modal *m = &ar->eeprom.modal_header[is_2ghz]; in carl9170_init_phy_from_eeprom()
443 carl9170_regwrite_begin(ar); in carl9170_init_phy_from_eeprom()
536 carl9170_regwrite(AR9170_PHY_REG_RX_CHAINMASK, ar->eeprom.rx_mask); in carl9170_init_phy_from_eeprom()
537 carl9170_regwrite(AR9170_PHY_REG_CAL_CHAINMASK, ar->eeprom.rx_mask); in carl9170_init_phy_from_eeprom()
543 static int carl9170_init_phy(struct ar9170 *ar, enum ieee80211_band band) in carl9170_init_phy() argument
548 bool is_40mhz = conf_is_ht40(&ar->hw->conf); in carl9170_init_phy()
550 carl9170_regwrite_begin(ar); in carl9170_init_phy()
[all …]
Dcarl9170.h198 struct ar9170 *ar; member
495 struct ar9170 *ar; member
504 static inline void __carl9170_set_state(struct ar9170 *ar, in __carl9170_set_state() argument
507 ar->state = newstate; in __carl9170_set_state()
510 static inline void carl9170_set_state(struct ar9170 *ar, in carl9170_set_state() argument
515 spin_lock_irqsave(&ar->state_lock, flags); in carl9170_set_state()
516 __carl9170_set_state(ar, newstate); in carl9170_set_state()
517 spin_unlock_irqrestore(&ar->state_lock, flags); in carl9170_set_state()
520 static inline void carl9170_set_state_when(struct ar9170 *ar, in carl9170_set_state_when() argument
525 spin_lock_irqsave(&ar->state_lock, flags); in carl9170_set_state_when()
[all …]
Dcmd.h45 int carl9170_write_reg(struct ar9170 *ar, const u32 reg, const u32 val);
46 int carl9170_read_reg(struct ar9170 *ar, const u32 reg, u32 *val);
47 int carl9170_read_mreg(struct ar9170 *ar, const int nregs,
49 int carl9170_echo_test(struct ar9170 *ar, u32 v);
50 int carl9170_reboot(struct ar9170 *ar);
51 int carl9170_mac_reset(struct ar9170 *ar);
52 int carl9170_powersave(struct ar9170 *ar, const bool power_on);
53 int carl9170_collect_tally(struct ar9170 *ar);
54 int carl9170_bcn_ctrl(struct ar9170 *ar, const unsigned int vif_id,
57 static inline int carl9170_flush_cab(struct ar9170 *ar, in carl9170_flush_cab() argument
[all …]
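
Note: carl9170_write_reg()/carl9170_read_reg() above are the firmware-command register accessors; led.c (earlier result) drives the GPIO registers with them much as in this sketch. The helper name is made up; the register names and the port-type value 3 are the ones shown in led.c.

	static int example_led_bringup(struct ar9170 *ar)
	{
		int err;

		/* configure the GPIO lines as outputs, as carl9170_led_init() does */
		err = carl9170_write_reg(ar, AR9170_GPIO_REG_PORT_TYPE, 3);
		if (err)
			return err;

		/* then clear the port-data register to switch every LED off */
		return carl9170_write_reg(ar, AR9170_GPIO_REG_PORT_DATA, 0);
	}
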
Ddebug.h132 void carl9170_debugfs_register(struct ar9170 *ar);
133 void carl9170_debugfs_unregister(struct ar9170 *ar);
/linux-4.1.27/drivers/net/wireless/ath/ar5523/
Dar5523.c45 static int ar5523_submit_rx_cmd(struct ar5523 *ar);
46 static void ar5523_data_tx_pkt_put(struct ar5523 *ar);
48 static void ar5523_read_reply(struct ar5523 *ar, struct ar5523_cmd_hdr *hdr, in ar5523_read_reply() argument
61 ar5523_dbg(ar, "Code = %d len = %d\n", be32_to_cpu(hdr->code) & 0xff, in ar5523_read_reply()
77 ar5523_err(ar, "olen to small %d < %d\n", in ar5523_read_reply()
94 struct ar5523 *ar = urb->context; in ar5523_cmd_rx_cb() local
95 struct ar5523_tx_cmd *cmd = &ar->tx_cmd; in ar5523_cmd_rx_cb()
96 struct ar5523_cmd_hdr *hdr = ar->rx_cmd_buf; in ar5523_cmd_rx_cb()
102 ar5523_err(ar, "RX USB error %d.\n", urb->status); in ar5523_cmd_rx_cb()
107 ar5523_err(ar, "RX USB to short.\n"); in ar5523_cmd_rx_cb()
[all …]
Dar5523.h61 struct ar5523 *ar; member
76 struct ar5523 *ar; member
82 struct ar5523 *ar; member
137 #define ar5523_dbg(ar, format, arg...) \ argument
138 dev_dbg(&(ar)->dev->dev, format, ## arg)
144 #define ar5523_err(ar, format, arg...) \ argument
146 if (!test_bit(AR5523_USB_DISCONNECTED, &ar->flags)) { \
147 dev_err(&(ar)->dev->dev, format, ## arg); \
150 #define ar5523_info(ar, format, arg...) \ argument
151 dev_info(&(ar)->dev->dev, format, ## arg)
/linux-4.1.27/arch/xtensa/include/asm/
Dcacheasm.h34 .macro __loop_cache_all ar at insn size line_width
36 movi \ar, 0
38 __loopi \ar, \at, \size, (4 << (\line_width))
39 \insn \ar, 0 << (\line_width)
40 \insn \ar, 1 << (\line_width)
41 \insn \ar, 2 << (\line_width)
42 \insn \ar, 3 << (\line_width)
43 __endla \ar, \at, 4 << (\line_width)
48 .macro __loop_cache_range ar as at insn line_width
50 extui \at, \ar, 0, \line_width
[all …]
Dasmmacro.h48 .macro __loopi ar, at, size, incr
54 addi \at, \ar, \size
64 .macro __loops ar, as, at, incr_log2, mask_log2, cond, ncond
90 add \at, \ar, \at
92 add \at, \ar, \as
103 .macro __loopt ar, as, at, incr_log2
106 sub \at, \as, \ar
136 .macro __endl ar, as
138 bltu \ar, \as, 98b
147 .macro __endla ar, as, incr
[all …]
/linux-4.1.27/drivers/media/platform/
Darv.c109 struct ar { struct
125 static struct ar ardev; argument
249 static void wait_for_vertical_sync(struct ar *ar, int exp_line) in wait_for_vertical_sync() argument
264 v4l2_err(&ar->v4l2_dev, "lost %d -> %d\n", exp_line, l); in wait_for_vertical_sync()
273 struct ar *ar = video_drvdata(file); in ar_read() local
274 long ret = ar->frame_bytes; /* return read bytes */ in ar_read()
286 if (ar->size == AR_SIZE_QVGA) in ar_read()
288 if (ar->mode == AR_MODE_NORMAL) in ar_read()
291 mutex_lock(&ar->lock); in ar_read()
302 ar_outl(ar->line_buff, M32R_DMA0CDA_PORTL); /* destination addr. */ in ar_read()
[all …]
/linux-4.1.27/net/rxrpc/
DMakefile7 ar-accept.o \
8 ar-ack.o \
9 ar-call.o \
10 ar-connection.o \
11 ar-connevent.o \
12 ar-error.o \
13 ar-input.o \
14 ar-key.o \
15 ar-local.o \
16 ar-output.o \
[all …]
/linux-4.1.27/arch/ia64/lib/
Dxor.S21 .save ar.pfs, r31
22 alloc r31 = ar.pfs, 3, 0, 13, 16
23 .save ar.lc, r30
24 mov r30 = ar.lc
30 mov ar.ec = 6 + 2
37 mov ar.lc = in0
50 mov ar.lc = r30
58 .save ar.pfs, r31
59 alloc r31 = ar.pfs, 4, 0, 20, 24
60 .save ar.lc, r30
[all …]
Dflush.S27 alloc r2=ar.pfs,2,0,0,0
41 .save ar.lc,r3
42 mov r3=ar.lc // save ar.lc
46 mov ar.lc=r8
60 mov ar.lc=r3 // restore ar.lc
79 alloc r2=ar.pfs,2,0,0,0
95 .save ar.lc,r3
96 mov r3=ar.lc // save ar.lc
100 mov ar.lc=r8
115 mov ar.lc=r3 // restore ar.lc
Dcopy_user.S75 .save ar.pfs, saved_pfs
76 alloc saved_pfs=ar.pfs,3,((2*PIPE_DEPTH+7)&~7),0,((2*PIPE_DEPTH+7)&~7)
86 .save ar.lc, saved_lc
87 mov saved_lc=ar.lc // preserve ar.lc (slow)
98 mov ar.ec=PIPE_DEPTH
102 mov ar.lc=len2 // initialize lc for small count
117 mov ar.lc=saved_lc
119 mov ar.pfs=saved_pfs // restore ar.ec
190 mov ar.ec=PIPE_DEPTH
192 mov ar.lc=cnt
[all …]
Dmemcpy.S48 .save ar.pfs, saved_pfs
49 alloc saved_pfs=ar.pfs,3,Nrot,0,Nrot
50 .save ar.lc, saved_lc
51 mov saved_lc=ar.lc
73 mov ar.ec=N
77 mov ar.lc=cnt
106 mov ar.lc=saved_lc
108 mov ar.pfs=saved_pfs
120 mov ar.ec=MEM_LAT
123 mov ar.lc=cnt
[all …]
Dcopy_page.S39 .save ar.pfs, saved_pfs
40 alloc saved_pfs=ar.pfs,3,Nrot-3,0,Nrot
46 .save ar.lc, saved_lc
47 mov saved_lc=ar.lc
48 mov ar.ec=PIPE_DEPTH
63 mov ar.lc=lcount
95 mov ar.pfs=saved_pfs
96 mov ar.lc=saved_lc
Dclear_user.S56 .save ar.pfs, saved_pfs
57 alloc saved_pfs=ar.pfs,2,0,0,0
59 .save ar.lc, saved_lc
60 mov saved_lc=ar.lc // preserve ar.lc (slow)
68 mov ar.lc=tmp // initialize lc for small count
89 mov ar.lc=saved_lc
125 mov ar.lc=tmp
153 mov ar.lc=saved_lc
207 mov ar.lc=saved_lc
Dstrnlen_user.S19 alloc r2=ar.pfs,2,0,0,0
20 .save ar.lc, r16
21 mov r16=ar.lc // preserve ar.lc
27 mov ar.lc=r3
43 mov ar.lc=r16 // restore ar.lc
Dmemcpy_mck.S109 .save ar.pfs, saved_pfs
110 alloc saved_pfs=ar.pfs,3,Nrot-3,0,Nrot
118 .save ar.lc, saved_lc
119 mov saved_lc=ar.lc
125 (p7) mov ar.lc=cnt // prefetch count
126 (p8) mov ar.lc=r0
147 mov ar.lc=cnt // loop setup
149 mov ar.ec=2
168 mov ar.lc=saved_lc
169 mov ar.pfs=saved_pfs
[all …]
Dcopy_page_mck.S102 alloc r8 = ar.pfs, 2, Nrot-2, 0, Nrot
107 .save ar.lc, saved_lc
108 mov saved_lc = ar.lc
115 mov ar.ec = 1 // special unrolled loop
118 mov ar.lc = 2*PREFETCH_DIST - 1
137 mov ar.lc = t1 // with 64KB pages, t1 is too big to fit in 8 bits!
138 mov ar.ec = N // # of stages in pipeline
182 mov ar.lc = saved_lc
Dclear_page.S36 .save ar.lc, saved_lc
37 mov saved_lc = ar.lc
40 mov ar.lc = (PREFETCH_LINES - 1)
50 mov ar.lc = r16 // one L3 line per iteration
74 mov ar.lc = saved_lc // restore lc
Dstrlen_user.S85 .save ar.pfs, saved_pfs
86 alloc saved_pfs=ar.pfs,11,0,0,8
108 mov ar.ec=r0 // clear epilogue counter (saved in ar.pfs)
151 mov ar.pfs=saved_pfs // because of ar.ec, restore no matter what
187 mov ar.pfs=saved_pfs // because of ar.ec, restore no matter what
196 mov ar.pfs=saved_pfs // because of ar.ec, restore no matter what
Ddo_csum.S128 .save ar.pfs, saved_pfs
129 alloc saved_pfs=ar.pfs,2,16,0,16
155 .save ar.lc, saved_lc
156 mov saved_lc=ar.lc // save lc
218 mov ar.ec=PIPE_DEPTH
219 mov ar.lc=count // set lc
284 mov ar.pfs=saved_pfs // restore ar.ec
287 mov ar.lc=saved_lc
Dstrlen.S83 .save ar.pfs, saved_pfs
84 alloc saved_pfs=ar.pfs,11,0,0,8 // rotating must be multiple of 8
106 mov ar.ec=r0 // clear epilogue counter (saved in ar.pfs)
148 mov ar.pfs=saved_pfs // because of ar.ec, restore no matter what
190 mov ar.pfs=saved_pfs // because of ar.ec, restore no matter what
Dmemset.S59 alloc tmp = ar.pfs, 3, 0, 0, 0
61 .save ar.lc, save_lc
62 mov.i save_lc = ar.lc
144 mov.i ar.lc = loopcnt //
154 mov.i ar.lc = tmp //
214 mov.i ar.lc = loopcnt
224 mov.i ar.lc = tmp
262 mov.i ar.lc = loopcnt
316 mov.i ar.lc = save_lc
335 mov.i ar.lc = save_lc
Dip_fast_csum.S80 .save ar.pfs, r35
81 alloc r35=ar.pfs,2,2,2,0
91 mov ar.pfs=r35
Dstrncpy_from_user.S22 alloc r2=ar.pfs,3,0,0,0
/linux-4.1.27/arch/ia64/kernel/
Drelocate_kernel.S23 alloc r31=ar.pfs,4,0,0,0
41 mov ar.rsc=0 // put RSE in enforced lazy mode
46 mov r18=ar.rnat
47 mov ar.bspstore=r8
54 mov ar.rnat=r18
83 mov ar.lc=r20
156 mov ar.lc=r14;;
191 alloc loc0=ar.pfs,1,2,0,0
193 mov ar.rsc=0 // put RSE in enforced lazy mode
205 mov r4=ar.rnat
[all …]
Dpal.S32 alloc r3=ar.pfs,1,0,0,0
57 alloc loc1 = ar.pfs,4,5,0,0
67 mov loc4=ar.rsc // save RSE configuration
69 mov ar.rsc=0 // put RSE in enforced lazy, LE mode
83 mov ar.rsc = loc4 // restore RSE configuration
84 mov ar.pfs = loc1
100 alloc loc1 = ar.pfs,4,4,4,0
119 mov ar.pfs = loc1
147 alloc loc1 = ar.pfs,4,7,0,0
164 mov loc4=ar.rsc // save RSE configuration
[all …]
Dgate.S104 .savesp ar.unat, UNAT_OFF+SIGCONTEXT_OFF; \
105 .savesp ar.fpsr, FPSR_OFF+SIGCONTEXT_OFF; \
108 .savesp ar.pfs, CFM_OFF+SIGCONTEXT_OFF; \
128 mov.m r9=ar.bsp // fetch ar.bsp
129 .spillsp.p p1, ar.rnat, RNAT_OFF+SIGCONTEXT_OFF
132 alloc r8=ar.pfs,0,0,3,0
143 .spillsp ar.bsp, BSP_OFF+SIGCONTEXT_OFF
167 mov r14=ar.bsp
197 mov ar.rsc=0 // put RSE into enforced lazy mode
199 .save ar.rnat, r19
[all …]
Dminstate.h10 (pUStk) mov.m r20=ar.itc;
49 mov r27=ar.rsc; /* M */ \
51 mov r25=ar.unat; /* M */ \
53 mov r26=ar.pfs; /* I */ \
55 mov r21=ar.fpsr; /* M */ \
69 (pUStk) mov ar.rsc=0; /* set enforced lazy mode, pl 0, little-endian, loadrs=0 */ \
71 (pUStk) mov.m r24=ar.rnat; \
77 (pUStk) mov r23=ar.bspstore; /* save ar.bspstore */ \
79 (pUStk) mov ar.bspstore=r22; /* switch to kernel RBS */ \
82 (pUStk) mov r18=ar.bsp; \
[all …]
Dentry.h31 .spillsp ar.pfs, PT(CR_IFS)+16+(off); \
32 .spillsp ar.unat, PT(AR_UNAT)+16+(off); \
33 .spillsp ar.fpsr, PT(AR_FPSR)+16+(off); \
42 .savesp ar.unat,SW(CALLER_UNAT)+16+(off); \
43 .savesp ar.fpsr,SW(AR_FPSR)+16+(off); \
59 .spillsp ar.pfs,SW(AR_PFS)+16+(off); .spillsp ar.lc,SW(AR_LC)+16+(off); \
61 .spillsp ar.rnat,SW(AR_RNAT)+16+(off); \
62 .spillsp ar.bspstore,SW(AR_BSPSTORE)+16+(off); \
Desi_stub.S49 alloc loc1=ar.pfs,2,7,8,0
70 mov loc4=ar.rsc // save RSE configuration
71 mov ar.rsc=0 // put RSE in enforced lazy, LE mode
86 .ret1: mov ar.rsc=0 // put RSE in enforced lazy, LE mode
91 .ret2: mov ar.rsc=loc4 // restore RSE configuration
92 mov ar.pfs=loc1
Defi_stub.S47 alloc loc1=ar.pfs,8,7,7,0
53 mov loc4=ar.rsc // save RSE configuration
54 mov ar.rsc=0 // put RSE in enforced lazy, LE mode
76 .ret1: mov ar.rsc=0 // put RSE in enforced lazy, LE mode
81 .ret2: mov ar.rsc=loc4 // restore RSE configuration
82 mov ar.pfs=loc1
Dentry.S64 alloc loc1=ar.pfs,8,2,3,0
74 mov ar.pfs=loc1 // restore ar.pfs
79 (p6) mov ar.pfs=r0 // clear ar.pfs on success
88 mov ar.unat=0; mov ar.lc=0
112 alloc r16=ar.pfs,8,2,6,0
129 mov ar.pfs=loc1
143 alloc r16=ar.pfs,8,2,6,0
160 mov ar.pfs=loc1
174 alloc r16=ar.pfs,1,0,0,0
248 mov r17=ar.unat // preserve caller's
[all …]
Dmca_asm.S82 mov ar.lc=r20
262 mov ar.rsc=3 // set eager mode for C handler
267 alloc r14=ar.pfs,0,0,3,0
283 alloc r14=ar.pfs,0,0,0,0 // remove the MCA handler frame
371 mov ar.rsc=3 // set eager mode for C handler
376 alloc r14=ar.pfs,0,0,3,0
397 alloc r14=ar.pfs,0,0,0,0 // remove the INIT handler frame
524 mov temp3=ar.csd
525 mov temp4=ar.ssd
530 mov temp3=ar.unat
[all …]
Dmca_drv_asm.S20 alloc r16=ar.pfs,0,2,3,0 // make a new frame
21 mov ar.rsc=0
28 mov ar.bspstore=r22
50 mov ar.pfs=loc0
Dhead.S50 mov ar.lc=IA64_NUM_DBG_REGS-1;; \
58 mov ar.lc=IA64_NUM_DBG_REGS-1;; \
89 mov ar.lc=0x08-1;; \
117 SAVE_FROM_REG(ar.fpsr,_reg1,_reg2);; \
118 SAVE_FROM_REG(ar.pfs,_reg1,_reg2);; \
119 SAVE_FROM_REG(ar.rnat,_reg1,_reg2);; \
120 SAVE_FROM_REG(ar.unat,_reg1,_reg2);; \
121 SAVE_FROM_REG(ar.bspstore,_reg1,_reg2);; \
135 SAVE_FROM_REG(ar.lc, _reg1, _reg2);; \
289 mov ar.fpsr=r2
[all …]
Divt.S72 # define DBG_FAULT(i) mov r16=ar.k2;; shl r16=r16,8;; add r16=(i),r16;;mov ar.k2=r16
545 mov r28=ar.ccv // save ar.ccv
549 mov ar.ccv=r18 // set compare value for cmpxchg
553 (p6) cmpxchg8.acq r26=[r17],r25,ar.ccv // Only update if page is present
572 mov ar.ccv=r28
611 mov r28=ar.ccv // save ar.ccv
615 mov ar.ccv=r18 // set compare value for cmpxchg
619 (p6) cmpxchg8.acq r26=[r17],r25,ar.ccv // Only if page present
638 mov ar.ccv=r28
666 mov r28=ar.ccv // save ar.ccv
[all …]
Djprobes.S73 mov r16=ar.rsc
75 mov ar.rsc=r0
79 mov ar.rsc=r16
Dfsys.S247 (p7) mov ar.ccv = r25 // more than last_cycle. Prep for cmpxchg
249 (p7) cmpxchg8.rel r3 = [r19],r2,ar.ccv
400 mov r27=ar.rsc
401 mov r21=ar.fpsr
402 mov r26=ar.pfs
484 mov ar.rsc=0 // M2 set enforced lazy mode, pl 0, LE, loadrs=0
492 mov r23=ar.bspstore // M2 (12 cyc) save ar.bspstore
493 mov.m r24=ar.rnat // M2 (5 cyc) read ar.rnat (dual-issues!)
496 mov ar.bspstore=r22 // M2 (6 cyc) switch to kernel RBS
499 mov r25=ar.unat // M2 (5 cyc) save ar.unat
[all …]
Dptrace.c864 retval |= __put_user(pt->ar_pfs, &ppr->ar[PT_AUR_PFS]); in ptrace_getregs()
865 retval |= __put_user(pt->ar_rsc, &ppr->ar[PT_AUR_RSC]); in ptrace_getregs()
866 retval |= __put_user(pt->ar_bspstore, &ppr->ar[PT_AUR_BSPSTORE]); in ptrace_getregs()
867 retval |= __put_user(pt->ar_unat, &ppr->ar[PT_AUR_UNAT]); in ptrace_getregs()
868 retval |= __put_user(pt->ar_ccv, &ppr->ar[PT_AUR_CCV]); in ptrace_getregs()
869 retval |= __put_user(pt->ar_fpsr, &ppr->ar[PT_AUR_FPSR]); in ptrace_getregs()
871 retval |= __put_user(ec, &ppr->ar[PT_AUR_EC]); in ptrace_getregs()
872 retval |= __put_user(lc, &ppr->ar[PT_AUR_LC]); in ptrace_getregs()
873 retval |= __put_user(rnat, &ppr->ar[PT_AUR_RNAT]); in ptrace_getregs()
874 retval |= __put_user(bsp, &ppr->ar[PT_AUR_BSP]); in ptrace_getregs()
[all …]
Dparavirt.c405 #define __DEFINE_GET_AR(id, reg) __DEFINE_GET_REG(AR_ ## id, ar.reg)
483 #define __DEFINE_SET_AR(id, reg) __DEFINE_SET_REG(AR_ ## id, ar.reg)
674 IA64_NATIVE_PATCH_DEFINE_REG(ar_ ## name, ar.reg)
Dpalinfo.c737 unsigned long ar:3; in tr_info() member
799 gr_reg->pl, gr_reg->ar, rid_reg->rid, gr_reg->p, gr_reg->ma, in tr_info()
/linux-4.1.27/arch/ia64/include/asm/
Dmca_asm.h87 mov ar.rsc = 0 ; \
90 mov temp2 = ar.bspstore; \
94 mov temp1 = ar.rnat; \
96 mov ar.bspstore = temp2; \
98 mov ar.rnat = temp1; \
170 mov ar.rsc = 0; \
173 mov r13 = ar.k6; \
174 mov temp2 = ar.bspstore; \
178 mov temp1 = ar.rnat; \
180 mov ar.bspstore = temp2; \
[all …]
Dasmmacro.h94 mov r16=ar.pfs; \
99 mov ar.pfs=r16; \
Dkregs.h24 #define _IA64_KR_PREFIX(n) _IA64_KR_PASTE(ar.k, n)
Dsal.h381 ar : 1, member
389 u64 ar[128]; member
Dpal.h451 ar : 1, /* App regs valid */ member
689 #define pmci_proc_app_regs_valid pme_processor.ar
/linux-4.1.27/arch/s390/kvm/
Dpriv.c39 ar_t ar; in handle_set_clock() local
45 op2 = kvm_s390_get_base_disp_s(vcpu, &ar); in handle_set_clock()
48 rc = read_guest(vcpu, op2, ar, &val, sizeof(val)); in handle_set_clock()
72 ar_t ar; in handle_set_prefix() local
79 operand2 = kvm_s390_get_base_disp_s(vcpu, &ar); in handle_set_prefix()
86 rc = read_guest(vcpu, operand2, ar, &address, sizeof(address)); in handle_set_prefix()
112 ar_t ar; in handle_store_prefix() local
119 operand2 = kvm_s390_get_base_disp_s(vcpu, &ar); in handle_store_prefix()
128 rc = write_guest(vcpu, operand2, ar, &address, sizeof(address)); in handle_store_prefix()
142 ar_t ar; in handle_store_cpu_address() local
[all …]
Dgaccess.h159 ar_t ar, unsigned long *gpa, int write);
160 int check_gva_range(struct kvm_vcpu *vcpu, unsigned long gva, ar_t ar,
163 int access_guest(struct kvm_vcpu *vcpu, unsigned long ga, ar_t ar, void *data,
215 int write_guest(struct kvm_vcpu *vcpu, unsigned long ga, ar_t ar, void *data, in write_guest() argument
218 return access_guest(vcpu, ga, ar, data, len, 1); in write_guest()
235 int read_guest(struct kvm_vcpu *vcpu, unsigned long ga, ar_t ar, void *data, in read_guest() argument
238 return access_guest(vcpu, ga, ar, data, len, 0); in read_guest()
Dkvm-s390.h75 static inline u64 kvm_s390_get_base_disp_s(struct kvm_vcpu *vcpu, ar_t *ar) in kvm_s390_get_base_disp_s() argument
80 if (ar) in kvm_s390_get_base_disp_s()
81 *ar = base2; in kvm_s390_get_base_disp_s()
112 static inline u64 kvm_s390_get_base_disp_rsy(struct kvm_vcpu *vcpu, ar_t *ar) in kvm_s390_get_base_disp_rsy() argument
121 if (ar) in kvm_s390_get_base_disp_rsy()
122 *ar = base2; in kvm_s390_get_base_disp_rsy()
127 static inline u64 kvm_s390_get_base_disp_rs(struct kvm_vcpu *vcpu, ar_t *ar) in kvm_s390_get_base_disp_rs() argument
132 if (ar) in kvm_s390_get_base_disp_rs()
133 *ar = base2; in kvm_s390_get_base_disp_rs()
Dgaccess.c359 static int ar_translation(struct kvm_vcpu *vcpu, union asce *asce, ar_t ar, in ar_translation() argument
370 if (ar >= NUM_ACRS) in ar_translation()
374 alet.val = vcpu->run->s.regs.acrs[ar]; in ar_translation()
376 if (ar == 0 || alet.val == 0) { in ar_translation()
464 ar_t ar, int write) in get_vcpu_asce() argument
493 rc = ar_translation(vcpu, asce, ar, write); in get_vcpu_asce()
500 vcpu->arch.pgm.exc_access_id = ar; in get_vcpu_asce()
754 int access_guest(struct kvm_vcpu *vcpu, unsigned long ga, ar_t ar, void *data, in access_guest() argument
767 rc = get_vcpu_asce(vcpu, &asce, ar, write); in access_guest()
827 int guest_translate_address(struct kvm_vcpu *vcpu, unsigned long gva, ar_t ar, in guest_translate_address() argument
[all …]
Dkvm-s390.c2362 r = check_gva_range(vcpu, mop->gaddr, mop->ar, mop->size, false); in kvm_s390_guest_mem_op()
2365 r = read_guest(vcpu, mop->gaddr, mop->ar, tmpbuf, mop->size); in kvm_s390_guest_mem_op()
2373 r = check_gva_range(vcpu, mop->gaddr, mop->ar, mop->size, true); in kvm_s390_guest_mem_op()
2380 r = write_guest(vcpu, mop->gaddr, mop->ar, tmpbuf, mop->size); in kvm_s390_guest_mem_op()
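Read together, the s390 KVM hits show one pattern: ar is the access-register number that travels with a guest address. kvm_s390_get_base_disp_s() and its rsy/rs variants hand back the instruction's base-register number as the AR, read_guest() and write_guest() are thin wrappers that forward it to access_guest(), and ar_translation() turns the ALET held in acrs[ar] into an address-space control element. Below is a minimal user-space model of the early validity checks in ar_translation(); it is a sketch, not the kernel code, and NUM_ACRS plus the enum are illustrative.

#include <stdint.h>
#include <stdio.h>

#define NUM_ACRS 16          /* s390 has 16 access registers */

/* Simplified outcome of the ALET lookup, modelled on ar_translation(). */
enum alet_result { USE_PRIMARY_ASCE, NEED_ALET_TRANSLATION, INVALID_AR };

/*
 * Mirror of the first checks in ar_translation(): an out-of-range AR is an
 * error, and AR 0 or a zero ALET means "just use the primary address space".
 */
static enum alet_result classify_ar(const uint32_t acrs[NUM_ACRS], unsigned int ar)
{
    if (ar >= NUM_ACRS)
        return INVALID_AR;
    if (ar == 0 || acrs[ar] == 0)
        return USE_PRIMARY_ASCE;
    return NEED_ALET_TRANSLATION;   /* the real code walks the access list here */
}

int main(void)
{
    uint32_t acrs[NUM_ACRS] = { 0 };
    acrs[5] = 0x01000002;           /* arbitrary non-zero ALET for the demo */

    printf("ar=0  -> %d\n", classify_ar(acrs, 0));   /* primary ASCE */
    printf("ar=5  -> %d\n", classify_ar(acrs, 5));   /* needs ALET translation */
    printf("ar=99 -> %d\n", classify_ar(acrs, 99));  /* invalid */
    return 0;
}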
/linux-4.1.27/fs/ext4/
Dindirect.c322 struct ext4_allocation_request *ar, in ext4_alloc_branch() argument
333 new_blocks[i] = ext4_mb_new_blocks(handle, ar, &err); in ext4_alloc_branch()
335 ar->goal = new_blocks[i] = ext4_new_meta_blocks(handle, in ext4_alloc_branch()
336 ar->inode, ar->goal, in ext4_alloc_branch()
337 ar->flags & EXT4_MB_DELALLOC_RESERVED, in ext4_alloc_branch()
347 bh = branch[i].bh = sb_getblk(ar->inode->i_sb, new_blocks[i-1]); in ext4_alloc_branch()
365 len = ar->len; in ext4_alloc_branch()
374 err = ext4_handle_dirty_metadata(handle, ar->inode, bh); in ext4_alloc_branch()
388 ext4_forget(handle, 1, ar->inode, branch[i].bh, in ext4_alloc_branch()
390 ext4_free_blocks(handle, ar->inode, NULL, new_blocks[i], in ext4_alloc_branch()
[all …]
Dmballoc.c3006 struct ext4_allocation_request *ar) in ext4_mb_normalize_request() argument
3091 if (ar->pleft && start <= ar->lleft) { in ext4_mb_normalize_request()
3092 size -= ar->lleft + 1 - start; in ext4_mb_normalize_request()
3093 start = ar->lleft + 1; in ext4_mb_normalize_request()
3095 if (ar->pright && start + size - 1 >= ar->lright) in ext4_mb_normalize_request()
3096 size -= start + size - ar->lright; in ext4_mb_normalize_request()
3173 if (ar->pright && (ar->lright == (start + size))) { in ext4_mb_normalize_request()
3175 ext4_get_group_no_and_offset(ac->ac_sb, ar->pright - size, in ext4_mb_normalize_request()
3180 if (ar->pleft && (ar->lleft + 1 == start)) { in ext4_mb_normalize_request()
3182 ext4_get_group_no_and_offset(ac->ac_sb, ar->pleft + 1, in ext4_mb_normalize_request()
[all …]
Dballoc.c625 struct ext4_allocation_request ar; in ext4_new_meta_blocks() local
628 memset(&ar, 0, sizeof(ar)); in ext4_new_meta_blocks()
630 ar.inode = inode; in ext4_new_meta_blocks()
631 ar.goal = goal; in ext4_new_meta_blocks()
632 ar.len = count ? *count : 1; in ext4_new_meta_blocks()
633 ar.flags = flags; in ext4_new_meta_blocks()
635 ret = ext4_mb_new_blocks(handle, &ar, errp); in ext4_new_meta_blocks()
637 *count = ar.len; in ext4_new_meta_blocks()
644 EXT4_C2B(EXT4_SB(inode->i_sb), ar.len)); in ext4_new_meta_blocks()
Dextents.c4280 struct ext4_allocation_request ar; in ext4_ext_map_blocks() local
4386 ar.len = allocated = map->m_len; in ext4_ext_map_blocks()
4393 ar.lleft = map->m_lblk; in ext4_ext_map_blocks()
4394 err = ext4_ext_search_left(inode, path, &ar.lleft, &ar.pleft); in ext4_ext_map_blocks()
4397 ar.lright = map->m_lblk; in ext4_ext_map_blocks()
4399 err = ext4_ext_search_right(inode, path, &ar.lright, &ar.pright, &ex2); in ext4_ext_map_blocks()
4407 ar.len = allocated = map->m_len; in ext4_ext_map_blocks()
4435 ar.inode = inode; in ext4_ext_map_blocks()
4436 ar.goal = ext4_ext_find_goal(inode, path, map->m_lblk); in ext4_ext_map_blocks()
4437 ar.logical = map->m_lblk; in ext4_ext_map_blocks()
[all …]
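The ext4 hits trace the life of a struct ext4_allocation_request: balloc.c zeroes it and fills inode, goal, len and flags; extents.c additionally seeds the logical block and the lleft/lright/pleft/pright neighbour hints; mballoc.c normalizes the request; and the caller reads ar.len back afterwards because the allocator may return fewer blocks. A compile-standalone sketch of the caller-side pattern follows, with the struct cut down to the fields visible above (field types here are assumptions):

#include <string.h>
#include <stdio.h>

/* Cut-down stand-in for struct ext4_allocation_request (fields seen above). */
struct alloc_request {
    void *inode;                        /* file the blocks are for            */
    unsigned long long goal;            /* preferred physical block           */
    unsigned long long logical;         /* logical block in the file          */
    unsigned long long lleft, lright;   /* logical neighbours (hints)         */
    unsigned long long pleft, pright;   /* physical neighbours (hints)        */
    unsigned int len;                   /* number of blocks wanted            */
    unsigned int flags;
};

/* Caller-side pattern from ext4_new_meta_blocks(): zero, fill, submit. */
static unsigned int request_blocks(void *inode, unsigned long long goal,
                                   unsigned int count, unsigned int flags)
{
    struct alloc_request ar;

    memset(&ar, 0, sizeof(ar));
    ar.inode = inode;
    ar.goal  = goal;
    ar.len   = count ? count : 1;
    ar.flags = flags;

    /* ext4_mb_new_blocks(handle, &ar, &err) would run here; it may trim
     * ar.len, which is why the caller reads it back afterwards. */
    return ar.len;
}

int main(void)
{
    printf("allocated %u block(s)\n", request_blocks(NULL, 1000, 0, 0));
    return 0;
}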
/linux-4.1.27/arch/ia64/scripts/
Dpvcheck.sed18 s/mov.*=[^\.]*psr/.warning \"psr should not used directly\"/g # avoid ar.fpsr
19 s/mov.*=.*ar\.eflags/.warning \"ar.eflags should not used directly\"/g
20 s/mov.*=.*ar\.itc.*/.warning \"ar.itc should not used directly\"/g
28 s/mov.*ar\.eflags.*=.*/.warning \"ar.eflags should not used directly\"/g
/linux-4.1.27/drivers/edac/
Dcell_edac.c34 static void cell_edac_count_ce(struct mem_ctl_info *mci, int chan, u64 ar) in cell_edac_count_ce() argument
41 priv->node, chan, ar); in cell_edac_count_ce()
44 address = (ar & 0xffffffffe0000000ul) >> 29; in cell_edac_count_ce()
49 syndrome = (ar & 0x000000001fe00000ul) >> 21; in cell_edac_count_ce()
57 static void cell_edac_count_ue(struct mem_ctl_info *mci, int chan, u64 ar) in cell_edac_count_ue() argument
64 priv->node, chan, ar); in cell_edac_count_ue()
67 address = (ar & 0xffffffffe0000000ul) >> 29; in cell_edac_count_ue()
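In cell_edac.c the ar parameter is the raw address/syndrome register value for one channel: both the correctable and uncorrectable paths take the failing address from bits 63..29, and the correctable path also pulls an 8-bit ECC syndrome from bits 28..21. A small decoder using the same masks and shifts (the struct and field names are mine):

#include <stdint.h>
#include <stdio.h>

struct mic_error {                 /* hypothetical names for the decoded fields */
    uint64_t address;              /* failing address, from AR bits 63..29 */
    uint8_t  syndrome;             /* ECC syndrome, from AR bits 28..21    */
};

static struct mic_error decode_ar(uint64_t ar)
{
    struct mic_error e;

    e.address  = (ar & 0xffffffffe0000000ull) >> 29;  /* same mask as the driver */
    e.syndrome = (ar & 0x000000001fe00000ull) >> 21;
    return e;
}

int main(void)
{
    /* Synthetic AR value: address 0x12345, syndrome 0x5e. */
    struct mic_error e = decode_ar((0x12345ull << 29) | (0x5eull << 21));

    printf("address=0x%llx syndrome=0x%02x\n",
           (unsigned long long)e.address, e.syndrome);
    return 0;
}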
/linux-4.1.27/scripts/
Dkallsyms.c95 struct addr_range *ar; in check_symbol_range() local
98 ar = &ranges[i]; in check_symbol_range()
100 if (strcmp(sym, ar->start_sym) == 0) { in check_symbol_range()
101 ar->start = addr; in check_symbol_range()
103 } else if (strcmp(sym, ar->end_sym) == 0) { in check_symbol_range()
104 ar->end = addr; in check_symbol_range()
184 struct addr_range *ar; in symbol_in_range() local
187 ar = &ranges[i]; in symbol_in_range()
189 if (s->addr >= ar->start && s->addr <= ar->end) in symbol_in_range()
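scripts/kallsyms.c uses ar to walk an array of addr_range descriptors: check_symbol_range() latches a range's start or end address when the matching boundary symbol appears in the input, and symbol_in_range() later tests whether a symbol lies inside any recorded range. A self-contained re-creation of that pair with simplified types:

#include <stdio.h>
#include <string.h>

struct addr_range {
    const char *start_sym, *end_sym;   /* boundary symbol names    */
    unsigned long long start, end;     /* filled in as they appear */
};

/* Record addr when sym is one of a range's boundary symbols. */
static void check_symbol_range(const char *sym, unsigned long long addr,
                               struct addr_range *ranges, int entries)
{
    for (int i = 0; i < entries; i++) {
        if (strcmp(sym, ranges[i].start_sym) == 0)
            ranges[i].start = addr;
        else if (strcmp(sym, ranges[i].end_sym) == 0)
            ranges[i].end = addr;
    }
}

/* Return 1 if addr lies inside any of the recorded ranges. */
static int symbol_in_range(unsigned long long addr,
                           const struct addr_range *ranges, int entries)
{
    for (int i = 0; i < entries; i++)
        if (addr >= ranges[i].start && addr <= ranges[i].end)
            return 1;
    return 0;
}

int main(void)
{
    struct addr_range text[] = { { "_stext", "_etext", 0, 0 } };

    check_symbol_range("_stext", 0x1000, text, 1);
    check_symbol_range("_etext", 0x2000, text, 1);
    printf("0x1800 in .text? %d\n", symbol_in_range(0x1800, text, 1));
    return 0;
}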
DKbuild.include152 # ar-option
153 # Usage: KBUILD_ARFLAGS := $(call ar-option,D)
155 ar-option = $(call try-run, $(AR) rc$(1) "$$TMP",$(1),$(2))
/linux-4.1.27/drivers/staging/lustre/lustre/libcfs/
Dnidstrings.c743 struct addrrange *ar; in free_addrranges() local
745 ar = list_entry(list->next, struct addrrange, ar_link); in free_addrranges()
747 cfs_expr_list_free_list(&ar->ar_numaddr_ranges); in free_addrranges()
748 list_del(&ar->ar_link); in free_addrranges()
749 LIBCFS_FREE(ar, sizeof(struct addrrange)); in free_addrranges()
826 struct addrrange *ar; in cfs_match_nid() local
835 list_for_each_entry(ar, &nr->nr_addrranges, ar_link) in cfs_match_nid()
837 &ar->ar_numaddr_ranges)) in cfs_match_nid()
/linux-4.1.27/tools/testing/selftests/x86/
Dsigreturn.c370 uint32_t valid = 0, ar; in cs_bitness() local
375 : [ar] "=r" (ar), [valid] "+rm" (valid) in cs_bitness()
381 bool db = (ar & (1 << 22)); in cs_bitness()
382 bool l = (ar & (1 << 21)); in cs_bitness()
384 if (!(ar & (1<<11))) in cs_bitness()
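The sigreturn selftest loads a segment's access rights into ar (via LAR in the snippet above) and classifies the code segment from descriptor bits alone: type bit 11 separates code from data, while the L bit (21) and D/B bit (22) select 64-, 32- or 16-bit mode. Here are the same bit tests in plain C, with the inline assembly omitted and the input assumed to come from LAR or a descriptor dump:

#include <stdint.h>
#include <stdio.h>

/* Classify a code segment from its LAR-style access-rights word.
 * Returns 64, 32 or 16 for code segments, -1 otherwise. */
static int cs_bitness(uint32_t ar)
{
    int db = !!(ar & (1u << 22));   /* D/B: default operand size  */
    int l  = !!(ar & (1u << 21));   /* L: 64-bit code segment     */

    if (!(ar & (1u << 11)))         /* type bit 11 clear: not code */
        return -1;

    if (l && !db)
        return 64;
    if (!l && db)
        return 32;
    if (!l && !db)
        return 16;
    return -1;                      /* L=1 with D/B=1 is reserved  */
}

int main(void)
{
    /* Hypothetical AR values: bit 11 set (code), plus L or D/B. */
    printf("%d\n", cs_bitness((1u << 11) | (1u << 21)));  /* 64 */
    printf("%d\n", cs_bitness((1u << 11) | (1u << 22)));  /* 32 */
    printf("%d\n", cs_bitness(1u << 11));                 /* 16 */
    return 0;
}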
/linux-4.1.27/fs/
Dlibfs.c687 struct simple_transaction_argresp *ar = file->private_data; in simple_transaction_set() local
696 ar->size = n; in simple_transaction_set()
702 struct simple_transaction_argresp *ar; in simple_transaction_get() local
708 ar = (struct simple_transaction_argresp *)get_zeroed_page(GFP_KERNEL); in simple_transaction_get()
709 if (!ar) in simple_transaction_get()
717 free_page((unsigned long)ar); in simple_transaction_get()
721 file->private_data = ar; in simple_transaction_get()
725 if (copy_from_user(ar->data, buf, size)) in simple_transaction_get()
728 return ar->data; in simple_transaction_get()
734 struct simple_transaction_argresp *ar = file->private_data; in simple_transaction_read() local
[all …]
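The libfs hits describe the simple_transaction helpers: simple_transaction_get() allocates a zeroed page as the request/response buffer ar, copies the user's write into ar->data and hands it to the handler, simple_transaction_set() records how many reply bytes the handler produced, and the read path copies the response back out. A user-space model of that round trip (buffer size and names are illustrative, and the kernel's locking is left out):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define TRANSACTION_LIMIT 512          /* stand-in for the one-page limit */

/* Stand-in for struct simple_transaction_argresp: size + payload. */
struct argresp {
    size_t size;                       /* bytes of response, set by the handler  */
    char   data[TRANSACTION_LIMIT];    /* request on entry, response on exit     */
};

/* "write" side: stash the request, like simple_transaction_get(). */
static struct argresp *transaction_get(const char *req, size_t len)
{
    struct argresp *ar;

    if (len >= TRANSACTION_LIMIT)
        return NULL;                   /* request too large */
    ar = calloc(1, sizeof(*ar));
    if (!ar)
        return NULL;
    memcpy(ar->data, req, len);
    return ar;
}

/* handler side: record the reply length, like simple_transaction_set(). */
static void transaction_set(struct argresp *ar, size_t n)
{
    ar->size = n;
}

int main(void)
{
    struct argresp *ar = transaction_get("ping", 4);

    if (!ar)
        return 1;
    transaction_set(ar, snprintf(ar->data, TRANSACTION_LIMIT, "pong"));
    printf("%.*s\n", (int)ar->size, ar->data);   /* "read" side */
    free(ar);
    return 0;
}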
/linux-4.1.27/drivers/staging/lustre/lustre/osc/
Dosc_object.c141 struct osc_async_rc *ar = &oinfo->loi_ar; in osc_object_print() local
146 ar->ar_rc, ar->ar_force_sync, ar->ar_min_xid); in osc_object_print()
Dosc_cache.c1761 static void osc_process_ar(struct osc_async_rc *ar, __u64 xid, in osc_process_ar() argument
1765 if (!ar->ar_rc) in osc_process_ar()
1766 ar->ar_rc = rc; in osc_process_ar()
1768 ar->ar_force_sync = 1; in osc_process_ar()
1769 ar->ar_min_xid = ptlrpc_sample_next_xid(); in osc_process_ar()
1774 if (ar->ar_force_sync && (xid >= ar->ar_min_xid)) in osc_process_ar()
1775 ar->ar_force_sync = 0; in osc_process_ar()
/linux-4.1.27/drivers/media/pci/saa7164/
Dsaa7164-api.c251 struct tmComResEncVideoInputAspectRatio ar; in saa7164_api_get_encoder() local
294 ar.width = 0; in saa7164_api_get_encoder()
295 ar.height = 0; in saa7164_api_get_encoder()
298 sizeof(struct tmComResEncVideoInputAspectRatio), &ar); in saa7164_api_get_encoder()
319 ar.width, ar.height); in saa7164_api_get_encoder()
327 struct tmComResEncVideoInputAspectRatio ar; in saa7164_api_set_aspect_ratio() local
335 ar.width = 1; in saa7164_api_set_aspect_ratio()
336 ar.height = 1; in saa7164_api_set_aspect_ratio()
339 ar.width = 4; in saa7164_api_set_aspect_ratio()
340 ar.height = 3; in saa7164_api_set_aspect_ratio()
[all …]
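saa7164_api_set_aspect_ratio() fills a tmComResEncVideoInputAspectRatio with a width:height pair such as 1:1 or 4:3 chosen from a control value, then hands it to the firmware control API. A reduced sketch of that mapping; the enum values below are placeholders, not the driver's control codes:

#include <stdio.h>

/* Stand-in for tmComResEncVideoInputAspectRatio: just a ratio. */
struct aspect_ratio {
    unsigned int width;
    unsigned int height;
};

/* Placeholder control values; the driver uses V4L2 aspect-ratio IDs. */
enum ar_mode { AR_1_1, AR_4_3, AR_16_9, AR_221_100 };

static struct aspect_ratio pick_aspect_ratio(enum ar_mode mode)
{
    struct aspect_ratio ar = { 1, 1 };

    switch (mode) {
    case AR_1_1:     ar.width = 1;   ar.height = 1;   break;
    case AR_4_3:     ar.width = 4;   ar.height = 3;   break;
    case AR_16_9:    ar.width = 16;  ar.height = 9;   break;
    case AR_221_100: ar.width = 221; ar.height = 100; break;
    }
    return ar;      /* the driver then sends this via a set-control command */
}

int main(void)
{
    struct aspect_ratio ar = pick_aspect_ratio(AR_4_3);

    printf("%u:%u\n", ar.width, ar.height);
    return 0;
}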
/linux-4.1.27/arch/ia64/hp/sim/boot/
Dboot_head.S95 mov r9=ar.lc
98 mov ar.lc=r8
108 mov ar.lc=r9
/linux-4.1.27/sound/isa/msnd/
Dmsnd_pinnacle_mixer.c163 #define update_potm(d, s, ar) \ argument
171 if (snd_msnd_send_word(dev, 0, 0, ar) == 0) \
175 #define update_pot(d, s, ar) \ argument
181 if (snd_msnd_send_word(dev, 0, 0, ar) == 0) \
/linux-4.1.27/drivers/video/fbdev/core/
Dsvgalib.c383 u16 am, an, ar; in svga_compute_pll() local
388 ar = pll->r_max; in svga_compute_pll()
389 f_vco = f_wanted << ar; in svga_compute_pll()
392 if ((f_vco >> ar) != f_wanted) in svga_compute_pll()
398 while ((ar > pll->r_min) && (f_vco > pll->f_vco_max)) { in svga_compute_pll()
399 ar--; in svga_compute_pll()
410 *r = ar; in svga_compute_pll()
433 …pr_debug("fb%d: found frequency: %d kHz (VCO %d kHz)\n", node, (int) (f_current >> ar), (int) f_cu… in svga_compute_pll()
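svga_compute_pll() searches for usable PLL divisors: ar holds the candidate post-divider r, the target VCO frequency is f_wanted << r, a shift-back check rejects frequencies that overflow, and r is walked down from r_max while the VCO would exceed f_vco_max. The r-selection part of that search as a standalone function, with the limits struct trimmed to the fields used here:

#include <stdio.h>

/* Trimmed-down PLL limits (only what the r search needs). */
struct svga_pll {
    unsigned int r_min, r_max;     /* allowed post-divider exponent range      */
    unsigned int f_vco_max;        /* highest legal VCO frequency, in kHz      */
};

/* Pick the post-divider r for a wanted output frequency, mirroring the
 * search in svga_compute_pll(). Returns -1 if f_wanted is out of range. */
static int pick_r(const struct svga_pll *pll, unsigned int f_wanted,
                  unsigned int *f_vco_out)
{
    unsigned int ar = pll->r_max;
    unsigned int f_vco = f_wanted << ar;

    if ((f_vco >> ar) != f_wanted)
        return -1;                          /* shift overflowed: frequency too high */

    while (ar > pll->r_min && f_vco > pll->f_vco_max) {
        ar--;                               /* halve the multiplication factor */
        f_vco = f_wanted << ar;
    }
    *f_vco_out = f_vco;
    return (int)ar;
}

int main(void)
{
    struct svga_pll pll = { .r_min = 0, .r_max = 3, .f_vco_max = 300000 };
    unsigned int f_vco;
    int r = pick_r(&pll, 65000, &f_vco);    /* 65 MHz pixel clock */

    printf("r=%d, VCO=%u kHz\n", r, f_vco);
    return 0;
}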
/linux-4.1.27/include/trace/events/
Dext4.h748 TP_PROTO(struct ext4_allocation_request *ar),
750 TP_ARGS(ar),
766 __entry->dev = ar->inode->i_sb->s_dev;
767 __entry->ino = ar->inode->i_ino;
768 __entry->len = ar->len;
769 __entry->logical = ar->logical;
770 __entry->goal = ar->goal;
771 __entry->lleft = ar->lleft;
772 __entry->lright = ar->lright;
773 __entry->pleft = ar->pleft;
[all …]
/linux-4.1.27/Documentation/ia64/
Dfsys.txt93 - r11 = saved ar.pfs (a user-level value)
98 - ar.pfs = previous frame-state (a user-level value)
104 - r11 = saved ar.pfs (as passed into the fsyscall handler)
108 - ar.pfs = previous frame-state (as passed into the fsyscall handler)
119 r15, b6, and ar.pfs) because they will be needed in case of a
147 through ar.k6).
251 .save ar.pfs, r11
252 mov r11 = ar.pfs
268 mov ar.pfs = r11
Derr_inject.txt261 ar : 1,
/linux-4.1.27/drivers/scsi/esas2r/
Desas2r_main.c972 struct esas2r_request *ar = *abort_request; in esas2r_check_active_queue() local
989 ar = esas2r_alloc_request(a); in esas2r_check_active_queue()
990 if (ar == NULL) { in esas2r_check_active_queue()
1002 ar->sense_len = 0; in esas2r_check_active_queue()
1003 ar->vrq->scsi.length = 0; in esas2r_check_active_queue()
1004 ar->target_id = rq->target_id; in esas2r_check_active_queue()
1005 ar->vrq->scsi.flags |= cpu_to_le32( in esas2r_check_active_queue()
1008 memset(ar->vrq->scsi.cdb, 0, in esas2r_check_active_queue()
1009 sizeof(ar->vrq->scsi.cdb)); in esas2r_check_active_queue()
1011 ar->vrq->scsi.flags |= cpu_to_le32( in esas2r_check_active_queue()
[all …]
/linux-4.1.27/tools/build/tests/ex/
DMakefile4 export AR := ar
/linux-4.1.27/Documentation/filesystems/
Dcoda.txt420 …4.1. Data structures shared by the ke…
581 Summary Find the ViceFid and type of an object in a directory if it
628 Summary Get the attributes of a file.
670 Summary Set the attributes of a file.
702 Summary
733 Summary Invoked to create a file
792 Summary Create a new directory.
834 Summary Create a link to an existing file.
862 Summary create a symbolic link
895 Summary Remove a file
[all …]
Dproc.txt470 ar - architecture specific flag
/linux-4.1.27/net/mac80211/
Drc80211_minstrel.c270 struct ieee80211_tx_rate *ar = info->status.rates; in minstrel_tx_status() local
277 if (ar[i].idx < 0) in minstrel_tx_status()
280 ndx = rix_to_ndx(mi, ar[i].idx); in minstrel_tx_status()
284 mi->r[ndx].stats.attempts += ar[i].count; in minstrel_tx_status()
286 if ((i != IEEE80211_TX_MAX_RATES - 1) && (ar[i + 1].idx < 0)) in minstrel_tx_status()
Drc80211_minstrel_ht.c704 struct ieee80211_tx_rate *ar = info->status.rates; in minstrel_ht_tx_status() local
737 last = !minstrel_ht_txstat_valid(mp, &ar[0]); in minstrel_ht_tx_status()
740 !minstrel_ht_txstat_valid(mp, &ar[i + 1]); in minstrel_ht_tx_status()
742 rate = minstrel_ht_get_stats(mp, mi, &ar[i]); in minstrel_ht_tx_status()
747 rate->attempts += ar[i].count * info->status.ampdu_len; in minstrel_ht_tx_status()
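Both minstrel rate-control variants walk the per-packet rate table in the tx status: ar points at info->status.rates, an entry with idx < 0 ends the list, and every valid entry adds its retry count to the matching rate's attempt counter. A stripped-down version of that accounting loop; the array length mirrors IEEE80211_TX_MAX_RATES and everything else is simplified (the real code maps the rate index through rix_to_ndx() first):

#include <stdio.h>

#define TX_MAX_RATES 4                 /* mirrors IEEE80211_TX_MAX_RATES */
#define NUM_RATES    8                 /* size of the hypothetical rate table */

struct tx_rate   { int idx; int count; };         /* rate index + tries used  */
struct rate_stat { unsigned int attempts; };      /* per-rate attempt counter */

/* Credit every attempted rate in a tx status report, stopping at idx < 0. */
static void account_tx_status(const struct tx_rate ar[TX_MAX_RATES],
                              struct rate_stat stats[NUM_RATES])
{
    for (int i = 0; i < TX_MAX_RATES; i++) {
        if (ar[i].idx < 0)
            break;                     /* unused trailing entries */
        if (ar[i].idx < NUM_RATES)
            stats[ar[i].idx].attempts += ar[i].count;
    }
}

int main(void)
{
    struct rate_stat stats[NUM_RATES] = { { 0 } };
    struct tx_rate report[TX_MAX_RATES] = {
        { 5, 2 }, { 3, 1 }, { -1, 0 }, { -1, 0 },  /* rate 5 tried twice, rate 3 once */
    };

    account_tx_status(report, stats);
    printf("rate 5: %u attempts, rate 3: %u attempts\n",
           stats[5].attempts, stats[3].attempts);
    return 0;
}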
/linux-4.1.27/arch/s390/include/asm/
Dnmi.h49 __u32 ar : 1; /* 33 access register validity */ member
/linux-4.1.27/arch/s390/kernel/
Dsclp.S233 ar %r9,%r6
236 ar %r9,%r6
239 ar %r9,%r6
241 ar %r7,%r6 # update current mto address
Dnmi.c175 if (!mci->ar) { in s390_revalidate_registers()
Dhead.S128 ar %r2,%r0
/linux-4.1.27/tools/lib/api/
DMakefile12 AR = $(CROSS_COMPILE)ar
/linux-4.1.27/arch/x86/include/asm/
Dvmx.h390 #define AR_DPL(ar) (((ar) >> AR_DPL_SHIFT) & 3) argument
/linux-4.1.27/arch/ia64/include/uapi/asm/
Dptrace.h217 unsigned long ar[128]; member
/linux-4.1.27/Documentation/networking/
Dip_dynaddr.txt29 -- Juanjo <jjciarla@raiz.uncu.edu.ar>
/linux-4.1.27/tools/lib/lockdep/
DMakefile19 $(call allow-override,AR,$(CROSS_COMPILE)ar)
/linux-4.1.27/tools/power/cpupower/
DMakefile96 AR = $(CROSS)ar
/linux-4.1.27/arch/x86/kvm/
Dvmx.c523 u32 ar; member
1558 u32 *p = &vmx->segment_cache.seg[seg].ar; in vmx_read_guest_seg_ar()
3696 u32 ar; in vmx_get_segment() local
3710 ar = vmx_read_guest_seg_ar(vmx, seg); in vmx_get_segment()
3711 var->unusable = (ar >> 16) & 1; in vmx_get_segment()
3712 var->type = ar & 15; in vmx_get_segment()
3713 var->s = (ar >> 4) & 1; in vmx_get_segment()
3714 var->dpl = (ar >> 5) & 3; in vmx_get_segment()
3723 var->avl = (ar >> 12) & 1; in vmx_get_segment()
3724 var->l = (ar >> 13) & 1; in vmx_get_segment()
[all …]
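In vmx.c the cached ar is the VMCS guest-segment access-rights word, and vmx_get_segment() unpacks it exactly as the shifts above show: type in bits 3..0, S at bit 4, DPL at bits 6..5, AVL at 12, L at 13, and the VMX "unusable" flag at 16. The same unpacking as a standalone helper (only a subset of the fields is decoded):

#include <stdint.h>
#include <stdio.h>

/* Subset of struct kvm_segment that the access-rights word feeds. */
struct seg_attrs {
    unsigned int type  : 4;    /* descriptor type, ar[3:0]      */
    unsigned int s     : 1;    /* code/data (vs system), ar[4]  */
    unsigned int dpl   : 2;    /* privilege level, ar[6:5]      */
    unsigned int avl   : 1;    /* available bit, ar[12]         */
    unsigned int l     : 1;    /* 64-bit code segment, ar[13]   */
    unsigned int unusable : 1; /* VMX "unusable" flag, ar[16]   */
};

/* Decode a VMX guest-segment access-rights word (same shifts as vmx.c). */
static struct seg_attrs decode_ar(uint32_t ar)
{
    struct seg_attrs a;

    a.unusable = (ar >> 16) & 1;
    a.type     = ar & 15;
    a.s        = (ar >> 4) & 1;
    a.dpl      = (ar >> 5) & 3;
    a.avl      = (ar >> 12) & 1;
    a.l        = (ar >> 13) & 1;
    return a;
}

int main(void)
{
    /* 0xa09b: a typical 64-bit code segment (type=0xb, S=1, P=1, L=1, G=1). */
    struct seg_attrs a = decode_ar(0xa09b);

    printf("type=%u s=%u dpl=%u l=%u unusable=%u\n",
           a.type, a.s, a.dpl, a.l, a.unusable);
    return 0;
}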
/linux-4.1.27/tools/lib/traceevent/
DMakefile25 $(call allow-override,AR,$(CROSS_COMPILE)ar)
/linux-4.1.27/drivers/net/wireless/
Dmac80211_hwsim.c2133 struct mac80211_hwsim_data *ar = hw->priv; in mac80211_hwsim_get_et_stats() local
2136 data[i++] = ar->tx_pkts; in mac80211_hwsim_get_et_stats()
2137 data[i++] = ar->tx_bytes; in mac80211_hwsim_get_et_stats()
2138 data[i++] = ar->rx_pkts; in mac80211_hwsim_get_et_stats()
2139 data[i++] = ar->rx_bytes; in mac80211_hwsim_get_et_stats()
2140 data[i++] = ar->tx_dropped; in mac80211_hwsim_get_et_stats()
2141 data[i++] = ar->tx_failed; in mac80211_hwsim_get_et_stats()
2142 data[i++] = ar->ps; in mac80211_hwsim_get_et_stats()
2143 data[i++] = ar->group; in mac80211_hwsim_get_et_stats()
2144 data[i++] = ar->power_level; in mac80211_hwsim_get_et_stats()
/linux-4.1.27/arch/ia64/include/asm/native/
Dinst.h81 (pred) mov reg = ar.itc \
/linux-4.1.27/lib/raid6/test/
DMakefile11 AR = ar
/linux-4.1.27/tools/build/Documentation/
DBuild.txt92 $ ar rcs libex.a libex-in.o
/linux-4.1.27/drivers/gpu/drm/ast/
Dast_drv.h276 u8 ar[20]; member
Dast_mode.c254 jreg = stdtable->ar[i]; in ast_set_std_reg()
/linux-4.1.27/arch/arm/boot/dts/
Dsun4i-a10-a1000.dts4 * Emilio López <emilio@elopez.com.ar>
/linux-4.1.27/include/uapi/linux/
Dkvm.h326 __u8 ar; member
396 __u8 ar; /* the access register number */ member
/linux-4.1.27/sound/oss/
Dmsnd_pinnacle.c406 #define update_potm(d,s,ar) \ argument
413 if (msnd_send_word(&dev, 0, 0, ar) == 0) \
416 #define update_pot(d,s,ar) \ argument
421 if (msnd_send_word(&dev, 0, 0, ar) == 0) \
/linux-4.1.27/Documentation/usb/
Drio.txt56 Most of the code was written by Cesar Miquel <miquel@df.uba.ar>. Keith
/linux-4.1.27/arch/powerpc/
DMakefile21 CROSS32AR := $(CROSS32_COMPILE)ar
/linux-4.1.27/arch/powerpc/sysdev/
Dfsl_pci.c591 u32 ar; member
828 if (!(in_le32(&in[i].ar) & PEX_RCIWARn_EN)) in fsl_pci_immrbar_base()
/linux-4.1.27/tools/perf/
DMakefile.perf112 AR = $(CROSS_COMPILE)ar
/linux-4.1.27/drivers/scsi/megaraid/
Dmegaraid_sas_fp.c135 u16 MR_ArPdGet(u32 ar, u32 arm, struct MR_DRV_RAID_MAP_ALL *map) in MR_ArPdGet() argument
137 return le16_to_cpu(map->raidMap.arMapInfo[ar].pd[arm]); in MR_ArPdGet()
Dmegaraid_sas.h1964 u16 MR_ArPdGet(u32 ar, u32 arm, struct MR_DRV_RAID_MAP_ALL *map);
/linux-4.1.27/
DMakefile357 AR = $(CROSS_COMPILE)ar
776 KBUILD_ARFLAGS := $(call ar-option,D)
DCREDITS689 E: jjciarla@raiz.uncu.edu.ar
690 E: jjo@mendoza.gov.ar
DMAINTAINERS878 M: Emilio López <emilio@elopez.com.ar>
4491 M: Ezequiel Garcia <ezequiel@vanguardiasur.com.ar>
5305 M: Juanjo Ciarlante <jjciarla@raiz.uncu.edu.ar>
10289 M: Cesar Miquel <miquel@df.uba.ar>
/linux-4.1.27/net/ipv4/
DKconfig74 documentation at <http://www.compendium.com.ar/policy-routing.txt>
/linux-4.1.27/Documentation/
Ddevices.txt1769 0 = /dev/amiraid/ar? Whole disk
1770 1 = /dev/amiraid/ar?p1 First partition
1771 2 = /dev/amiraid/ar?p2 Second partition
1773 15 = /dev/amiraid/ar?p15 15th partition
/linux-4.1.27/Documentation/virtual/kvm/
Dapi.txt2780 __u8 ar; /* the access register number */
2800 when KVM_S390_MEMOP_F_CHECK_ONLY is specified. "ar" designates the access
3562 __u8 ar;
3573 @ar - access register number
/linux-4.1.27/Documentation/security/
DSmack.txt434 as "ar". A lone dash is used to specify that no access should be allowed.
/linux-4.1.27/drivers/scsi/
DKconfig1586 <http://www.lysator.liu.se/amiga/ar/guide/ar310.guide?FEATURE5>),