Searched refs: txq (Results 1 – 24 of 24), sorted by relevance

/linux-4.19.296/include/trace/events/
qdisc.h
14 TP_PROTO(struct Qdisc *qdisc, const struct netdev_queue *txq,
17 TP_ARGS(qdisc, txq, packets, skb),
21 __field(const struct netdev_queue *, txq )
33 __entry->txq = txq;
36 __entry->ifindex = txq->dev ? txq->dev->ifindex : 0;
39 __entry->txq_state = txq->state;
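The qdisc.h matches are fragments of the single TRACE_EVENT defined in include/trace/events/qdisc.h (the qdisc dequeue tracepoint). A rough sketch of the structure they come from, trimmed to the fields visible above (the real event records additional fields that are omitted here):

/* Sketch only: field list trimmed to the matches above. */
TRACE_EVENT(qdisc_dequeue,

	TP_PROTO(struct Qdisc *qdisc, const struct netdev_queue *txq,
		 int packets, struct sk_buff *skb),

	TP_ARGS(qdisc, txq, packets, skb),

	TP_STRUCT__entry(
		__field(struct Qdisc *, qdisc)
		__field(const struct netdev_queue *, txq)
		__field(int, ifindex)
		__field(unsigned long, txq_state)
	),

	TP_fast_assign(
		__entry->qdisc = qdisc;
		__entry->txq = txq;
		__entry->ifindex = txq->dev ? txq->dev->ifindex : 0;
		__entry->txq_state = txq->state;
	),

	TP_printk("dequeue ifindex=%d txq_state=%#lx",
		  __entry->ifindex, __entry->txq_state)
);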
/linux-4.19.296/drivers/bluetooth/
hci_h4.c
52 struct sk_buff_head txq; member
66 skb_queue_head_init(&h4->txq); in h4_open()
79 skb_queue_purge(&h4->txq); in h4_flush()
93 skb_queue_purge(&h4->txq); in h4_close()
112 skb_queue_tail(&h4->txq, skb); in h4_enqueue()
146 return skb_dequeue(&h4->txq); in h4_dequeue()
hci_ath.c
49 struct sk_buff_head txq; member
122 skb_queue_head_init(&ath->txq); in ath_open()
138 skb_queue_purge(&ath->txq); in ath_close()
156 skb_queue_purge(&ath->txq); in ath_flush()
246 skb_queue_tail(&ath->txq, skb); in ath_enqueue()
258 return skb_dequeue(&ath->txq); in ath_dequeue()
hci_mrvl.c
50 struct sk_buff_head txq; member
76 skb_queue_head_init(&mrvl->txq); in mrvl_open()
91 skb_queue_purge(&mrvl->txq); in mrvl_close()
106 skb_queue_purge(&mrvl->txq); in mrvl_flush()
117 skb = skb_dequeue(&mrvl->txq); in mrvl_dequeue()
133 skb_queue_tail(&mrvl->txq, skb); in mrvl_enqueue()
150 skb_queue_tail(&mrvl->txq, skb); in mrvl_send_ack()
hci_ag6xx.c
39 struct sk_buff_head txq; member
58 skb_queue_head_init(&ag6xx->txq); in ag6xx_open()
70 skb_queue_purge(&ag6xx->txq); in ag6xx_close()
84 skb_queue_purge(&ag6xx->txq); in ag6xx_flush()
93 skb = skb_dequeue(&ag6xx->txq); in ag6xx_dequeue()
106 skb_queue_tail(&ag6xx->txq, skb); in ag6xx_enqueue()
btsdio.c
64 struct sk_buff_head txq; member
116 while ((skb = skb_dequeue(&data->txq))) { in btsdio_work()
120 skb_queue_head(&data->txq, skb); in btsdio_work()
246 skb_queue_purge(&data->txq); in btsdio_flush()
274 skb_queue_tail(&data->txq, skb); in btsdio_send_frame()
312 skb_queue_head_init(&data->txq); in btsdio_probe()
dtl1_cs.c
75 struct sk_buff_head txq; member
156 skb = skb_dequeue(&(info->txq)); in dtl1_write_wakeup()
168 skb_queue_head(&(info->txq), skb); in dtl1_write_wakeup()
370 skb_queue_purge(&(info->txq)); in dtl1_hci_flush()
421 skb_queue_tail(&(info->txq), s); in dtl1_hci_send_frame()
443 skb_queue_head_init(&(info->txq)); in dtl1_open()
hci_ll.c
88 struct sk_buff_head txq; member
118 skb_queue_tail(&ll->txq, skb); in send_hcill_cmd()
134 skb_queue_head_init(&ll->txq); in ll_open()
159 skb_queue_purge(&ll->txq); in ll_flush()
172 skb_queue_purge(&ll->txq); in ll_close()
202 skb_queue_tail(&ll->txq, skb); in __ll_do_awake()
335 skb_queue_tail(&ll->txq, skb); in ll_enqueue()
454 return skb_dequeue(&ll->txq); in ll_dequeue()
bluecard_cs.c
72 struct sk_buff_head txq; member
259 skb = skb_dequeue(&(info->txq)); in bluecard_write_wakeup()
324 skb_queue_head(&(info->txq), skb); in bluecard_write_wakeup()
597 skb_queue_tail(&(info->txq), skb); in bluecard_hci_set_baud_rate()
614 skb_queue_purge(&(info->txq)); in bluecard_hci_flush()
670 skb_queue_tail(&(info->txq), skb); in bluecard_hci_send_frame()
692 skb_queue_head_init(&(info->txq)); in bluecard_open()
775 skb_queue_purge(&(info->txq)); in bluecard_open()
btmtkuart.c
68 struct sk_buff_head txq; member
228 struct sk_buff *skb = skb_dequeue(&bdev->txq); in btmtkuart_tx_work()
240 skb_queue_head(&bdev->txq, skb); in btmtkuart_tx_work()
457 skb_queue_purge(&bdev->txq); in btmtkuart_flush()
542 skb_queue_tail(&bdev->txq, skb); in btmtkuart_send_frame()
567 skb_queue_head_init(&bdev->txq); in btmtkuart_probe()
hci_intel.c
84 struct sk_buff_head txq; member
194 skb_queue_head(&intel->txq, skb); in intel_lpm_suspend()
238 skb_queue_head(&intel->txq, skb); in intel_lpm_resume()
277 skb_queue_head(&intel->txq, skb); in intel_lpm_host_wake()
416 skb_queue_head_init(&intel->txq); in intel_open()
439 skb_queue_purge(&intel->txq); in intel_close()
453 skb_queue_purge(&intel->txq); in intel_flush()
532 skb_queue_tail(&intel->txq, skb); in intel_set_baudrate()
1049 skb_queue_tail(&intel->txq, skb); in intel_enqueue()
1059 skb = skb_dequeue(&intel->txq); in intel_dequeue()
hci_qca.c
91 struct sk_buff_head txq; member
278 skb_queue_tail(&qca->txq, skb); in send_hci_ibs_cmd()
460 skb_queue_head_init(&qca->txq); in qca_open()
592 skb_queue_purge(&qca->txq); in qca_flush()
608 skb_queue_purge(&qca->txq); in qca_close()
737 skb_queue_tail(&qca->txq, skb); in device_woke_up()
779 skb_queue_tail(&qca->txq, skb); in qca_enqueue()
789 skb_queue_tail(&qca->txq, skb); in qca_enqueue()
911 return skb_dequeue(&qca->txq); in qca_dequeue()
980 skb_queue_tail(&qca->txq, skb); in qca_set_baudrate()
[all …]
bt3c_cs.c
77 struct sk_buff_head txq; member
196 skb = skb_dequeue(&(info->txq)); in bt3c_write_wakeup()
390 skb_queue_purge(&(info->txq)); in bt3c_hci_flush()
429 skb_queue_tail(&(info->txq), skb); in bt3c_hci_send_frame()
543 skb_queue_head_init(&(info->txq)); in bt3c_open()
hci_nokia.c
144 struct sk_buff_head txq; member
491 skb_queue_purge(&btdev->txq); in nokia_flush()
505 skb_queue_purge(&btdev->txq); in nokia_close()
535 skb_queue_tail(&btdev->txq, skb); in nokia_enqueue()
657 struct sk_buff *result = skb_dequeue(&btdev->txq); in nokia_dequeue()
754 skb_queue_head_init(&btdev->txq); in nokia_bluetooth_serdev_probe()
hci_bcm.c
129 struct sk_buff_head txq; member
367 skb_queue_tail(&bcm->txq, skb); in bcm_set_diag()
388 skb_queue_head_init(&bcm->txq); in bcm_open()
476 skb_queue_purge(&bcm->txq); in bcm_close()
490 skb_queue_purge(&bcm->txq); in bcm_flush()
636 skb_queue_tail(&bcm->txq, skb); in bcm_enqueue()
655 skb = skb_dequeue(&bcm->txq); in bcm_dequeue()
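Every Bluetooth driver listed above uses txq the same way: a per-device struct sk_buff_head, initialised at open/probe time, appended to on the enqueue/send path, drained by the dequeue or work handler, and purged on flush/close. A minimal sketch of that shared pattern follows; my_dev and the my_* functions are hypothetical stand-ins, while the skb_queue_* helpers are the real <linux/skbuff.h> API appearing in the matches.

#include <linux/skbuff.h>

/* Hypothetical device structure; each driver above has an equivalent. */
struct my_dev {
	struct sk_buff_head txq;		/* pending TX frames */
};

static int my_open(struct my_dev *dev)
{
	skb_queue_head_init(&dev->txq);		/* as in h4_open(), qca_open(), ... */
	return 0;
}

static int my_enqueue(struct my_dev *dev, struct sk_buff *skb)
{
	skb_queue_tail(&dev->txq, skb);		/* as in h4_enqueue(), bcm_enqueue(), ... */
	return 0;
}

static struct sk_buff *my_dequeue(struct my_dev *dev)
{
	return skb_dequeue(&dev->txq);		/* as in h4_dequeue(), intel_dequeue(), ... */
}

static void my_flush(struct my_dev *dev)
{
	skb_queue_purge(&dev->txq);		/* as in h4_flush(), btsdio_flush(), ... */
}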
/linux-4.19.296/include/linux/
netdevice.h
2072 int netdev_txq_to_tc(struct net_device *dev, unsigned int txq);
3044 void netif_schedule_queue(struct netdev_queue *txq);
3075 struct netdev_queue *txq = netdev_get_tx_queue(dev, i); in netif_tx_start_all_queues() local
3076 netif_tx_start_queue(txq); in netif_tx_start_all_queues()
3099 struct netdev_queue *txq = netdev_get_tx_queue(dev, i); in netif_tx_wake_all_queues() local
3100 netif_tx_wake_queue(txq); in netif_tx_wake_all_queues()
3329 struct netdev_queue *txq = netdev_get_tx_queue(dev, queue_index); in netif_start_subqueue() local
3331 netif_tx_start_queue(txq); in netif_start_subqueue()
3343 struct netdev_queue *txq = netdev_get_tx_queue(dev, queue_index); in netif_stop_subqueue() local
3344 netif_tx_stop_queue(txq); in netif_stop_subqueue()
[all …]
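The netdevice.h helpers above all resolve a queue index to a struct netdev_queue via netdev_get_tx_queue() and then start, stop or wake that one queue. A sketch of how a multiqueue driver typically uses the same helpers; my_ring_full() and my_xmit_to_hw() are hypothetical driver logic, the rest is the real <linux/netdevice.h> API.

#include <linux/netdevice.h>
#include <linux/skbuff.h>

static bool my_ring_full(struct net_device *dev, unsigned int idx);
static void my_xmit_to_hw(struct net_device *dev, unsigned int idx,
			  struct sk_buff *skb);

static netdev_tx_t my_start_xmit(struct sk_buff *skb, struct net_device *dev)
{
	unsigned int idx = skb_get_queue_mapping(skb);
	struct netdev_queue *txq = netdev_get_tx_queue(dev, idx);

	if (my_ring_full(dev, idx)) {
		netif_tx_stop_queue(txq);	/* pause only this TX queue */
		return NETDEV_TX_BUSY;
	}

	my_xmit_to_hw(dev, idx, skb);
	return NETDEV_TX_OK;
}

/* TX completion path: wake the queue once ring space is available again. */
static void my_tx_done(struct net_device *dev, unsigned int idx)
{
	netif_tx_wake_queue(netdev_get_tx_queue(dev, idx));
}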
netpoll.h
43 struct sk_buff_head txq; member
ti_wilink_st.h
155 struct sk_buff_head txq, tx_waitq; member
/linux-4.19.296/drivers/misc/ti-st/
st_core.c
219 skb_queue_tail(&st_gdata->txq, waiting_skb); in st_wakeup_ack()
409 return skb_dequeue(&st_gdata->txq); in st_int_dequeue()
431 skb_queue_tail(&st_gdata->txq, skb); in st_int_enqueue()
783 skb_queue_purge(&st_gdata->txq); in st_tty_close()
879 skb_queue_head_init(&st_gdata->txq); in st_core_init()
911 skb_queue_purge(&st_gdata->txq); in st_core_exit()
/linux-4.19.296/include/net/
sch_generic.h
169 static inline int qdisc_avail_bulklimit(const struct netdev_queue *txq) in qdisc_avail_bulklimit() argument
173 return dql_avail(&txq->dql); in qdisc_avail_bulklimit()
654 struct netdev_queue *txq = netdev_get_tx_queue(dev, i); in qdisc_all_tx_empty() local
655 const struct Qdisc *q = rcu_dereference(txq->qdisc); in qdisc_all_tx_empty()
672 struct netdev_queue *txq = netdev_get_tx_queue(dev, i); in qdisc_tx_changing() local
673 if (rcu_access_pointer(txq->qdisc) != txq->qdisc_sleeping) in qdisc_tx_changing()
685 struct netdev_queue *txq = netdev_get_tx_queue(dev, i); in qdisc_tx_is_noop() local
686 if (rcu_access_pointer(txq->qdisc) != &noop_qdisc) in qdisc_tx_is_noop()
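qdisc_avail_bulklimit() above reads the queue's byte-quota (BQL) state via dql_avail(&txq->dql); that state is maintained by drivers through the netdev_tx_sent_queue()/netdev_tx_completed_queue() pair. A sketch under that assumption, with hypothetical driver hooks:

#include <linux/netdevice.h>
#include <linux/skbuff.h>

/* Posting a frame: account the bytes handed to hardware. */
static void my_hw_queue_skb(struct netdev_queue *txq, struct sk_buff *skb)
{
	netdev_tx_sent_queue(txq, skb->len);
	/* ... write the descriptor to the hardware ring (hypothetical) ... */
}

/* TX completion: return the budget so dql_avail() grows again. */
static void my_hw_tx_complete(struct netdev_queue *txq,
			      unsigned int pkts, unsigned int bytes)
{
	netdev_tx_completed_queue(txq, pkts, bytes);
}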
pkt_sched.h
114 struct net_device *dev, struct netdev_queue *txq,
mac80211.h
1519 struct ieee80211_txq *txq; member
1883 struct ieee80211_txq *txq[IEEE80211_NUM_TIDS]; member
3810 struct ieee80211_txq *txq);
5979 struct ieee80211_txq *txq);
5992 void ieee80211_txq_get_depth(struct ieee80211_txq *txq,
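The mac80211.h entries cover the per-vif/per-sta struct ieee80211_txq pointers and the driver-facing helpers. A sketch of a driver's wake_tx_queue handler draining such a queue; my_hw_xmit() is hypothetical, while ieee80211_tx_dequeue() and ieee80211_txq_get_depth() are standard mac80211 APIs (the latter is visible above).

#include <net/mac80211.h>

static void my_hw_xmit(struct ieee80211_hw *hw, struct sk_buff *skb);

/* Hypothetical handler for the wake_tx_queue driver op. */
static void my_wake_tx_queue(struct ieee80211_hw *hw, struct ieee80211_txq *txq)
{
	unsigned long frames, bytes;
	struct sk_buff *skb;

	ieee80211_txq_get_depth(txq, &frames, &bytes);	/* optional backlog check */

	while ((skb = ieee80211_tx_dequeue(hw, txq)))
		my_hw_xmit(hw, skb);	/* hand the frame to hardware */
}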
/linux-4.19.296/include/linux/usb/
usbnet.h
59 struct sk_buff_head txq; member
/linux-4.19.296/include/linux/avf/
virtchnl.h
324 struct virtchnl_txq_info txq; member