Lines Matching refs:nbuf

62 qdf_nbuf_t nbuf; member
403 qdf_nbuf_len(tx_desc->nbuf), in dp_tx_process_htt_completion_be()
436 xmit_type = qdf_nbuf_get_vdev_xmit_type(tx_desc->nbuf); in dp_tx_process_htt_completion_be()
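
The completion-side references at 403 and 436 read the frame length and the vdev xmit type back out of the completed descriptor's nbuf. A minimal sketch of just that step, assuming the driver's dp_tx_desc_s layout and leaving the actual drop/stats handling as a comment:

    /* Sketch only: pull per-frame data from the completed descriptor's nbuf.
     * The stats update hinted at below is an assumption, not the driver's
     * actual HTT completion logic.
     */
    static void htt_comp_read_nbuf_sketch(struct dp_tx_desc_s *tx_desc)
    {
        qdf_size_t len = qdf_nbuf_len(tx_desc->nbuf);                    /* as at 403 */
        uint8_t xmit_type = qdf_nbuf_get_vdev_xmit_type(tx_desc->nbuf);  /* as at 436 */

        /* ... len and xmit_type feed the per-vdev drop/inspect counters ... */
        (void)len;
        (void)xmit_type;
    }
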
510 qdf_nbuf_t nbuf) in dp_tx_set_min_rates_for_critical_frames() argument
520 if (QDF_NBUF_CB_TX_EXTRA_IS_CRITICAL(nbuf)) { in dp_tx_set_min_rates_for_critical_frames()
532 qdf_nbuf_t nbuf) in dp_tx_set_min_rates_for_critical_frames() argument
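
The two declarations at 510/532 are the feature-on and feature-off variants of the same helper; the only nbuf use visible is the control-block flag test at 520. A sketch of that test, with the rate override itself left abstract since the listing does not show it:

    /* Sketch: only the flag test at 520 is taken from the listing; what gets
     * written into the TX metadata for critical frames is left as a comment.
     */
    static void set_min_rates_for_critical_sketch(qdf_nbuf_t nbuf)
    {
        if (QDF_NBUF_CB_TX_EXTRA_IS_CRITICAL(nbuf)) {
            /* ... request minimum-rate transmission for this frame ... */
        }
    }
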
550 qdf_nbuf_t nbuf) in dp_tx_set_particular_tx_queue() argument
555 if (qdf_unlikely(QDF_NBUF_CB_GET_PACKET_TYPE(nbuf) == in dp_tx_set_particular_tx_queue()
566 qdf_nbuf_t nbuf) in dp_tx_set_particular_tx_queue() argument
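
At 555 the helper keys off the packet type stored in the nbuf control block; the value it compares against is truncated in the listing, so the sketch below uses a placeholder rather than the real constant:

    /* Sketch: the packet type matched at 555 is cut off in the listing;
     * 0 below is a placeholder, not the driver's value.
     */
    static bool needs_particular_tx_queue_sketch(qdf_nbuf_t nbuf)
    {
        return qdf_unlikely(QDF_NBUF_CB_GET_PACKET_TYPE(nbuf) == 0);
    }
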
590 qdf_ether_header_t *eh = (qdf_ether_header_t *)qdf_nbuf_data(ptr->nbuf); in dp_tx_mlo_mcast_multipass_lookup()
633 nbuf_clone = qdf_nbuf_clone(ptr->nbuf); in dp_tx_mlo_mcast_multipass_send()
639 nbuf_clone = ptr->nbuf; in dp_tx_mlo_mcast_multipass_send()
644 msdu_info.xmit_type = qdf_nbuf_get_vdev_xmit_type(ptr->nbuf); in dp_tx_mlo_mcast_multipass_send()
695 qdf_nbuf_t nbuf) in dp_tx_mlo_mcast_multipass_handler() argument
704 mpass_buf.nbuf = nbuf; in dp_tx_mlo_mcast_multipass_handler()
733 nbuf_copy = qdf_nbuf_copy(nbuf); in dp_tx_mlo_mcast_multipass_handler()
743 mpass_buf_copy.nbuf = nbuf_copy; in dp_tx_mlo_mcast_multipass_handler()
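
Within the multipass handler, 704 wraps the original frame in an mpass buffer and 733/743 make a full copy wrapped in a second one. A sketch of that buffer handling only; the struct layout is inferred from the nbuf member at line 62, and the surrounding VLAN/peer logic is not shown in the listing:

    /* Sketch of the copy step at 733/743. mpass_buf_sketch stands in for the
     * driver's mpass buffer type; its other fields are not visible here.
     */
    struct mpass_buf_sketch {
        qdf_nbuf_t nbuf;
        /* ... vlan id and related fields ... */
    };

    static void multipass_wrap_and_copy_sketch(qdf_nbuf_t nbuf)
    {
        struct mpass_buf_sketch mpass_buf = { .nbuf = nbuf };   /* as at 704 */
        qdf_nbuf_t nbuf_copy = qdf_nbuf_copy(nbuf);             /* as at 733 */

        if (nbuf_copy) {
            struct mpass_buf_sketch mpass_buf_copy = { .nbuf = nbuf_copy };  /* 743 */
            /* ... original and copy go through separate multipass sends ... */
            (void)mpass_buf_copy;
        }
        (void)mpass_buf;
    }
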
776 qdf_nbuf_t nbuf) in dp_tx_mlo_mcast_multipass_handler() argument
787 qdf_nbuf_t nbuf = (qdf_nbuf_t)arg; in dp_tx_mlo_mcast_pkt_send() local
794 nbuf_clone = qdf_nbuf_clone(nbuf); in dp_tx_mlo_mcast_pkt_send()
800 nbuf_clone = nbuf; in dp_tx_mlo_mcast_pkt_send()
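
The per-link send callback either clones the frame (794) or hands over the original (800). A sketch of that choice; the condition deciding between the two is iterator state the listing does not show, so it is modelled here as a hypothetical is_last_link flag:

    /* Sketch of the clone-or-reuse choice at 794/800: each partner link gets
     * its own clone, and reusing the original on the final link avoids one
     * extra copy.
     */
    static qdf_nbuf_t mcast_pick_buf_sketch(qdf_nbuf_t nbuf, bool is_last_link)
    {
        qdf_nbuf_t nbuf_clone;

        if (!is_last_link) {
            nbuf_clone = qdf_nbuf_clone(nbuf);   /* per-link copy, as at 794 */
            if (!nbuf_clone)
                return NULL;                     /* clone failed; caller still owns nbuf */
        } else {
            nbuf_clone = nbuf;                   /* reuse the original, as at 800 */
        }

        return nbuf_clone;
    }
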
853 qdf_nbuf_t nbuf) in dp_tx_mlo_mcast_handler_be() argument
859 dp_tx_mlo_mcast_multipass_handler(soc, vdev, nbuf)) in dp_tx_mlo_mcast_handler_be()
864 nbuf, DP_MOD_ID_REINJECT, DP_LINK_VDEV_ITER, in dp_tx_mlo_mcast_handler_be()
868 dp_tx_mlo_mcast_pkt_send(be_vdev, vdev, nbuf); in dp_tx_mlo_mcast_handler_be()
894 qdf_nbuf_t nbuf = (qdf_nbuf_t)arg; in dp_tx_mlo_mcast_enhance_be() local
904 qdf_nbuf_ref(nbuf); in dp_tx_mlo_mcast_enhance_be()
906 if (qdf_unlikely(!dp_tx_mcast_enhance(ptnr_vdev, nbuf))) in dp_tx_mlo_mcast_enhance_be()
909 qdf_nbuf_free(nbuf); in dp_tx_mlo_mcast_enhance_be()
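
The enhance callback brackets dp_tx_mcast_enhance() with an extra reference (904) and a free (909) so the caller's reference survives whatever the enhance path does with the frame. A sketch of that discipline; the early-return control flow between the fragments is an assumption:

    /* Sketch of the reference bracket at 904-909: hold the nbuf across the
     * mcast-enhance call on a partner vdev, then drop only the extra hold.
     */
    static void mcast_enhance_on_ptnr_sketch(struct dp_vdev *ptnr_vdev, qdf_nbuf_t nbuf)
    {
        qdf_nbuf_ref(nbuf);                                   /* extra hold, 904 */

        if (qdf_unlikely(!dp_tx_mcast_enhance(ptnr_vdev, nbuf)))
            return;                                           /* assumed branch, 906 */

        qdf_nbuf_free(nbuf);                                  /* release the extra hold, 909 */
    }
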
914 qdf_nbuf_t nbuf, in dp_tx_mlo_mcast_send_be() argument
921 return nbuf; in dp_tx_mlo_mcast_send_be()
924 qdf_nbuf_free(nbuf); in dp_tx_mlo_mcast_send_be()
944 qdf_nbuf_ref(nbuf); in dp_tx_mlo_mcast_send_be()
945 if (qdf_unlikely(!dp_tx_mcast_enhance(vdev, nbuf))) { in dp_tx_mlo_mcast_send_be()
948 nbuf, DP_MOD_ID_TX, in dp_tx_mlo_mcast_send_be()
951 qdf_nbuf_free(nbuf); in dp_tx_mlo_mcast_send_be()
955 qdf_nbuf_free(nbuf); in dp_tx_mlo_mcast_send_be()
957 dp_tx_mlo_mcast_handler_be(soc, vdev, nbuf); in dp_tx_mlo_mcast_send_be()
974 qdf_nbuf_t nbuf) in dp_tx_mlo_mcast_handler_be() argument
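
Taken together, the dp_tx_mlo_mcast_send_be() fragments (914-957) show three outcomes: return the nbuf so the regular TX path sends it, free it, or replicate it through dp_tx_mlo_mcast_handler_be(). A condensed sketch of that flow; the guarding conditions (the is_mlo_mcast flag and any primary-link checks) are assumptions:

    /* Sketch only: 'is_mlo_mcast' stands in for whatever check precedes the
     * return at 921; the real conditions are not visible in the listing.
     */
    static qdf_nbuf_t mlo_mcast_send_sketch(struct dp_soc *soc, struct dp_vdev *vdev,
                                            qdf_nbuf_t nbuf, bool is_mlo_mcast)
    {
        if (!is_mlo_mcast)
            return nbuf;                       /* regular TX path sends it (921) */

        qdf_nbuf_ref(nbuf);                    /* hold across mcast enhance (944) */
        if (qdf_unlikely(!dp_tx_mcast_enhance(vdev, nbuf))) {
            /* the listing also shows a partner-vdev iteration here (946-948) */
            qdf_nbuf_free(nbuf);               /* 951 */
            return NULL;
        }

        qdf_nbuf_free(nbuf);                   /* drop only the extra hold (955) */
        dp_tx_mlo_mcast_handler_be(soc, vdev, nbuf);   /* replicate across links (957) */
        return NULL;
    }
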
998 uint16_t *fw_metadata, qdf_nbuf_t nbuf, in dp_sawf_config_be() argument
1004 q_id = dp_sawf_queue_id_get(nbuf); in dp_sawf_config_be()
1024 dp_sawf_tcl_cmd(fw_metadata, nbuf); in dp_sawf_config_be()
1039 uint16_t *fw_metadata, qdf_nbuf_t nbuf, in dp_sawf_config_be() argument
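
The SAWF references at 1004 and 1024 show the queue id being read from the nbuf and dp_sawf_tcl_cmd() then encoding SAWF information into the firmware metadata word. A sketch with the intermediate validation left as a comment and the return types assumed:

    /* Sketch of the two SAWF steps visible in the listing; everything between
     * them (q_id validation, TID derivation) is not shown and stays abstract.
     */
    static void sawf_config_sketch(uint16_t *fw_metadata, qdf_nbuf_t nbuf)
    {
        uint32_t q_id = dp_sawf_queue_id_get(nbuf);   /* 1004; return type assumed */

        /* ... validate q_id and derive TID / flow parameters here ... */
        (void)q_id;

        dp_sawf_tcl_cmd(fw_metadata, nbuf);           /* fill fw_metadata, 1024 */
    }
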
1100 qdf_nbuf_t nbuf; in dp_ppeds_tx_comp_handler() local
1176 nbuf = dp_ppeds_tx_desc_free(soc, tx_desc); in dp_ppeds_tx_comp_handler()
1177 qdf_nbuf_free(nbuf); in dp_ppeds_tx_comp_handler()
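
In the PPE-DS completion handler, freeing the TX descriptor at 1176 hands back the attached nbuf, which 1177 then releases. A sketch of that per-descriptor cleanup, with the surrounding ring/budget loop omitted and the NULL check added as an assumption:

    /* Sketch: recycle the PPE-DS descriptor, then free the nbuf it returned. */
    static void ppeds_comp_release_sketch(struct dp_soc *soc, struct dp_tx_desc_s *tx_desc)
    {
        qdf_nbuf_t nbuf;

        nbuf = dp_ppeds_tx_desc_free(soc, tx_desc);   /* detach nbuf, 1176 */
        if (nbuf)                                     /* NULL check is an assumption */
            qdf_nbuf_free(nbuf);                      /* 1177 */
    }
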
1339 if (dp_sawf_tag_valid_get(tx_desc->nbuf)) { in dp_tx_hw_enqueue_be()
1341 &fw_metadata, tx_desc->nbuf, msdu_info); in dp_tx_hw_enqueue_be()
1369 if ((qdf_nbuf_get_tx_cksum(tx_desc->nbuf) == in dp_tx_hw_enqueue_be()
1371 qdf_nbuf_is_tso(tx_desc->nbuf)) { in dp_tx_hw_enqueue_be()
1385 tx_desc->nbuf); in dp_tx_hw_enqueue_be()
1387 tx_desc->nbuf); in dp_tx_hw_enqueue_be()
1414 dp_vdev_peer_stats_update_protocol_cnt_tx(vdev, tx_desc->nbuf); in dp_tx_hw_enqueue_be()
1434 qdf_get_log_timestamp(), tx_desc->nbuf); in dp_tx_hw_enqueue_be()
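
At 1369-1371 the hardware-enqueue path tests whether checksum offload was requested or the frame is TSO before programming the descriptor. The compared checksum value is cut off in the listing, so QDF_NBUF_TX_CKSUM_TCP_UDP below is an assumption, and the descriptor programming is reduced to a comment:

    /* Sketch of the offload test at 1369-1371 only; the hal_tx_desc_* calls
     * that would follow are not visible in the listing.
     */
    static void hw_enqueue_cksum_sketch(struct dp_tx_desc_s *tx_desc)
    {
        if (qdf_nbuf_get_tx_cksum(tx_desc->nbuf) == QDF_NBUF_TX_CKSUM_TCP_UDP ||
            qdf_nbuf_is_tso(tx_desc->nbuf)) {
            /* ... enable L3/L4 checksum generation in the TCL descriptor ... */
        }
    }
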
1848 qdf_nbuf_t nbuf) in dp_tx_nbuf_map_be() argument
1850 qdf_nbuf_dma_clean_range_no_dsb((void *)nbuf->data, in dp_tx_nbuf_map_be()
1851 (void *)(nbuf->data + 256)); in dp_tx_nbuf_map_be()
1853 return (qdf_dma_addr_t)qdf_mem_virt_to_phys(nbuf->data); in dp_tx_nbuf_map_be()
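
Lines 1850-1853 give almost the whole fast map helper: clean only the first 256 bytes of the frame from the data cache (without a barrier) and translate the virtual address directly, bypassing the generic nbuf map API. A reconstructed sketch; the vdev/tx_desc parameters visible at 1848 are dropped since the listing does not show them being used:

    /* Sketch reconstructed from 1850-1853: header-only cache clean plus a
     * direct virt-to-phys translation for the fast TX path.
     */
    static inline qdf_dma_addr_t tx_nbuf_fast_map_sketch(qdf_nbuf_t nbuf)
    {
        qdf_nbuf_dma_clean_range_no_dsb((void *)nbuf->data,
                                        (void *)(nbuf->data + 256));

        return (qdf_dma_addr_t)qdf_mem_virt_to_phys(nbuf->data);
    }
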
1864 qdf_nbuf_t nbuf) in dp_tx_fast_send_be() argument
1879 uint8_t xmit_type = qdf_nbuf_get_vdev_xmit_type(nbuf); in dp_tx_fast_send_be()
1883 return nbuf; in dp_tx_fast_send_be()
1887 return nbuf; in dp_tx_fast_send_be()
1889 desc_pool_id = qdf_nbuf_get_queue_mapping(nbuf) & DP_TX_QUEUE_MASK; in dp_tx_fast_send_be()
1891 pkt_len = qdf_nbuf_headlen(nbuf); in dp_tx_fast_send_be()
1897 if (dp_tx_limit_check(vdev, nbuf)) in dp_tx_fast_send_be()
1898 return nbuf; in dp_tx_fast_send_be()
1902 tid = qdf_nbuf_get_priority(nbuf); in dp_tx_fast_send_be()
1915 return nbuf; in dp_tx_fast_send_be()
1921 tx_desc->nbuf = nbuf; in dp_tx_fast_send_be()
1930 tx_desc->nbuf->fast_recycled = 1; in dp_tx_fast_send_be()
1932 if (nbuf->is_from_recycler && nbuf->fast_xmit) in dp_tx_fast_send_be()
1935 paddr = dp_tx_nbuf_map_be(vdev, tx_desc, nbuf); in dp_tx_fast_send_be()
1963 if (qdf_unlikely(dp_sawf_tag_valid_get(nbuf))) { in dp_tx_fast_send_be()
1965 NULL, nbuf, NULL); in dp_tx_fast_send_be()
2021 return nbuf; in dp_tx_fast_send_be()
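
The fast-send references (1864-2021) outline the whole path: derive the descriptor pool from the nbuf queue mapping, take the header length and priority, enforce the TX limit, attach the nbuf to a descriptor, map it with the fast helper above, and stamp SAWF metadata when tagged. A condensed sketch; descriptor allocation and the HAL ring write are not in the listing and stay as comments:

    /* Sketch only: returning the nbuf apparently signals "not consumed" back
     * to the caller, matching the returns at 1898/1915/2021.
     */
    static qdf_nbuf_t fast_send_sketch(struct dp_vdev *vdev, qdf_nbuf_t nbuf)
    {
        uint16_t desc_pool_id = qdf_nbuf_get_queue_mapping(nbuf) & DP_TX_QUEUE_MASK; /* 1889 */
        uint32_t pkt_len = qdf_nbuf_headlen(nbuf);                                   /* 1891 */
        uint8_t tid;

        if (dp_tx_limit_check(vdev, nbuf))        /* outstanding-descriptor limit, 1897 */
            return nbuf;

        tid = qdf_nbuf_get_priority(nbuf);        /* 1902 */

        /* ... allocate a TX descriptor from desc_pool_id, set tx_desc->nbuf = nbuf,
         *     mark nbuf->fast_recycled, and map via the fast map helper above ... */

        if (qdf_unlikely(dp_sawf_tag_valid_get(nbuf))) {
            /* ... stamp SAWF metadata before ringing TCL (1963-1965) ... */
        }

        (void)desc_pool_id;
        (void)pkt_len;
        (void)tid;
        return NULL;                              /* consumed on the success path (assumed) */
    }
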