Lines matching refs:nbuf — each entry gives the source line number, the matching code, and the enclosing function; declarations are tagged "argument" (parameter) or "local" (local variable).
204 qdf_nbuf_t nbuf) in dp_rx_mark_first_packet_after_wow_wakeup() argument
213 qdf_nbuf_mark_wakeup_frame(nbuf); in dp_rx_mark_first_packet_after_wow_wakeup()
345 (nbuf_frag_info_t->virt_addr).nbuf = in dp_pdev_nbuf_alloc_and_map_replenish()
350 if (!((nbuf_frag_info_t->virt_addr).nbuf)) { in dp_pdev_nbuf_alloc_and_map_replenish()
360 (nbuf_frag_info_t->virt_addr).nbuf, mac_id); in dp_pdev_nbuf_alloc_and_map_replenish()
367 qdf_nbuf_get_frag_paddr((nbuf_frag_info_t->virt_addr).nbuf, 0); in dp_pdev_nbuf_alloc_and_map_replenish()
369 (nbuf_frag_info_t->virt_addr).nbuf), in dp_pdev_nbuf_alloc_and_map_replenish()
373 ret = dp_check_paddr(dp_soc, &((nbuf_frag_info_t->virt_addr).nbuf), in dp_pdev_nbuf_alloc_and_map_replenish()
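
The dp_pdev_nbuf_alloc_and_map_replenish() hits above trace a fixed ordering: allocate into (virt_addr).nbuf, fetch the fragment's DMA address, validate it with dp_check_paddr(), and only then publish the buffer. A minimal standalone C sketch of that ordering, assuming stand-in types throughout — the nbuf/rx_desc structs and fake_map()/fake_check_paddr() helpers below are illustrative, not the real qdf/dp APIs:

```c
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <stdbool.h>

/* Stand-ins for qdf_nbuf_t and the rx descriptor; illustrative only. */
struct nbuf { uint8_t *data; };
struct rx_desc { struct nbuf *buf; uintptr_t paddr; };

/* Fake "DMA map": the virtual address reinterpreted. */
static uintptr_t fake_map(struct nbuf *b) { return (uintptr_t)b->data; }

/* Fake address check, mirroring the dp_check_paddr() call site. */
static bool fake_check_paddr(uintptr_t paddr) { return paddr != 0; }

/* Allocate, map, validate, then publish -- in that order. */
static int alloc_and_map_replenish(struct rx_desc *desc, size_t buf_size)
{
    struct nbuf *b = malloc(sizeof(*b));

    if (!b)
        return -1;
    b->data = malloc(buf_size);
    if (!b->data) {
        free(b);
        return -1;
    }

    uintptr_t paddr = fake_map(b);   /* ~ qdf_nbuf_get_frag_paddr() */

    if (!fake_check_paddr(paddr)) {  /* ~ dp_check_paddr() */
        free(b->data);
        free(b);
        return -1;
    }
    desc->buf = b;                   /* publish only after checks pass */
    desc->paddr = paddr;
    return 0;
}

int main(void)
{
    struct rx_desc d = { 0 };

    if (alloc_and_map_replenish(&d, 2048) == 0) {
        printf("replenish ok, paddr=%#lx\n", (unsigned long)d.paddr);
        free(d.buf->data);
        free(d.buf);
    }
    return 0;
}
```

Publishing last means a failed check never leaves a half-initialized descriptor visible to the ring.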
396 qdf_nbuf_t nbuf; in __dp_rx_buffers_no_map_lt_replenish() local
449 nbuf = dp_rx_nbuf_alloc(soc, rx_desc_pool); in __dp_rx_buffers_no_map_lt_replenish()
450 if (qdf_unlikely(!nbuf)) { in __dp_rx_buffers_no_map_lt_replenish()
455 paddr = dp_rx_nbuf_sync_no_dsb(soc, nbuf, in __dp_rx_buffers_no_map_lt_replenish()
462 desc_list->rx_desc.nbuf = nbuf; in __dp_rx_buffers_no_map_lt_replenish()
463 dp_rx_set_reuse_nbuf(&desc_list->rx_desc, nbuf); in __dp_rx_buffers_no_map_lt_replenish()
464 desc_list->rx_desc.rx_buf_start = nbuf->data; in __dp_rx_buffers_no_map_lt_replenish()
512 qdf_nbuf_t nbuf; in __dp_rx_buffers_no_map_replenish() local
534 nbuf = dp_rx_nbuf_alloc(soc, rx_desc_pool); in __dp_rx_buffers_no_map_replenish()
535 if (qdf_unlikely(!nbuf)) { in __dp_rx_buffers_no_map_replenish()
542 paddr = dp_rx_nbuf_sync_no_dsb(soc, nbuf, in __dp_rx_buffers_no_map_replenish()
545 QDF_NBUF_CB_PADDR(nbuf) = paddr; in __dp_rx_buffers_no_map_replenish()
548 nbuf); in __dp_rx_buffers_no_map_replenish()
552 nbuf = nbuf_head; in __dp_rx_buffers_no_map_replenish()
557 nbuf_next = nbuf->next; in __dp_rx_buffers_no_map_replenish()
566 (*desc_list)->rx_desc.nbuf = nbuf; in __dp_rx_buffers_no_map_replenish()
567 dp_rx_set_reuse_nbuf(&(*desc_list)->rx_desc, nbuf); in __dp_rx_buffers_no_map_replenish()
568 (*desc_list)->rx_desc.rx_buf_start = nbuf->data; in __dp_rx_buffers_no_map_replenish()
569 (*desc_list)->rx_desc.paddr_buf_start = QDF_NBUF_CB_PADDR(nbuf); in __dp_rx_buffers_no_map_replenish()
579 QDF_NBUF_CB_PADDR(nbuf), in __dp_rx_buffers_no_map_replenish()
584 nbuf = nbuf_next; in __dp_rx_buffers_no_map_replenish()
599 while (nbuf) { in __dp_rx_buffers_no_map_replenish()
600 nbuf_next = nbuf->next; in __dp_rx_buffers_no_map_replenish()
601 dp_rx_nbuf_unmap_pool(soc, rx_desc_pool, nbuf); in __dp_rx_buffers_no_map_replenish()
602 qdf_nbuf_free(nbuf); in __dp_rx_buffers_no_map_replenish()
603 nbuf = nbuf_next; in __dp_rx_buffers_no_map_replenish()
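
__dp_rx_buffers_no_map_replenish() chains new buffers through nbuf->next and, when something fails mid-batch, unwinds with the loop at lines 599-603: save nbuf->next, free the node, advance. A self-contained model of that build-and-unwind pattern, with a toy struct in place of qdf_nbuf_t:

```c
#include <stdlib.h>

/* Stand-in for qdf_nbuf_t; the driver chains buffers via ->next
 * exactly as the __dp_rx_buffers_no_map_replenish() lines show. */
struct nbuf { struct nbuf *next; };

/* Free a whole chain, saving ->next before each free -- the same
 * unwind loop as listing lines 599-603. */
static void free_chain(struct nbuf *head)
{
    while (head) {
        struct nbuf *next = head->next;  /* grab before the node dies */
        free(head);
        head = next;
    }
}

/* Allocate 'count' buffers into a chain; on any failure, unwind. */
static struct nbuf *alloc_chain(int count)
{
    struct nbuf *head = NULL;

    for (int i = 0; i < count; i++) {
        struct nbuf *b = malloc(sizeof(*b));

        if (!b) {                 /* mid-batch failure */
            free_chain(head);     /* release what was already built */
            return NULL;
        }
        b->next = head;           /* push-front keeps this O(1) */
        head = b;
    }
    return head;
}

int main(void)
{
    free_chain(alloc_chain(8));
    return 0;
}
```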
624 qdf_nbuf_t nbuf; in __dp_rx_comp2refill_replenish() local
654 (*desc_list)->rx_desc.nbuf = (*desc_list)->rx_desc.reuse_nbuf; in __dp_rx_comp2refill_replenish()
679 nbuf = cur->rx_desc.reuse_nbuf; in __dp_rx_comp2refill_replenish()
681 cur->rx_desc.nbuf = NULL; in __dp_rx_comp2refill_replenish()
685 if (!nbuf->recycled_for_ds) in __dp_rx_comp2refill_replenish()
686 dp_rx_nbuf_unmap_pool(soc, rx_desc_pool, nbuf); in __dp_rx_comp2refill_replenish()
688 nbuf->recycled_for_ds = 0; in __dp_rx_comp2refill_replenish()
689 nbuf->fast_recycled = 0; in __dp_rx_comp2refill_replenish()
690 qdf_nbuf_free(nbuf); in __dp_rx_comp2refill_replenish()
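
__dp_rx_comp2refill_replenish() drops a parked reuse buffer by detaching it from the descriptor, unmapping it only when recycled_for_ds is clear, resetting both recycle flags, and freeing it. A hedged sketch with illustrative types; the flag semantics are inferred from this excerpt alone:

```c
#include <stdlib.h>
#include <stdbool.h>

/* Illustrative stand-ins; the real code keeps these flags on the nbuf
 * (recycled_for_ds, fast_recycled) and the buffer on the descriptor. */
struct nbuf { bool recycled_for_ds; bool fast_recycled; };
struct rx_desc { struct nbuf *buf; struct nbuf *reuse_buf; };

static void fake_unmap(struct nbuf *b) { (void)b; /* ~ dp_rx_nbuf_unmap_pool() */ }

/* Mirror of the comp2refill drop path: detach, conditionally unmap,
 * clear both recycle flags, then free. */
static void drop_reuse_buf(struct rx_desc *desc)
{
    struct nbuf *b = desc->reuse_buf;

    desc->buf = NULL;              /* descriptor no longer owns it */
    if (!b->recycled_for_ds)       /* recycled buffers stay mapped */
        fake_unmap(b);
    b->recycled_for_ds = false;
    b->fast_recycled = false;
    free(b);                       /* ~ qdf_nbuf_free() */
}

int main(void)
{
    struct nbuf *b = calloc(1, sizeof(*b));

    if (!b)
        return 1;
    struct rx_desc d = { .buf = b, .reuse_buf = b };
    drop_reuse_buf(&d);
    return 0;
}
```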
714 qdf_nbuf_t nbuf; in __dp_pdev_rx_buffers_no_map_attach() local
751 nbuf = dp_rx_nbuf_alloc(soc, rx_desc_pool); in __dp_pdev_rx_buffers_no_map_attach()
752 if (qdf_unlikely(!nbuf)) { in __dp_pdev_rx_buffers_no_map_attach()
757 paddr = dp_rx_nbuf_sync_no_dsb(soc, nbuf, in __dp_pdev_rx_buffers_no_map_attach()
762 qdf_nbuf_free(nbuf); in __dp_pdev_rx_buffers_no_map_attach()
766 desc_list->rx_desc.nbuf = nbuf; in __dp_pdev_rx_buffers_no_map_attach()
767 dp_rx_set_reuse_nbuf(&desc_list->rx_desc, nbuf); in __dp_pdev_rx_buffers_no_map_attach()
768 desc_list->rx_desc.rx_buf_start = nbuf->data; in __dp_pdev_rx_buffers_no_map_attach()
800 qdf_dma_addr_t dp_rx_rep_retrieve_paddr(struct dp_soc *dp_soc, qdf_nbuf_t nbuf, in dp_rx_rep_retrieve_paddr() argument
803 return dp_rx_nbuf_sync_no_dsb(dp_soc, nbuf, buf_size); in dp_rx_rep_retrieve_paddr()
807 qdf_dma_addr_t dp_rx_rep_retrieve_paddr(struct dp_soc *dp_soc, qdf_nbuf_t nbuf, in dp_rx_rep_retrieve_paddr() argument
810 return qdf_nbuf_get_frag_paddr(nbuf, 0); in dp_rx_rep_retrieve_paddr()
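
The two adjacent dp_rx_rep_retrieve_paddr() definitions are build-time alternates: one re-derives the address via a no-DSB sync, the other reads back the fragment address recorded at map time. The excerpt does not show the guarding macro, so the NO_MAP_SYNC flag in this sketch is invented for illustration:

```c
#include <stdint.h>

/* Two build-time variants of one accessor, as in the listing's duplicate
 * dp_rx_rep_retrieve_paddr() definitions; types are stand-ins. */
struct nbuf { uint8_t *data; uintptr_t frag_paddr; };

#ifdef NO_MAP_SYNC
/* Variant 1: re-derive the address with a cache sync (no-map mode). */
static uintptr_t retrieve_paddr(struct nbuf *b, int buf_size)
{
    (void)buf_size;                  /* ~ dp_rx_nbuf_sync_no_dsb() */
    return (uintptr_t)b->data;
}
#else
/* Variant 2: read back the address recorded at map time. */
static uintptr_t retrieve_paddr(struct nbuf *b, int buf_size)
{
    (void)buf_size;                  /* ~ qdf_nbuf_get_frag_paddr() */
    return b->frag_paddr;
}
#endif

int main(void)
{
    struct nbuf b = { (uint8_t *)&b, (uintptr_t)&b };

    return retrieve_paddr(&b, 2048) ? 0 : 1;
}
```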
842 paddr = dp_rx_rep_retrieve_paddr(soc, rx_desc->nbuf, in dp_rx_desc_replenish()
883 rx_desc->nbuf->next = *nbuf_list; in dp_rx_desc_reuse()
884 *nbuf_list = rx_desc->nbuf; in dp_rx_desc_reuse()
1080 nbuf_frag_info.virt_addr.nbuf, in __dp_rx_buffers_replenish()
1129 qdf_nbuf_t nbuf; in dp_rx_deliver_raw() local
1131 nbuf = nbuf_list; in dp_rx_deliver_raw()
1132 while (nbuf) { in dp_rx_deliver_raw()
1133 qdf_nbuf_t next = qdf_nbuf_next(nbuf); in dp_rx_deliver_raw()
1135 DP_RX_LIST_APPEND(deliver_list_head, deliver_list_tail, nbuf); in dp_rx_deliver_raw()
1139 qdf_nbuf_len(nbuf), link_id); in dp_rx_deliver_raw()
1141 nbuf = next; in dp_rx_deliver_raw()
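
dp_rx_deliver_raw() walks the chain the safe way: capture qdf_nbuf_next() before DP_RX_LIST_APPEND rewrites the node's next pointer. A standalone model of that walk with a plausible tail-append (the real DP_RX_LIST_APPEND body is not in the excerpt):

```c
#include <stddef.h>
#include <stdio.h>

struct nbuf { struct nbuf *next; unsigned len; };

/* Tail-append, the job DP_RX_LIST_APPEND does in the listing. */
static void list_append(struct nbuf **head, struct nbuf **tail,
                        struct nbuf *b)
{
    b->next = NULL;
    if (*tail)
        (*tail)->next = b;
    else
        *head = b;
    *tail = b;
}

/* Save ->next before each append, because appending rewrites it --
 * the same order dp_rx_deliver_raw() uses at lines 1133-1141. */
static struct nbuf *build_deliver_list(struct nbuf *in)
{
    struct nbuf *head = NULL, *tail = NULL;

    while (in) {
        struct nbuf *next = in->next;   /* must come first */
        list_append(&head, &tail, in);
        in = next;
    }
    return head;
}

int main(void)
{
    struct nbuf a = { NULL, 1 }, b = { NULL, 2 };

    a.next = &b;
    for (struct nbuf *p = build_deliver_list(&a); p; p = p->next)
        printf("len=%u\n", p->len);
    return 0;
}
```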
1153 struct dp_txrx_peer *ta_peer, qdf_nbuf_t nbuf) in dp_rx_da_learn() argument
1174 qdf_nbuf_t nbuf) in dp_classify_critical_pkts() argument
1177 vdev->tx_classify_critical_pkt_cb(vdev->osif_vdev, nbuf); in dp_classify_critical_pkts()
1182 qdf_nbuf_t nbuf) in dp_classify_critical_pkts() argument
1189 void dp_rx_nbuf_queue_mapping_set(qdf_nbuf_t nbuf, uint8_t ring_id) in dp_rx_nbuf_queue_mapping_set() argument
1191 qdf_nbuf_set_queue_mapping(nbuf, ring_id); in dp_rx_nbuf_queue_mapping_set()
1195 void dp_rx_nbuf_queue_mapping_set(qdf_nbuf_t nbuf, uint8_t ring_id) in dp_rx_nbuf_queue_mapping_set() argument
1201 uint8_t *rx_tlv_hdr, qdf_nbuf_t nbuf, in dp_rx_intrabss_mcbc_fwd() argument
1209 nbuf)) in dp_rx_intrabss_mcbc_fwd()
1212 if (!dp_rx_check_ndi_mdns_fwding(ta_peer, nbuf, link_id)) in dp_rx_intrabss_mcbc_fwd()
1221 nbuf_copy = qdf_nbuf_copy(nbuf); in dp_rx_intrabss_mcbc_fwd()
1225 len = QDF_NBUF_CB_RX_PKT_LEN(nbuf); in dp_rx_intrabss_mcbc_fwd()
1254 uint8_t *rx_tlv_hdr, qdf_nbuf_t nbuf, in dp_rx_intrabss_ucast_fwd() argument
1260 len = QDF_NBUF_CB_RX_PKT_LEN(nbuf); in dp_rx_intrabss_ucast_fwd()
1265 if (qdf_unlikely(qdf_nbuf_is_frag(nbuf))) { in dp_rx_intrabss_ucast_fwd()
1266 if (qdf_nbuf_linearize(nbuf) == -ENOMEM) in dp_rx_intrabss_ucast_fwd()
1269 nbuf = qdf_nbuf_unshare(nbuf); in dp_rx_intrabss_ucast_fwd()
1270 if (!nbuf) { in dp_rx_intrabss_ucast_fwd()
1284 qdf_mem_set(nbuf->cb, 0x0, sizeof(nbuf->cb)); in dp_rx_intrabss_ucast_fwd()
1285 dp_classify_critical_pkts(soc, ta_peer->vdev, nbuf); in dp_rx_intrabss_ucast_fwd()
1289 tx_vdev_id, nbuf)) { in dp_rx_intrabss_ucast_fwd()
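
The two intra-BSS paths differ by destination: the mcbc path (line 1221) forwards a qdf_nbuf_copy() so the original can still reach the stack, while the ucast path linearizes a fragmented buffer and unshares it before wiping nbuf->cb, so the wipe cannot corrupt another owner's view. A sketch of that ucast guard sequence, with toy linearize/unshare stand-ins:

```c
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>

/* Illustrative buffer with the two properties the ucast-forward path
 * cares about: fragmentation and sharing. */
struct nbuf {
    bool is_frag;        /* scattered across fragments? */
    int  refcount;       /* shared with another owner? */
    unsigned char cb[48];
};

static bool linearize(struct nbuf *b) { b->is_frag = false; return true; }

/* Clone-on-share: return a private copy if someone else holds a ref. */
static struct nbuf *unshare(struct nbuf *b)
{
    if (b->refcount == 1)
        return b;

    struct nbuf *copy = malloc(sizeof(*copy));

    if (copy) {
        *copy = *b;
        copy->refcount = 1;
        b->refcount--;
    }
    return copy;
}

/* The guard sequence from dp_rx_intrabss_ucast_fwd(): linearize a
 * fragmented buffer, privatize a shared one, only then wipe cb. */
static struct nbuf *prepare_for_tx(struct nbuf *b)
{
    if (b->is_frag && !linearize(b))    /* ~ qdf_nbuf_linearize() */
        return NULL;
    b = unshare(b);                     /* ~ qdf_nbuf_unshare() */
    if (!b)
        return NULL;
    memset(b->cb, 0, sizeof(b->cb));    /* safe: we own it now */
    return b;
}

int main(void)
{
    struct nbuf b = { .is_frag = true, .refcount = 2 };
    struct nbuf *priv = prepare_for_tx(&b);

    if (priv && priv != &b)
        free(priv);                     /* we received a private copy */
    return 0;
}
```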
1306 void dp_rx_fill_mesh_stats(struct dp_vdev *vdev, qdf_nbuf_t nbuf, in dp_rx_fill_mesh_stats() argument
1336 if (qdf_nbuf_is_rx_chfrag_start(nbuf)) in dp_rx_fill_mesh_stats()
1339 if (qdf_nbuf_is_rx_chfrag_end(nbuf)) in dp_rx_fill_mesh_stats()
1407 qdf_nbuf_set_rx_fctx_type(nbuf, (void *)rx_info, CB_FTYPE_MESH_RX_INFO); in dp_rx_fill_mesh_stats()
1420 QDF_STATUS dp_rx_filter_mesh_packets(struct dp_vdev *vdev, qdf_nbuf_t nbuf, in dp_rx_filter_mesh_packets() argument
1473 void dp_rx_fill_mesh_stats(struct dp_vdev *vdev, qdf_nbuf_t nbuf, in dp_rx_fill_mesh_stats() argument
1478 QDF_STATUS dp_rx_filter_mesh_packets(struct dp_vdev *vdev, qdf_nbuf_t nbuf, in dp_rx_filter_mesh_packets() argument
1570 msg.nbuf = mpdu; in dp_rx_process_invalid_peer()
1774 qdf_nbuf_t nbuf, in dp_rx_adjust_nbuf_len() argument
1787 qdf_nbuf_set_pktlen(nbuf, buf_size); in dp_rx_adjust_nbuf_len()
1791 qdf_nbuf_set_pktlen(nbuf, (*mpdu_len + pkt_hdr_size)); in dp_rx_adjust_nbuf_len()
1808 qdf_nbuf_t nbuf) in dp_get_l3_hdr_pad_len() argument
1814 while (nbuf) { in dp_get_l3_hdr_pad_len()
1815 if (!qdf_nbuf_is_rx_chfrag_cont(nbuf)) { in dp_get_l3_hdr_pad_len()
1817 rx_tlv_hdr = qdf_nbuf_data(nbuf); in dp_get_l3_hdr_pad_len()
1824 nbuf = nbuf->next; in dp_get_l3_hdr_pad_len()
1830 qdf_nbuf_t dp_rx_sg_create(struct dp_soc *soc, qdf_nbuf_t nbuf) in dp_rx_sg_create() argument
1843 mpdu_len = QDF_NBUF_CB_RX_PKT_LEN(nbuf); in dp_rx_sg_create()
1850 frag_tail = nbuf; in dp_rx_sg_create()
1855 QDF_NBUF_CB_RX_PKT_LEN(nbuf) = in dp_rx_sg_create()
1863 if (qdf_nbuf_is_rx_chfrag_start(nbuf) && in dp_rx_sg_create()
1864 qdf_nbuf_is_rx_chfrag_end(nbuf)) { in dp_rx_sg_create()
1865 qdf_nbuf_set_pktlen(nbuf, mpdu_len + soc->rx_pkt_tlv_size); in dp_rx_sg_create()
1866 qdf_nbuf_pull_head(nbuf, soc->rx_pkt_tlv_size); in dp_rx_sg_create()
1867 return nbuf; in dp_rx_sg_create()
1870 l3_hdr_pad_offset = dp_get_l3_hdr_pad_len(soc, nbuf); in dp_rx_sg_create()
1879 parent = nbuf; in dp_rx_sg_create()
1880 frag_list = nbuf->next; in dp_rx_sg_create()
1881 nbuf = nbuf->next; in dp_rx_sg_create()
1916 last_nbuf = dp_rx_adjust_nbuf_len(soc, nbuf, &mpdu_len, 0); in dp_rx_sg_create()
1917 qdf_nbuf_pull_head(nbuf, in dp_rx_sg_create()
1919 frag_list_len += qdf_nbuf_len(nbuf); in dp_rx_sg_create()
1922 next = nbuf->next; in dp_rx_sg_create()
1923 nbuf->next = NULL; in dp_rx_sg_create()
1925 } else if (qdf_nbuf_is_rx_chfrag_end(nbuf)) { in dp_rx_sg_create()
1929 nbuf = nbuf->next; in dp_rx_sg_create()
1932 qdf_nbuf_set_rx_chfrag_start(nbuf, 0); in dp_rx_sg_create()
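
dp_rx_sg_create() collapses a chfrag chain into one logical MSDU. A buffer that is both chfrag_start and chfrag_end is returned alone once its TLVs are pulled (lines 1863-1867); otherwise the head becomes the parent, the rest move to its frag list, and each fragment sheds its own per-buffer TLV header while the parent's length grows to cover them all. A simplified model of the multi-fragment case, with TLV_SIZE standing in for soc->rx_pkt_tlv_size:

```c
#include <stddef.h>

#define TLV_SIZE 4   /* made-up stand-in for soc->rx_pkt_tlv_size */

struct nbuf {
    struct nbuf *next;       /* chain as received from the ring */
    struct nbuf *frag_list;  /* attached continuation fragments */
    unsigned char *data;
    unsigned len;
};

static void pull_head(struct nbuf *b, unsigned n)
{
    b->data += n;            /* ~ qdf_nbuf_pull_head(): drop hdr bytes */
    b->len  -= n;
}

/* First buffer becomes the parent; the rest hang off frag_list with
 * their per-buffer TLV headers stripped -- the dp_rx_sg_create() shape. */
static struct nbuf *sg_create(struct nbuf *chain)
{
    struct nbuf *parent = chain;

    pull_head(parent, TLV_SIZE);
    parent->frag_list = chain->next;   /* fragments ride the parent */
    parent->next = NULL;

    unsigned frag_len = 0;

    for (struct nbuf *f = parent->frag_list; f; f = f->next) {
        pull_head(f, TLV_SIZE);        /* each frag carries its own TLVs */
        frag_len += f->len;
    }
    parent->len += frag_len;           /* parent reports the full MSDU */
    return parent;
}

int main(void)
{
    unsigned char buf0[64] = { 0 }, buf1[64] = { 0 };
    struct nbuf f1 = { NULL, NULL, buf1, 32 };
    struct nbuf head = { &f1, NULL, buf0, 40 };
    struct nbuf *msdu = sg_create(&head);

    return msdu->len == (40 - TLV_SIZE) + (32 - TLV_SIZE) ? 0 : 1;
}
```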
1957 qdf_nbuf_t nbuf) in dp_rx_compute_tid_delay() argument
1960 uint32_t to_stack = qdf_nbuf_get_timedelta_ms(nbuf); in dp_rx_compute_tid_delay()
1966 void dp_rx_compute_delay(struct dp_vdev *vdev, qdf_nbuf_t nbuf) in dp_rx_compute_delay() argument
1968 uint8_t ring_id = QDF_NBUF_CB_RX_CTX_ID(nbuf); in dp_rx_compute_delay()
1970 uint32_t to_stack = qdf_nbuf_get_timedelta_ms(nbuf); in dp_rx_compute_delay()
1971 uint8_t tid = qdf_nbuf_get_tid_val(nbuf); in dp_rx_compute_delay()
2081 dp_set_nbuf_band(struct dp_peer *peer, qdf_nbuf_t nbuf) in dp_set_nbuf_band() argument
2085 link_id = dp_rx_get_stats_arr_idx_from_link_id(nbuf, peer->txrx_peer); in dp_set_nbuf_band()
2086 dp_rx_set_nbuf_band(nbuf, peer->txrx_peer, link_id); in dp_set_nbuf_band()
2090 dp_set_nbuf_band(struct dp_peer *peer, qdf_nbuf_t nbuf) in dp_set_nbuf_band() argument
2386 qdf_nbuf_t nbuf, next; in dp_rx_raw_pkt_mld_addr_conv() local
2404 nbuf = nbuf_head; in dp_rx_raw_pkt_mld_addr_conv()
2405 while (nbuf) { in dp_rx_raw_pkt_mld_addr_conv()
2406 next = nbuf->next; in dp_rx_raw_pkt_mld_addr_conv()
2407 wh = (struct ieee80211_frame *)qdf_nbuf_data(nbuf); in dp_rx_raw_pkt_mld_addr_conv()
2412 nbuf = next; in dp_rx_raw_pkt_mld_addr_conv()
2467 #define dp_rx_msdu_stats_update_prot_cnts(vdev_hdl, nbuf, txrx_peer) \ argument
2475 nbuf_local = nbuf; \
2487 #define dp_rx_msdu_stats_update_prot_cnts(vdev_hdl, nbuf, txrx_peer) argument
2512 dp_rx_rates_stats_update(struct dp_soc *soc, qdf_nbuf_t nbuf, in dp_rx_rates_stats_update() argument
2525 dp_rx_data_is_specific(soc->hal_soc, rx_tlv_hdr, nbuf)) { in dp_rx_rates_stats_update()
2558 dp_rx_rates_stats_update(struct dp_soc *soc, qdf_nbuf_t nbuf, in dp_rx_rates_stats_update() argument
2580 void dp_rx_msdu_extd_stats_update(struct dp_soc *soc, qdf_nbuf_t nbuf, in dp_rx_msdu_extd_stats_update() argument
2600 tid = qdf_nbuf_get_tid_val(nbuf); in dp_rx_msdu_extd_stats_update()
2626 ((mcs < MAX_MCS) && QDF_NBUF_CB_RX_CHFRAG_START(nbuf)), in dp_rx_msdu_extd_stats_update()
2629 ((mcs >= MAX_MCS) && QDF_NBUF_CB_RX_CHFRAG_START(nbuf)), in dp_rx_msdu_extd_stats_update()
2658 dp_rx_rates_stats_update(soc, nbuf, rx_tlv_hdr, txrx_peer, in dp_rx_msdu_extd_stats_update()
2663 void dp_rx_msdu_extd_stats_update(struct dp_soc *soc, qdf_nbuf_t nbuf, in dp_rx_msdu_extd_stats_update() argument
2674 qdf_nbuf_t nbuf, uint8_t link_id) in dp_peer_update_rx_pkt_per_lmac() argument
2676 uint8_t lmac_id = qdf_nbuf_get_lmac_id(nbuf); in dp_peer_update_rx_pkt_per_lmac()
2680 lmac_id, QDF_NBUF_CB_RX_VDEV_ID(nbuf)); in dp_peer_update_rx_pkt_per_lmac()
2690 QDF_NBUF_CB_RX_PKT_LEN(nbuf), in dp_peer_update_rx_pkt_per_lmac()
2696 qdf_nbuf_t nbuf, uint8_t link_id) in dp_peer_update_rx_pkt_per_lmac() argument
2701 void dp_rx_msdu_stats_update(struct dp_soc *soc, qdf_nbuf_t nbuf, in dp_rx_msdu_stats_update() argument
2712 uint16_t msdu_len = QDF_NBUF_CB_RX_PKT_LEN(nbuf); in dp_rx_msdu_stats_update()
2714 dp_rx_msdu_stats_update_prot_cnts(vdev, nbuf, txrx_peer); in dp_rx_msdu_stats_update()
2715 is_not_amsdu = qdf_nbuf_is_rx_chfrag_start(nbuf) & in dp_rx_msdu_stats_update()
2716 qdf_nbuf_is_rx_chfrag_end(nbuf); in dp_rx_msdu_stats_update()
2724 qdf_nbuf_is_rx_retry_flag(nbuf), link_id); in dp_rx_msdu_stats_update()
2725 dp_peer_update_rx_pkt_per_lmac(txrx_peer, nbuf, link_id); in dp_rx_msdu_stats_update()
2728 if (qdf_unlikely(qdf_nbuf_is_da_mcbc(nbuf) && in dp_rx_msdu_stats_update()
2730 eh = (qdf_ether_header_t *)qdf_nbuf_data(nbuf); in dp_rx_msdu_stats_update()
2746 dp_rx_msdu_extd_stats_update(soc, nbuf, rx_tlv_hdr, in dp_rx_msdu_stats_update()
2777 qdf_nbuf_t nbuf) in dp_rx_is_udp_allowed_over_roam_peer() argument
2789 QDF_MAC_ADDR_SIZE) && (qdf_nbuf_is_ipv4_udp_pkt(nbuf) || in dp_rx_is_udp_allowed_over_roam_peer()
2790 qdf_nbuf_is_ipv6_udp_pkt(nbuf))) in dp_rx_is_udp_allowed_over_roam_peer()
2798 qdf_nbuf_t nbuf) in dp_rx_is_udp_allowed_over_roam_peer() argument
2813 dp_rx_nbuf_band_set(struct dp_soc *soc, qdf_nbuf_t nbuf) in dp_rx_nbuf_band_set() argument
2821 mac_addr = (struct qdf_mac_addr *)(qdf_nbuf_data(nbuf) + in dp_rx_nbuf_band_set()
2829 link_id = QDF_NBUF_CB_RX_LOGICAL_LINK_ID(nbuf); in dp_rx_nbuf_band_set()
2830 qdf_nbuf_rx_set_band(nbuf, txrx_peer->ll_band[link_id]); in dp_rx_nbuf_band_set()
2837 dp_rx_nbuf_band_set(struct dp_soc *soc, qdf_nbuf_t nbuf) in dp_rx_nbuf_band_set() argument
2842 void dp_rx_deliver_to_stack_no_peer(struct dp_soc *soc, qdf_nbuf_t nbuf) in dp_rx_deliver_to_stack_no_peer() argument
2858 peer_id = QDF_NBUF_CB_RX_PEER_ID(nbuf); in dp_rx_deliver_to_stack_no_peer()
2862 vdev_id = QDF_NBUF_CB_RX_VDEV_ID(nbuf); in dp_rx_deliver_to_stack_no_peer()
2867 if (qdf_unlikely(qdf_nbuf_is_frag(nbuf))) in dp_rx_deliver_to_stack_no_peer()
2870 rx_tlv_hdr = qdf_nbuf_data(nbuf); in dp_rx_deliver_to_stack_no_peer()
2874 msdu_len = QDF_NBUF_CB_RX_PKT_LEN(nbuf); in dp_rx_deliver_to_stack_no_peer()
2876 QDF_NBUF_CB_RX_NUM_ELEMENTS_IN_LIST(nbuf) = 1; in dp_rx_deliver_to_stack_no_peer()
2878 qdf_nbuf_set_pktlen(nbuf, pkt_len); in dp_rx_deliver_to_stack_no_peer()
2879 qdf_nbuf_pull_head(nbuf, soc->rx_pkt_tlv_size + l2_hdr_offset); in dp_rx_deliver_to_stack_no_peer()
2881 is_special_frame = dp_rx_is_special_frame(nbuf, frame_mask); in dp_rx_deliver_to_stack_no_peer()
2885 nbuf)) { in dp_rx_deliver_to_stack_no_peer()
2886 dp_rx_nbuf_band_set(soc, nbuf); in dp_rx_deliver_to_stack_no_peer()
2887 qdf_nbuf_set_exc_frame(nbuf, 1); in dp_rx_deliver_to_stack_no_peer()
2889 vdev->osif_rx(vdev->osif_vdev, nbuf)) in dp_rx_deliver_to_stack_no_peer()
2911 qdf_nbuf_set_exc_frame(nbuf, 1); in dp_rx_deliver_to_stack_no_peer()
2914 dp_rx_enqueue_rx(peer, peer->txrx_peer, nbuf)) { in dp_rx_deliver_to_stack_no_peer()
2934 QDF_NBUF_CB_RX_PKT_LEN(nbuf)); in dp_rx_deliver_to_stack_no_peer()
2935 dp_rx_nbuf_free(nbuf); in dp_rx_deliver_to_stack_no_peer()
2940 void dp_rx_deliver_to_stack_no_peer(struct dp_soc *soc, qdf_nbuf_t nbuf) in dp_rx_deliver_to_stack_no_peer() argument
2943 QDF_NBUF_CB_RX_PKT_LEN(nbuf)); in dp_rx_deliver_to_stack_no_peer()
2944 dp_rx_nbuf_free(nbuf); in dp_rx_deliver_to_stack_no_peer()
2982 void dp_rx_skip_tlvs(struct dp_soc *soc, qdf_nbuf_t nbuf, uint32_t l3_padding) in dp_rx_skip_tlvs() argument
2984 QDF_NBUF_CB_RX_PACKET_L3_HDR_PAD(nbuf) = l3_padding; in dp_rx_skip_tlvs()
2985 qdf_nbuf_pull_head(nbuf, l3_padding + soc->rx_pkt_tlv_size); in dp_rx_skip_tlvs()
2988 void dp_rx_skip_tlvs(struct dp_soc *soc, qdf_nbuf_t nbuf, uint32_t l3_padding) in dp_rx_skip_tlvs() argument
2990 qdf_nbuf_pull_head(nbuf, l3_padding + soc->rx_pkt_tlv_size); in dp_rx_skip_tlvs()
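
Both dp_rx_skip_tlvs() variants advance past the RX TLV header and the L3 padding in a single pull; the first also records the padding in the nbuf control block. A tiny stand-in model (RX_PKT_TLV_SIZE is a made-up constant):

```c
struct nbuf {
    unsigned char *data;
    unsigned len;
    unsigned l3_hdr_pad;   /* ~ QDF_NBUF_CB_RX_PACKET_L3_HDR_PAD */
};

#define RX_PKT_TLV_SIZE 4  /* stand-in for soc->rx_pkt_tlv_size */

/* Record the L3 padding for later consumers, then advance past both
 * the RX TLV header and that padding in one pull -- dp_rx_skip_tlvs(). */
static void skip_tlvs(struct nbuf *b, unsigned l3_padding)
{
    b->l3_hdr_pad = l3_padding;        /* one build variant skips this */
    b->data += l3_padding + RX_PKT_TLV_SIZE;
    b->len  -= l3_padding + RX_PKT_TLV_SIZE;
}

int main(void)
{
    unsigned char raw[128] = { 0 };
    struct nbuf b = { raw, 128, 0 };

    skip_tlvs(&b, 2);                  /* 2-byte L3 pad as an example */
    return b.len == 128 - 2 - RX_PKT_TLV_SIZE ? 0 : 1;
}
```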
2997 bool dp_rx_is_raw_frame_dropped(qdf_nbuf_t nbuf) in dp_rx_is_raw_frame_dropped() argument
2999 if (qdf_nbuf_is_raw_frame(nbuf)) { in dp_rx_is_raw_frame_dropped()
3000 dp_rx_nbuf_free(nbuf); in dp_rx_is_raw_frame_dropped()
3009 void dp_rx_update_stats(struct dp_soc *soc, qdf_nbuf_t nbuf) in dp_rx_update_stats() argument
3012 QDF_NBUF_CB_RX_PKT_LEN(nbuf)); in dp_rx_update_stats()
3026 void dp_rx_deliver_to_pkt_capture_no_peer(struct dp_soc *soc, qdf_nbuf_t nbuf, in dp_rx_deliver_to_pkt_capture_no_peer() argument
3031 soc, nbuf, HTT_INVALID_VDEV, in dp_rx_deliver_to_pkt_capture_no_peer()
3064 (nbuf_frag_info_t->virt_addr).nbuf = in dp_pdev_nbuf_alloc_and_map()
3070 (nbuf_frag_info_t->virt_addr).nbuf = in dp_pdev_nbuf_alloc_and_map()
3075 if (!((nbuf_frag_info_t->virt_addr).nbuf)) { in dp_pdev_nbuf_alloc_and_map()
3082 (nbuf_frag_info_t->virt_addr).nbuf, in dp_pdev_nbuf_alloc_and_map()
3087 qdf_nbuf_free((nbuf_frag_info_t->virt_addr).nbuf); in dp_pdev_nbuf_alloc_and_map()
3094 qdf_nbuf_get_frag_paddr((nbuf_frag_info_t->virt_addr).nbuf, 0); in dp_pdev_nbuf_alloc_and_map()
3096 ret = dp_check_paddr(dp_soc, &((nbuf_frag_info_t->virt_addr).nbuf), in dp_pdev_nbuf_alloc_and_map()
3122 qdf_nbuf_t nbuf; in dp_pdev_rx_buffers_attach() local
3233 nbuf = nf_info[buffer_index].virt_addr.nbuf; in dp_pdev_rx_buffers_attach()
3253 dp_soc, nbuf, in dp_pdev_rx_buffers_attach()
3259 QDF_NBUF_CB_PADDR(nbuf)), in dp_pdev_rx_buffers_attach()
3260 QDF_NBUF_CB_PADDR(nbuf), in dp_pdev_rx_buffers_attach()
3475 qdf_nbuf_t nbuf, uint32_t frame_mask, in dp_rx_deliver_special_frame() argument
3485 if (qdf_unlikely(qdf_nbuf_is_frag(nbuf))) { in dp_rx_deliver_special_frame()
3488 msdu_len = QDF_NBUF_CB_RX_PKT_LEN(nbuf); in dp_rx_deliver_special_frame()
3490 qdf_nbuf_set_pktlen(nbuf, msdu_len + skip_len); in dp_rx_deliver_special_frame()
3493 QDF_NBUF_CB_RX_NUM_ELEMENTS_IN_LIST(nbuf) = 1; in dp_rx_deliver_special_frame()
3494 dp_rx_set_hdr_pad(nbuf, l2_hdr_offset); in dp_rx_deliver_special_frame()
3495 qdf_nbuf_pull_head(nbuf, skip_len); in dp_rx_deliver_special_frame()
3498 dp_rx_send_pktlog(soc, txrx_peer->vdev->pdev, nbuf, in dp_rx_deliver_special_frame()
3502 if (dp_rx_is_special_frame(nbuf, frame_mask)) { in dp_rx_deliver_special_frame()
3505 qdf_nbuf_set_exc_frame(nbuf, 1); in dp_rx_deliver_special_frame()
3507 nbuf, NULL); in dp_rx_deliver_special_frame()
3516 bool dp_rx_multipass_process(struct dp_txrx_peer *txrx_peer, qdf_nbuf_t nbuf, in dp_rx_multipass_process() argument
3524 vethhdrp = (struct vlan_ethhdr *)qdf_nbuf_data(nbuf); in dp_rx_multipass_process()
3539 dp_tx_remove_vlan_tag(txrx_peer->vdev, nbuf); in dp_rx_multipass_process()