/linux-4.19.296/drivers/pci/controller/dwc/

pci-exynos.c
   76  struct exynos_pcie *ep);
   77  int (*get_clk_resources)(struct exynos_pcie *ep);
   78  int (*init_clk_resources)(struct exynos_pcie *ep);
   79  void (*deinit_clk_resources)(struct exynos_pcie *ep);
   83  struct exynos_pcie *ep)  in exynos5440_pcie_get_mem_resources() argument
   85  struct dw_pcie *pci = ep->pci;  in exynos5440_pcie_get_mem_resources()
   89  ep->mem_res = devm_kzalloc(dev, sizeof(*ep->mem_res), GFP_KERNEL);  in exynos5440_pcie_get_mem_resources()
   90  if (!ep->mem_res)  in exynos5440_pcie_get_mem_resources()
   94  ep->mem_res->elbi_base = devm_ioremap_resource(dev, res);  in exynos5440_pcie_get_mem_resources()
   95  if (IS_ERR(ep->mem_res->elbi_base))  in exynos5440_pcie_get_mem_resources()
       [all …]

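The pci-exynos.c matches show the usual devm-managed probe sequence: allocate per-endpoint state with devm_kzalloc(), map a register window with devm_ioremap_resource(), and check the result with IS_ERR(). A minimal sketch of that sequence follows; the my_* names are invented, and the platform_get_resource() lookup is an assumption, since the listing does not show how res is obtained.

```c
#include <linux/device.h>
#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/io.h>
#include <linux/platform_device.h>

struct my_ep_mem_res {				/* stand-in for the driver's mem_res */
	void __iomem *elbi_base;
};

static int my_ep_get_mem_resources(struct platform_device *pdev,
				   struct my_ep_mem_res **out)
{
	struct device *dev = &pdev->dev;
	struct my_ep_mem_res *mem_res;
	struct resource *res;

	/* devm_kzalloc(): freed automatically when the device is unbound */
	mem_res = devm_kzalloc(dev, sizeof(*mem_res), GFP_KERNEL);
	if (!mem_res)
		return -ENOMEM;

	/* The listing does not show how 'res' is obtained; a plain indexed
	 * lookup is assumed here (the driver may look it up by name). */
	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	mem_res->elbi_base = devm_ioremap_resource(dev, res);
	if (IS_ERR(mem_res->elbi_base))
		return PTR_ERR(mem_res->elbi_base);

	*out = mem_res;
	return 0;
}
```
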
pcie-designware-ep.c
   15  void dw_pcie_ep_linkup(struct dw_pcie_ep *ep)  in dw_pcie_ep_linkup() argument
   17  struct pci_epc *epc = ep->epc;  in dw_pcie_ep_linkup()
   79  struct dw_pcie_ep *ep = epc_get_drvdata(epc);  in dw_pcie_ep_write_header() local
   80  struct dw_pcie *pci = to_dw_pcie_from_ep(ep);  in dw_pcie_ep_write_header()
  101  static int dw_pcie_ep_inbound_atu(struct dw_pcie_ep *ep, enum pci_barno bar,  in dw_pcie_ep_inbound_atu() argument
  107  struct dw_pcie *pci = to_dw_pcie_from_ep(ep);  in dw_pcie_ep_inbound_atu()
  109  free_win = find_first_zero_bit(ep->ib_window_map, ep->num_ib_windows);  in dw_pcie_ep_inbound_atu()
  110  if (free_win >= ep->num_ib_windows) {  in dw_pcie_ep_inbound_atu()
  122  ep->bar_to_atu[bar] = free_win;  in dw_pcie_ep_inbound_atu()
  123  set_bit(free_win, ep->ib_window_map);  in dw_pcie_ep_inbound_atu()
       [all …]

pcie-designware-plat.c
   70  static void dw_plat_pcie_ep_init(struct dw_pcie_ep *ep)  in dw_plat_pcie_ep_init() argument
   72  struct dw_pcie *pci = to_dw_pcie_from_ep(ep);  in dw_plat_pcie_ep_init()
   73  struct pci_epc *epc = ep->epc;  in dw_plat_pcie_ep_init()
   83  static int dw_plat_pcie_ep_raise_irq(struct dw_pcie_ep *ep, u8 func_no,  in dw_plat_pcie_ep_raise_irq() argument
   87  struct dw_pcie *pci = to_dw_pcie_from_ep(ep);  in dw_plat_pcie_ep_raise_irq()
   91  return dw_pcie_ep_raise_legacy_irq(ep, func_no);  in dw_plat_pcie_ep_raise_irq()
   93  return dw_pcie_ep_raise_msi_irq(ep, func_no, interrupt_num);  in dw_plat_pcie_ep_raise_irq()
   95  return dw_pcie_ep_raise_msix_irq(ep, func_no, interrupt_num);  in dw_plat_pcie_ep_raise_irq()
  141  struct dw_pcie_ep *ep;  in dw_plat_add_pcie_ep() local
  146  ep = &pci->ep;  in dw_plat_add_pcie_ep()
       [all …]

pcie-designware.h
  181  void (*ep_init)(struct dw_pcie_ep *ep);
  182  int (*raise_irq)(struct dw_pcie_ep *ep, u8 func_no,
  222  struct dw_pcie_ep ep;  member
  229  container_of((endpoint), struct dw_pcie, ep)
  348  void dw_pcie_ep_linkup(struct dw_pcie_ep *ep);
  349  int dw_pcie_ep_init(struct dw_pcie_ep *ep);
  350  void dw_pcie_ep_exit(struct dw_pcie_ep *ep);
  351  int dw_pcie_ep_raise_legacy_irq(struct dw_pcie_ep *ep, u8 func_no);
  352  int dw_pcie_ep_raise_msi_irq(struct dw_pcie_ep *ep, u8 func_no,
  354  int dw_pcie_ep_raise_msix_irq(struct dw_pcie_ep *ep, u8 func_no,
       [all …]

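Read together, pcie-designware-plat.c and pcie-designware.h spell out the contract for a DesignWare-based endpoint driver: implement the two dw_pcie_ep_ops callbacks, point pci->ep.ops at them, and call dw_pcie_ep_init(). A hypothetical driver wired the same way is sketched below; the my_* names are invented, and the trailing raise_irq parameters (truncated at line 182) are assumed to be enum pci_epc_irq_type and u16 interrupt_num, matching the dispatch visible in pcie-designware-plat.c lines 83-95.

```c
#include <linux/errno.h>
#include <linux/pci-epc.h>

#include "pcie-designware.h"	/* private header in drivers/pci/controller/dwc/ */

static void my_pcie_ep_init(struct dw_pcie_ep *ep)
{
	struct dw_pcie *pci = to_dw_pcie_from_ep(ep);

	/* Controller-specific setup (e.g. resetting BARs) would go here. */
	(void)pci;
}

static int my_pcie_ep_raise_irq(struct dw_pcie_ep *ep, u8 func_no,
				enum pci_epc_irq_type type, u16 interrupt_num)
{
	switch (type) {
	case PCI_EPC_IRQ_LEGACY:
		return dw_pcie_ep_raise_legacy_irq(ep, func_no);
	case PCI_EPC_IRQ_MSI:
		return dw_pcie_ep_raise_msi_irq(ep, func_no, interrupt_num);
	case PCI_EPC_IRQ_MSIX:
		return dw_pcie_ep_raise_msix_irq(ep, func_no, interrupt_num);
	default:
		return -EINVAL;
	}
}

static struct dw_pcie_ep_ops my_pcie_ep_ops = {
	.ep_init	= my_pcie_ep_init,
	.raise_irq	= my_pcie_ep_raise_irq,
};

static int my_add_pcie_ep(struct dw_pcie *pci)
{
	struct dw_pcie_ep *ep = &pci->ep;	/* 'ep' is embedded in dw_pcie (line 222) */

	ep->ops = &my_pcie_ep_ops;
	return dw_pcie_ep_init(ep);		/* hands the endpoint over to the core */
}
```
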
/linux-4.19.296/drivers/misc/mic/scif/

scif_epd.c
   21  void scif_cleanup_ep_qp(struct scif_endpt *ep)  in scif_cleanup_ep_qp() argument
   23  struct scif_qp *qp = ep->qp_info.qp;  in scif_cleanup_ep_qp()
   27  qp->outbound_q.size, ep->remote_dev);  in scif_cleanup_ep_qp()
   32  sizeof(struct scif_qp), ep->remote_dev);  in scif_cleanup_ep_qp()
   36  scif_unmap_single(qp->local_qp, ep->remote_dev,  in scif_cleanup_ep_qp()
   41  scif_unmap_single(qp->local_buf, ep->remote_dev,  in scif_cleanup_ep_qp()
   49  struct scif_endpt *ep = endpt;  in scif_teardown_ep() local
   50  struct scif_qp *qp = ep->qp_info.qp;  in scif_teardown_ep()
   53  spin_lock(&ep->lock);  in scif_teardown_ep()
   54  scif_cleanup_ep_qp(ep);  in scif_teardown_ep()
       [all …]

scif_api.c
   53  struct scif_endpt *ep;  in scif_open() local
   57  ep = kzalloc(sizeof(*ep), GFP_KERNEL);  in scif_open()
   58  if (!ep)  in scif_open()
   61  ep->qp_info.qp = kzalloc(sizeof(*ep->qp_info.qp), GFP_KERNEL);  in scif_open()
   62  if (!ep->qp_info.qp)  in scif_open()
   65  err = scif_anon_inode_getfile(ep);  in scif_open()
   69  spin_lock_init(&ep->lock);  in scif_open()
   70  mutex_init(&ep->sendlock);  in scif_open()
   71  mutex_init(&ep->recvlock);  in scif_open()
   73  scif_rma_ep_init(ep);  in scif_open()
       [all …]

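The scif_api.c matches are the front half of scif_open(): allocate the endpoint, then its queue pair, then the anonymous inode backing, and unwind in reverse order on failure. A compressed reconstruction follows; the goto labels are invented, and the final state assignment and NULL return are assumptions, so treat this as a sketch rather than the verbatim function.

```c
#include <linux/scif.h>		/* scif_epd_t */
#include <linux/slab.h>

#include "scif_main.h"		/* driver-private header under drivers/misc/mic/scif/ */

scif_epd_t scif_open(void)
{
	struct scif_endpt *ep;
	int err;

	ep = kzalloc(sizeof(*ep), GFP_KERNEL);
	if (!ep)
		goto err_ep_alloc;

	ep->qp_info.qp = kzalloc(sizeof(*ep->qp_info.qp), GFP_KERNEL);
	if (!ep->qp_info.qp)
		goto err_qp_alloc;

	err = scif_anon_inode_getfile(ep);	/* backs the descriptor with an anon inode */
	if (err)
		goto err_anon_inode;

	spin_lock_init(&ep->lock);
	mutex_init(&ep->sendlock);
	mutex_init(&ep->recvlock);
	scif_rma_ep_init(ep);			/* cf. scif_rma.c lines 37-39 */

	ep->state = SCIFEP_UNBOUND;		/* assumed initial state */
	return ep;

err_anon_inode:
	kfree(ep->qp_info.qp);	/* undo the queue-pair allocation */
err_qp_alloc:
	kfree(ep);		/* undo the endpoint allocation */
err_ep_alloc:
	return NULL;
}
```
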
scif_mmap.c
   37  struct scif_endpt *ep;  in scif_recv_munmap() local
   39  ep = (struct scif_endpt *)recv_window->ep;  in scif_recv_munmap()
   45  req.head = &ep->rma_info.reg_list;  in scif_recv_munmap()
   46  msg->payload[0] = ep->remote_ep;  in scif_recv_munmap()
   48  mutex_lock(&ep->rma_info.rma_lock);  in scif_recv_munmap()
   60  atomic_inc(&ep->rma_info.tw_refcount);  in scif_recv_munmap()
   61  ep->rma_info.async_list_del = 1;  in scif_recv_munmap()
   63  scif_free_window_offset(ep, window, window->offset);  in scif_recv_munmap()
   66  mutex_unlock(&ep->rma_info.rma_lock);  in scif_recv_munmap()
   75  static void __scif_zap_mmaps(struct scif_endpt *ep)  in __scif_zap_mmaps() argument
       [all …]

scif_fence.c
   29  struct scif_endpt *ep = (struct scif_endpt *)msg->payload[0];  in scif_recv_mark() local
   33  err = _scif_fence_mark(ep, &mark);  in scif_recv_mark()
   38  msg->payload[0] = ep->remote_ep;  in scif_recv_mark()
   40  scif_nodeqp_send(ep->remote_dev, msg);  in scif_recv_mark()
   51  struct scif_endpt *ep = (struct scif_endpt *)msg->payload[0];  in scif_recv_mark_resp() local
   55  mutex_lock(&ep->rma_info.rma_lock);  in scif_recv_mark_resp()
   62  mutex_unlock(&ep->rma_info.rma_lock);  in scif_recv_mark_resp()
   74  struct scif_endpt *ep = (struct scif_endpt *)msg->payload[0];  in scif_recv_wait() local
   84  msg->payload[0] = ep->remote_ep;  in scif_recv_wait()
   86  scif_nodeqp_send(ep->remote_dev, msg);  in scif_recv_wait()
       [all …]

scif_rma.c
   37  void scif_rma_ep_init(struct scif_endpt *ep)  in scif_rma_ep_init() argument
   39  struct scif_endpt_rma_info *rma = &ep->rma_info;  in scif_rma_ep_init()
   65  int scif_rma_ep_can_uninit(struct scif_endpt *ep)  in scif_rma_ep_can_uninit() argument
   69  mutex_lock(&ep->rma_info.rma_lock);  in scif_rma_ep_can_uninit()
   71  if (list_empty(&ep->rma_info.reg_list) &&  in scif_rma_ep_can_uninit()
   72  list_empty(&ep->rma_info.remote_reg_list) &&  in scif_rma_ep_can_uninit()
   73  list_empty(&ep->rma_info.mmn_list) &&  in scif_rma_ep_can_uninit()
   74  !atomic_read(&ep->rma_info.tw_refcount) &&  in scif_rma_ep_can_uninit()
   75  !atomic_read(&ep->rma_info.tcw_refcount) &&  in scif_rma_ep_can_uninit()
   76  !atomic_read(&ep->rma_info.fence_refcount))  in scif_rma_ep_can_uninit()
       [all …]

scif_nm.c
   29  struct scif_endpt *ep;  in scif_invalidate_ep() local
   35  ep = list_entry(pos, struct scif_endpt, list);  in scif_invalidate_ep()
   36  if (ep->remote_dev->node == node) {  in scif_invalidate_ep()
   37  scif_unmap_all_windows(ep);  in scif_invalidate_ep()
   38  spin_lock(&ep->lock);  in scif_invalidate_ep()
   39  scif_cleanup_ep_qp(ep);  in scif_invalidate_ep()
   40  spin_unlock(&ep->lock);  in scif_invalidate_ep()
   44  ep = list_entry(pos, struct scif_endpt, list);  in scif_invalidate_ep()
   45  if (ep->remote_dev->node == node) {  in scif_invalidate_ep()
   47  spin_lock(&ep->lock);  in scif_invalidate_ep()
       [all …]

scif_dma.c
   85  int scif_reserve_dma_chan(struct scif_endpt *ep)  in scif_reserve_dma_chan() argument
   93  if (!scif_info.nodeid && scifdev_self(ep->remote_dev))  in scif_reserve_dma_chan()
   98  scifdev = ep->remote_dev;  in scif_reserve_dma_chan()
  104  mutex_lock(&ep->rma_info.rma_lock);  in scif_reserve_dma_chan()
  105  ep->rma_info.dma_chan = chan;  in scif_reserve_dma_chan()
  106  mutex_unlock(&ep->rma_info.rma_lock);  in scif_reserve_dma_chan()
  144  struct scif_endpt *ep = mmn->ep;  in scif_rma_destroy_tcw() local
  146  spin_lock(&ep->rma_info.tc_lock);  in scif_rma_destroy_tcw()
  148  spin_unlock(&ep->rma_info.tc_lock);  in scif_rma_destroy_tcw()
  151  static void scif_rma_destroy_tcw_ep(struct scif_endpt *ep)  in scif_rma_destroy_tcw_ep() argument
       [all …]

scif_rma_list.c
   85  int scif_query_tcw(struct scif_endpt *ep, struct scif_rma_req *req)  in scif_query_tcw() argument
  206  struct scif_endpt *ep = (struct scif_endpt *)window->ep;  in scif_rma_list_unregister() local
  207  struct list_head *head = &ep->rma_info.reg_list;  in scif_rma_list_unregister()
  238  struct scif_endpt *ep = (struct scif_endpt *)epd;  in scif_unmap_all_windows() local
  239  struct list_head *head = &ep->rma_info.reg_list;  in scif_unmap_all_windows()
  241  mutex_lock(&ep->rma_info.rma_lock);  in scif_unmap_all_windows()
  244  scif_unmap_window(ep->remote_dev, window);  in scif_unmap_all_windows()
  246  mutex_unlock(&ep->rma_info.rma_lock);  in scif_unmap_all_windows()
  260  struct scif_endpt *ep = (struct scif_endpt *)epd;  in scif_unregister_all_windows() local
  261  struct list_head *head = &ep->rma_info.reg_list;  in scif_unregister_all_windows()
       [all …]

scif_epd.h
  139  static inline int scifdev_alive(struct scif_endpt *ep)  in scifdev_alive() argument
  141  return _scifdev_alive(ep->remote_dev);  in scifdev_alive()
  151  static inline int scif_verify_epd(struct scif_endpt *ep)  in scif_verify_epd() argument
  153  if (ep->state == SCIFEP_DISCONNECTED)  in scif_verify_epd()
  156  if (ep->state != SCIFEP_CONNECTED)  in scif_verify_epd()
  159  if (!scifdev_alive(ep))  in scif_verify_epd()
  183  void scif_cleanup_ep_qp(struct scif_endpt *ep);
  184  void scif_add_epd_to_zombie_list(struct scif_endpt *ep, bool eplock_held);
  207  struct scif_endpt *ep);

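scif_epd.h's scif_verify_epd() (lines 151-159) is the guard that SCIF paths run before touching an endpoint: it maps the endpoint state and remote-device liveness to an errno. The listing truncates the actual return values, so the sketch below relies only on the non-zero-on-error behaviour; my_scif_rma_op() is invented.

```c
#include "scif_main.h"		/* driver-private headers under drivers/misc/mic/scif/ */

/* Hypothetical RMA-path helper: refuse to touch the endpoint unless it is
 * connected and its remote device is still alive. */
static int my_scif_rma_op(struct scif_endpt *ep)
{
	int err;

	err = scif_verify_epd(ep);	/* negative errno if disconnected or dead */
	if (err)
		return err;

	mutex_lock(&ep->rma_info.rma_lock);
	/* ... walk ep->rma_info.reg_list, set up DMA, etc. ... */
	mutex_unlock(&ep->rma_info.rma_lock);

	return 0;
}
```
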
scif_debugfs.c
  109  struct scif_endpt *ep;  in scif_rma_test() local
  114  ep = list_entry(pos, struct scif_endpt, list);  in scif_rma_test()
  115  seq_printf(s, "ep %p self windows\n", ep);  in scif_rma_test()
  116  mutex_lock(&ep->rma_info.rma_lock);  in scif_rma_test()
  117  scif_display_all_windows(&ep->rma_info.reg_list, s);  in scif_rma_test()
  118  seq_printf(s, "ep %p remote windows\n", ep);  in scif_rma_test()
  119  scif_display_all_windows(&ep->rma_info.remote_reg_list, s);  in scif_rma_test()
  120  mutex_unlock(&ep->rma_info.rma_lock);  in scif_rma_test()

scif_rma.h
  205  struct scif_endpt *ep;  member
  251  u64 ep;  member
  294  struct scif_endpt *ep;  member
  305  void scif_rma_ep_init(struct scif_endpt *ep);
  307  int scif_rma_ep_can_uninit(struct scif_endpt *ep);
  309  int scif_get_window_offset(struct scif_endpt *ep, int flags,
  312  void scif_free_window_offset(struct scif_endpt *ep,
  315  struct scif_window *scif_create_window(struct scif_endpt *ep, int nr_pages,
  318  int scif_destroy_window(struct scif_endpt *ep, struct scif_window *window);
  335  int scif_reserve_dma_chan(struct scif_endpt *ep);

/linux-4.19.296/fs/

eventpoll.c
  169  struct eventpoll *ep;  member
  380  static inline int ep_events_available(struct eventpoll *ep)  in ep_events_available() argument
  382  return !list_empty(&ep->rdllist) || ep->ovflist != EP_UNACTIVE_PTR;  in ep_events_available()
  388  struct eventpoll *ep = p;  in ep_busy_loop_end() local
  390  return ep_events_available(ep) || busy_loop_timeout(start_time);  in ep_busy_loop_end()
  399  static void ep_busy_loop(struct eventpoll *ep, int nonblock)  in ep_busy_loop() argument
  401  unsigned int napi_id = READ_ONCE(ep->napi_id);  in ep_busy_loop()
  404  napi_busy_loop(napi_id, nonblock ? NULL : ep_busy_loop_end, ep);  in ep_busy_loop()
  407  static inline void ep_reset_busy_poll_napi_id(struct eventpoll *ep)  in ep_reset_busy_poll_napi_id() argument
  409  if (ep->napi_id)  in ep_reset_busy_poll_napi_id()
       [all …]

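The eventpoll.c matches show the generic napi_busy_loop() pattern: the caller records a NAPI id, then busy-polls with a loop_end callback that stops once its own condition holds (here, events on the ready list) or busy_loop_timeout() fires. A sketch of the same shape for a hypothetical consumer, assuming CONFIG_NET_RX_BUSY_POLL; my_ctx, my_busy_loop_end() and the data_ready flag are invented.

```c
#include <linux/compiler.h>
#include <linux/types.h>
#include <net/busy_poll.h>

struct my_ctx {
	unsigned int napi_id;	/* learned from incoming traffic, as epoll does */
	bool data_ready;	/* stand-in for "events on the ready list" */
};

#ifdef CONFIG_NET_RX_BUSY_POLL
/* loop_end callback: napi_busy_loop() keeps polling until this returns true. */
static bool my_busy_loop_end(void *p, unsigned long start_time)
{
	struct my_ctx *ctx = p;

	return READ_ONCE(ctx->data_ready) || busy_loop_timeout(start_time);
}

static void my_busy_loop(struct my_ctx *ctx, int nonblock)
{
	unsigned int napi_id = READ_ONCE(ctx->napi_id);

	if (napi_id >= MIN_NAPI_ID && net_busy_loop_on())
		napi_busy_loop(napi_id, nonblock ? NULL : my_busy_loop_end, ctx);
}
#endif /* CONFIG_NET_RX_BUSY_POLL */
```
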
/linux-4.19.296/drivers/pci/controller/

pcie-cadence-ep.c
   52  struct cdns_pcie_ep *ep = epc_get_drvdata(epc);  in cdns_pcie_ep_write_header() local
   53  struct cdns_pcie *pcie = &ep->pcie;  in cdns_pcie_ep_write_header()
   83  struct cdns_pcie_ep *ep = epc_get_drvdata(epc);  in cdns_pcie_ep_set_bar() local
   84  struct cdns_pcie *pcie = &ep->pcie;  in cdns_pcie_ep_set_bar()
  150  struct cdns_pcie_ep *ep = epc_get_drvdata(epc);  in cdns_pcie_ep_clear_bar() local
  151  struct cdns_pcie *pcie = &ep->pcie;  in cdns_pcie_ep_clear_bar()
  177  struct cdns_pcie_ep *ep = epc_get_drvdata(epc);  in cdns_pcie_ep_map_addr() local
  178  struct cdns_pcie *pcie = &ep->pcie;  in cdns_pcie_ep_map_addr()
  181  r = find_first_zero_bit(&ep->ob_region_map, BITS_PER_LONG);  in cdns_pcie_ep_map_addr()
  182  if (r >= ep->max_regions - 1) {  in cdns_pcie_ep_map_addr()
       [all …]

pcie-rockchip-ep.c
  128  struct rockchip_pcie_ep *ep = epc_get_drvdata(epc);  in rockchip_pcie_ep_write_header() local
  129  struct rockchip_pcie *rockchip = &ep->rockchip;  in rockchip_pcie_ep_write_header()
  166  struct rockchip_pcie_ep *ep = epc_get_drvdata(epc);  in rockchip_pcie_ep_set_bar() local
  167  struct rockchip_pcie *rockchip = &ep->rockchip;  in rockchip_pcie_ep_set_bar()
  234  struct rockchip_pcie_ep *ep = epc_get_drvdata(epc);  in rockchip_pcie_ep_clear_bar() local
  235  struct rockchip_pcie *rockchip = &ep->rockchip;  in rockchip_pcie_ep_clear_bar()
  264  struct rockchip_pcie_ep *ep = epc_get_drvdata(epc);  in rockchip_pcie_ep_map_addr() local
  265  struct rockchip_pcie *pcie = &ep->rockchip;  in rockchip_pcie_ep_map_addr()
  268  r = find_first_zero_bit(&ep->ob_region_map, BITS_PER_LONG);  in rockchip_pcie_ep_map_addr()
  273  if (r >= ep->max_regions - 1) {  in rockchip_pcie_ep_map_addr()
       [all …]

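Both pcie-cadence-ep.c and pcie-rockchip-ep.c allocate outbound address-translation regions the same way: a bitmap of used regions, find_first_zero_bit() to pick a free one, and a check against max_regions - 1 that effectively keeps one region in reserve (both drivers set a region aside for interrupt generation). A generic sketch of that bookkeeping; my_ep_alloc_ob_region() is invented and the errno choice is not taken from either driver.

```c
#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/types.h>

/* Pick and claim a free outbound-region index from a bitmap. */
static int my_ep_alloc_ob_region(unsigned long *ob_region_map, u32 max_regions)
{
	u32 r;

	r = find_first_zero_bit(ob_region_map, BITS_PER_LONG);
	if (r >= max_regions - 1)	/* one region stays reserved */
		return -EINVAL;		/* no free outbound region */

	set_bit(r, ob_region_map);	/* claim it before programming the window */
	return r;
}

/* The matching unmap path simply does clear_bit(r, ob_region_map). */
```
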
/linux-4.19.296/include/linux/usb/

gadget.h
  110  void (*complete)(struct usb_ep *ep,
  129  int (*enable) (struct usb_ep *ep,
  131  int (*disable) (struct usb_ep *ep);
  132  void (*dispose) (struct usb_ep *ep);
  134  struct usb_request *(*alloc_request) (struct usb_ep *ep,
  136  void (*free_request) (struct usb_ep *ep, struct usb_request *req);
  138  int (*queue) (struct usb_ep *ep, struct usb_request *req,
  140  int (*dequeue) (struct usb_ep *ep, struct usb_request *req);
  142  int (*set_halt) (struct usb_ep *ep, int value);
  143  int (*set_wedge) (struct usb_ep *ep);
       [all …]

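struct usb_ep_ops is the UDC-driver side of the interface; gadget function drivers reach it through the usb_ep_*() wrappers. A minimal transmit path over an already-enabled IN endpoint is sketched below; my_send(), my_complete() and the kmemdup() buffer handling are illustrative, not taken from any in-tree function driver.

```c
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/usb/gadget.h>

static void my_complete(struct usb_ep *ep, struct usb_request *req)
{
	/* req->status is 0 on success, negative on dequeue/disable/unbind */
	kfree(req->buf);
	usb_ep_free_request(ep, req);	/* calls ep->ops->free_request */
}

static int my_send(struct usb_ep *ep, const void *data, unsigned int len)
{
	struct usb_request *req;
	int ret;

	req = usb_ep_alloc_request(ep, GFP_ATOMIC);	/* calls ep->ops->alloc_request */
	if (!req)
		return -ENOMEM;

	req->buf = kmemdup(data, len, GFP_ATOMIC);
	if (!req->buf) {
		usb_ep_free_request(ep, req);
		return -ENOMEM;
	}
	req->length = len;
	req->complete = my_complete;	/* invoked when the transfer finishes */

	ret = usb_ep_queue(ep, req, GFP_ATOMIC);	/* calls ep->ops->queue */
	if (ret) {
		kfree(req->buf);
		usb_ep_free_request(ep, req);
	}
	return ret;
}
```
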
musb.h
   44  #define MUSB_EP_FIFO(ep, st, m, pkt) \  argument
   46  .hw_ep_num = ep, \
   52  #define MUSB_EP_FIFO_SINGLE(ep, st, pkt) \  argument
   53  MUSB_EP_FIFO(ep, st, BUF_SINGLE, pkt)
   55  #define MUSB_EP_FIFO_DOUBLE(ep, st, pkt) \  argument
   56  MUSB_EP_FIFO(ep, st, BUF_DOUBLE, pkt)

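The MUSB_EP_FIFO* macros build struct musb_fifo_cfg initializers for a controller's endpoint FIFO table, which board or glue code hands to the core through its platform config. A hypothetical table follows; the endpoint numbers, FIFO styles and packet sizes are made up, and the musb_hdrc_config fields shown are only the commonly used ones.

```c
#include <linux/kernel.h>
#include <linux/usb/musb.h>

/* Hypothetical layout: EP1 bulk in/out single-buffered, EP2 double-buffered. */
static struct musb_fifo_cfg my_board_fifo_cfg[] = {
	MUSB_EP_FIFO_SINGLE(1, FIFO_TX, 512),
	MUSB_EP_FIFO_SINGLE(1, FIFO_RX, 512),
	MUSB_EP_FIFO_DOUBLE(2, FIFO_TX, 512),
	MUSB_EP_FIFO_DOUBLE(2, FIFO_RX, 512),
};

/* Handed to the MUSB core via the glue/board platform data: */
static struct musb_hdrc_config my_board_musb_config = {
	.fifo_cfg	= my_board_fifo_cfg,
	.fifo_cfg_size	= ARRAY_SIZE(my_board_fifo_cfg),
	.multipoint	= 1,
	.num_eps	= 16,
};
```
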
/linux-4.19.296/drivers/char/xillybus/

xillybus_core.c
  128  struct xilly_endpoint *ep = data;  in xillybus_isr() local
  136  buf = ep->msgbuf_addr;  in xillybus_isr()
  137  buf_size = ep->msg_buf_size/sizeof(u32);  in xillybus_isr()
  139  ep->ephw->hw_sync_sgl_for_cpu(ep,  in xillybus_isr()
  140  ep->msgbuf_dma_addr,  in xillybus_isr()
  141  ep->msg_buf_size,  in xillybus_isr()
  145  if (((buf[i+1] >> 28) & 0xf) != ep->msg_counter) {  in xillybus_isr()
  146  malformed_message(ep, &buf[i]);  in xillybus_isr()
  147  dev_warn(ep->dev,  in xillybus_isr()
  150  ep->msg_counter,  in xillybus_isr()
       [all …]

xillybus_of.c
   38  static void xilly_dma_sync_single_for_cpu_of(struct xilly_endpoint *ep,  in xilly_dma_sync_single_for_cpu_of() argument
   43  dma_sync_single_for_cpu(ep->dev, dma_handle, size, direction);  in xilly_dma_sync_single_for_cpu_of()
   46  static void xilly_dma_sync_single_for_device_of(struct xilly_endpoint *ep,  in xilly_dma_sync_single_for_device_of() argument
   51  dma_sync_single_for_device(ep->dev, dma_handle, size, direction);  in xilly_dma_sync_single_for_device_of()
   54  static void xilly_dma_sync_single_nop(struct xilly_endpoint *ep,  in xilly_dma_sync_single_nop() argument
   71  static int xilly_map_single_of(struct xilly_endpoint *ep,  in xilly_map_single_of() argument
   85  addr = dma_map_single(ep->dev, ptr, size, direction);  in xilly_map_single_of()
   87  if (dma_mapping_error(ep->dev, addr)) {  in xilly_map_single_of()
   92  this->device = ep->dev;  in xilly_map_single_of()
   99  return devm_add_action_or_reset(ep->dev, xilly_of_unmap, this);  in xilly_map_single_of()

xillybus_pcie.c
   52  static void xilly_dma_sync_single_for_cpu_pci(struct xilly_endpoint *ep,  in xilly_dma_sync_single_for_cpu_pci() argument
   57  pci_dma_sync_single_for_cpu(ep->pdev,  in xilly_dma_sync_single_for_cpu_pci()
   63  static void xilly_dma_sync_single_for_device_pci(struct xilly_endpoint *ep,  in xilly_dma_sync_single_for_device_pci() argument
   68  pci_dma_sync_single_for_device(ep->pdev,  in xilly_dma_sync_single_for_device_pci()
   90  static int xilly_map_single_pci(struct xilly_endpoint *ep,  in xilly_map_single_pci() argument
  107  addr = pci_map_single(ep->pdev, ptr, size, pci_direction);  in xilly_map_single_pci()
  109  if (pci_dma_mapping_error(ep->pdev, addr)) {  in xilly_map_single_pci()
  114  this->device = ep->pdev;  in xilly_map_single_pci()
  121  return devm_add_action_or_reset(ep->dev, xilly_pci_unmap, this);  in xilly_map_single_pci()

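xillybus_core.c only ever reaches the DMA-sync helpers through ep->ephw (lines 139-141), while xillybus_of.c and xillybus_pcie.c each provide a bus-specific implementation; that is a plain hardware-ops-table split, so the core stays bus-agnostic. The sketch below reconstructs what such a table looks like from the call sites; the struct name, member order and exact prototypes are assumptions, and the assignment block stands for what xillybus_of.c effectively does with its static helpers.

```c
#include <linux/dma-mapping.h>
#include <linux/types.h>

struct xilly_endpoint;	/* driver-private type from drivers/char/xillybus/ */

/* Hypothetical ops table shaped after the callbacks listed above; the real
 * driver keeps an equivalent structure reachable as ep->ephw. */
struct my_endpoint_hardware {
	void (*hw_sync_sgl_for_cpu)(struct xilly_endpoint *ep,
				    dma_addr_t dma_handle, size_t size,
				    int direction);
	void (*hw_sync_sgl_for_device)(struct xilly_endpoint *ep,
				       dma_addr_t dma_handle, size_t size,
				       int direction);
	int (*map_single)(struct xilly_endpoint *ep, void *ptr, size_t size,
			  int direction, dma_addr_t *ret_dma_handle);
};

/* Inside xillybus_of.c this would simply collect the OF-flavoured helpers: */
static const struct my_endpoint_hardware my_of_hw = {
	.hw_sync_sgl_for_cpu	= xilly_dma_sync_single_for_cpu_of,
	.hw_sync_sgl_for_device	= xilly_dma_sync_single_for_device_of,
	.map_single		= xilly_map_single_of,
};
```
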
/linux-4.19.296/include/net/sctp/

auth.h
   92  int sctp_auth_asoc_copy_shkeys(const struct sctp_endpoint *ep,
   95  int sctp_auth_init_hmacs(struct sctp_endpoint *ep, gfp_t gfp);
  114  int sctp_auth_ep_add_chunkid(struct sctp_endpoint *ep, __u8 chunk_id);
  115  int sctp_auth_ep_set_hmacs(struct sctp_endpoint *ep,
  117  int sctp_auth_set_key(struct sctp_endpoint *ep, struct sctp_association *asoc,
  119  int sctp_auth_set_active_key(struct sctp_endpoint *ep,
  121  int sctp_auth_del_key_id(struct sctp_endpoint *ep,
  123  int sctp_auth_deact_key_id(struct sctp_endpoint *ep,

/linux-4.19.296/drivers/base/

devcon.c
   28  int ep, void *data))  in device_connection_find_match() argument
   33  int ep;  in device_connection_find_match() local
   41  ep = match_string(con->endpoint, 2, devname);  in device_connection_find_match()
   42  if (ep < 0)  in device_connection_find_match()
   48  ret = match(con, !ep, data);  in device_connection_find_match()
   79  static void *generic_match(struct device_connection *con, int ep, void *data)  in generic_match() argument
   85  dev = bus_find_device_by_name(bus, NULL, con->endpoint[ep]);  in generic_match()

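devcon.c's device_connection_find_match() walks registered struct device_connection entries, uses match_string() to find which endpoint names the calling device, and then invokes the match callback with the index of the other endpoint (the !ep at line 48), so con->endpoint[ep] inside the callback names the peer. A hedged usage sketch follows; the connection names, my_match(), my_lookup_by_name() and my_find_peer() are invented, and error handling (such as ERR_PTR returns from a real match callback) is omitted.

```c
#include <linux/device.h>

/* Connection between two hypothetical devices, registered by platform code. */
static struct device_connection my_con = {
	.endpoint	= { "my-mcu.0", "my-mux.0" },
	.id		= "usb-role",
};

/* Stand-in lookup; a real callback might use bus_find_device_by_name() as
 * generic_match() above does, or consult its own registry of handles. */
static void *my_lookup_by_name(const char *name)
{
	return NULL;
}

/* Match callback: 'ep' indexes the *other* side of the connection. */
static void *my_match(struct device_connection *con, int ep, void *data)
{
	return my_lookup_by_name(con->endpoint[ep]);
}

static void *my_find_peer(struct device *dev)
{
	device_connection_add(&my_con);		/* usually done once at board init */

	/* Walks connections that mention dev_name(dev) under id "usb-role"
	 * and calls my_match() for each until one returns non-NULL. */
	return device_connection_find_match(dev, "usb-role", NULL, my_match);
}
```
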