/linux-4.19.296/crypto/async_tx/ |
D | async_tx.c |
     79  struct dma_async_tx_descriptor *tx)  in async_tx_channel_switch() argument
     87  if (txd_parent(depend_tx) && depend_tx->chan == tx->chan) {  in async_tx_channel_switch()
     88  txd_chain(depend_tx, tx);  in async_tx_channel_switch()
    113  txd_chain(intr_tx, tx);  in async_tx_channel_switch()
    134  tx->tx_submit(tx);  in async_tx_channel_switch()
    156  async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,  in async_tx_submit() argument
    161  tx->callback = submit->cb_fn;  in async_tx_submit()
    162  tx->callback_param = submit->cb_param;  in async_tx_submit()
    174  txd_parent(tx));  in async_tx_submit()
    186  txd_chain(depend_tx, tx);  in async_tx_submit()
    [all …]
|
D | async_raid6_recov.c |
     49  struct dma_async_tx_descriptor *tx;  in async_sum_product() local
     64  tx = dma->device_prep_dma_pq(chan, pq, unmap->addr, 2, coef,  in async_sum_product()
     66  if (tx) {  in async_sum_product()
     67  dma_set_unmap(tx, unmap);  in async_sum_product()
     68  async_tx_submit(chan, tx, submit);  in async_sum_product()
     70  return tx;  in async_sum_product()
    113  struct dma_async_tx_descriptor *tx;  in async_mult() local
    129  tx = dma->device_prep_dma_pq(chan, dma_dest, unmap->addr,  in async_mult()
    132  if (tx) {  in async_mult()
    133  dma_set_unmap(tx, unmap);  in async_mult()
    [all …]
|
D | async_xor.c |
     40  struct dma_async_tx_descriptor *tx = NULL;  in do_async_xor() local
     78  tx = dma->device_prep_dma_xor(chan, dma_dest, src_list,  in do_async_xor()
     82  if (unlikely(!tx))  in do_async_xor()
     86  while (unlikely(!tx)) {  in do_async_xor()
     88  tx = dma->device_prep_dma_xor(chan, dma_dest,  in do_async_xor()
     95  dma_set_unmap(tx, unmap);  in do_async_xor()
     96  async_tx_submit(chan, tx, submit);  in do_async_xor()
     97  submit->depend_tx = tx;  in do_async_xor()
    109  return tx;  in do_async_xor()
    188  struct dma_async_tx_descriptor *tx;  in async_xor() local
    [all …]
|
D | async_pq.c |
     57  struct dma_async_tx_descriptor *tx = NULL;  in do_async_gen_syndrome() local
     94  tx = dma->device_prep_dma_pq(chan, dma_dest,  in do_async_gen_syndrome()
     99  if (likely(tx))  in do_async_gen_syndrome()
    105  dma_set_unmap(tx, unmap);  in do_async_gen_syndrome()
    106  async_tx_submit(chan, tx, submit);  in do_async_gen_syndrome()
    107  submit->depend_tx = tx;  in do_async_gen_syndrome()
    116  return tx;  in do_async_gen_syndrome()
    199  struct dma_async_tx_descriptor *tx;  in async_gen_syndrome() local
    244  tx = do_async_gen_syndrome(chan, coefs, j, unmap, dma_flags, submit);  in async_gen_syndrome()
    246  return tx;  in async_gen_syndrome()
    [all …]
|
D | raid6test.c |
     72  struct dma_async_tx_descriptor *tx = NULL;  in raid6_dual_recov() local
     82  tx = async_gen_syndrome(ptrs, 0, disks, bytes, &submit);  in raid6_dual_recov()
    102  tx = async_xor(dest, blocks, 0, count, bytes, &submit);  in raid6_dual_recov()
    104  init_async_submit(&submit, 0, tx, NULL, NULL, addr_conv);  in raid6_dual_recov()
    105  tx = async_gen_syndrome(ptrs, 0, disks, bytes, &submit);  in raid6_dual_recov()
    111  tx = async_raid6_datap_recov(disks, bytes, faila, ptrs, &submit);  in raid6_dual_recov()
    115  tx = async_raid6_2data_recov(disks, bytes, faila, failb, ptrs, &submit);  in raid6_dual_recov()
    119  init_async_submit(&submit, ASYNC_TX_ACK, tx, callback, &cmp, addr_conv);  in raid6_dual_recov()
    120  tx = async_syndrome_val(ptrs, 0, disks, bytes, &result, spare, &submit);  in raid6_dual_recov()
    121  async_tx_issue_pending(tx);  in raid6_dual_recov()
    [all …]
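The raid6test lines above exercise the whole async_tx RAID6 pipeline. Below is a minimal sketch of the same call pattern, assuming the caller already owns the page array `ptrs`, a `spare` page and an `addr_conv` scribble buffer; `generate_and_check()` and `check_done()` are illustrative names, not part of raid6test.c. Operations are chained by passing the previous descriptor as `depend_tx` to init_async_submit(), and completion is awaited before the result flags are read, just as raid6test does around line 119 above.

```c
#include <linux/async_tx.h>
#include <linux/completion.h>
#include <linux/errno.h>

static void check_done(void *arg)
{
	complete(arg);
}

/* Illustrative wrapper: generate P/Q for 'disks' pages, then validate them. */
static int generate_and_check(struct page **ptrs, int disks, size_t bytes,
			      struct page *spare, addr_conv_t *addr_conv)
{
	DECLARE_COMPLETION_ONSTACK(done);
	struct async_submit_ctl submit;
	struct dma_async_tx_descriptor *tx;
	enum sum_check_flags result = 0;

	/* compute P/Q over ptrs[] into the last two pages */
	init_async_submit(&submit, 0, NULL, NULL, NULL, addr_conv);
	tx = async_gen_syndrome(ptrs, 0, disks, bytes, &submit);

	/* re-check the syndrome once the generation above has completed */
	init_async_submit(&submit, ASYNC_TX_ACK, tx, check_done, &done, addr_conv);
	tx = async_syndrome_val(ptrs, 0, disks, bytes, &result, spare, &submit);
	async_tx_issue_pending(tx);
	wait_for_completion(&done);

	/* SUM_CHECK_P_RESULT / SUM_CHECK_Q_RESULT flag a corrupt P or Q */
	return result ? -EILSEQ : 0;
}
```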
|
D | async_memcpy.c |
     52  struct dma_async_tx_descriptor *tx = NULL;  in async_memcpy() local
     74  tx = device->device_prep_dma_memcpy(chan, unmap->addr[1],  in async_memcpy()
     79  if (tx) {  in async_memcpy()
     82  dma_set_unmap(tx, unmap);  in async_memcpy()
     83  async_tx_submit(chan, tx, submit);  in async_memcpy()
    104  return tx;  in async_memcpy()
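async_memcpy() is the simplest of these helpers, so here is a hedged end-to-end usage sketch; `copy_done` and `copy_page_async()` are illustrative names, not kernel API. The submit descriptor carries the completion callback, and async_tx_issue_pending() tolerates the synchronous fallback case where no DMA channel was found and `tx` is NULL (the callback then runs synchronously from the epilogue).

```c
#include <linux/async_tx.h>
#include <linux/completion.h>

static void copy_done(void *ctx)
{
	complete(ctx);
}

static void copy_page_async(struct page *dest, struct page *src, size_t len)
{
	DECLARE_COMPLETION_ONSTACK(done);
	struct async_submit_ctl submit;
	struct dma_async_tx_descriptor *tx;

	/* no dependency; run copy_done(&done) when the copy finishes */
	init_async_submit(&submit, ASYNC_TX_ACK, NULL, copy_done, &done, NULL);
	tx = async_memcpy(dest, src, 0, 0, len, &submit);

	/* if no channel was available, the copy already ran via memcpy() */
	async_tx_issue_pending(tx);
	wait_for_completion(&done);
}
```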
|
/linux-4.19.296/drivers/clk/mediatek/ |
D | clk-apmixed.c |
     39  struct mtk_ref2usb_tx *tx = to_mtk_ref2usb_tx(hw);  in mtk_ref2usb_tx_is_prepared() local
     41  return (readl(tx->base_addr) & REF2USB_EN_MASK) == REF2USB_EN_MASK;  in mtk_ref2usb_tx_is_prepared()
     46  struct mtk_ref2usb_tx *tx = to_mtk_ref2usb_tx(hw);  in mtk_ref2usb_tx_prepare() local
     49  val = readl(tx->base_addr);  in mtk_ref2usb_tx_prepare()
     52  writel(val, tx->base_addr);  in mtk_ref2usb_tx_prepare()
     56  writel(val, tx->base_addr);  in mtk_ref2usb_tx_prepare()
     59  writel(val, tx->base_addr);  in mtk_ref2usb_tx_prepare()
     66  struct mtk_ref2usb_tx *tx = to_mtk_ref2usb_tx(hw);  in mtk_ref2usb_tx_unprepare() local
     69  val = readl(tx->base_addr);  in mtk_ref2usb_tx_unprepare()
     71  writel(val, tx->base_addr);  in mtk_ref2usb_tx_unprepare()
    [all …]
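The clk-apmixed hits are a plain readl()/writel() read-modify-write gate behind clk_ops. A stripped-down sketch of that shape follows, with a hypothetical one-bit enable (`MYGATE_EN_MASK`, `struct mygate`) rather than the MediaTek register layout and its staged enable sequence with settling delays.

```c
#include <linux/bitops.h>
#include <linux/clk-provider.h>
#include <linux/io.h>

#define MYGATE_EN_MASK	BIT(0)		/* placeholder enable bit */

struct mygate {
	struct clk_hw hw;
	void __iomem *base_addr;
};

#define to_mygate(_hw) container_of(_hw, struct mygate, hw)

static int mygate_is_prepared(struct clk_hw *hw)
{
	struct mygate *gate = to_mygate(hw);

	return (readl(gate->base_addr) & MYGATE_EN_MASK) == MYGATE_EN_MASK;
}

static int mygate_prepare(struct clk_hw *hw)
{
	struct mygate *gate = to_mygate(hw);
	u32 val;

	val = readl(gate->base_addr);		/* read-modify-write */
	val |= MYGATE_EN_MASK;
	writel(val, gate->base_addr);
	return 0;
}

static void mygate_unprepare(struct clk_hw *hw)
{
	struct mygate *gate = to_mygate(hw);
	u32 val;

	val = readl(gate->base_addr);
	val &= ~MYGATE_EN_MASK;
	writel(val, gate->base_addr);
}

static const struct clk_ops mygate_ops = {
	.is_prepared	= mygate_is_prepared,
	.prepare	= mygate_prepare,
	.unprepare	= mygate_unprepare,
};
```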
|
/linux-4.19.296/drivers/iio/gyro/ |
D | adxrs450.c |
     77  __be32 tx ____cacheline_aligned;
     94  u32 tx;  in adxrs450_spi_read_reg_16() local
     98  .tx_buf = &st->tx,  in adxrs450_spi_read_reg_16()
    100  .len = sizeof(st->tx),  in adxrs450_spi_read_reg_16()
    110  tx = ADXRS450_READ_DATA | (reg_address << 17);  in adxrs450_spi_read_reg_16()
    112  if (!(hweight32(tx) & 1))  in adxrs450_spi_read_reg_16()
    113  tx |= ADXRS450_P;  in adxrs450_spi_read_reg_16()
    115  st->tx = cpu_to_be32(tx);  in adxrs450_spi_read_reg_16()
    142  u32 tx;  in adxrs450_spi_write_reg_16() local
    146  tx = ADXRS450_WRITE_DATA | (reg_address << 17) | (val << 1);  in adxrs450_spi_write_reg_16()
    [all …]
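The adxrs450 lines show the part's odd-parity command framing: if the assembled 32-bit word has an even number of set bits, a parity bit is ORed in before the word is byte-swapped for the wire. A hedged sketch with placeholder `CMD_READ`/`PARITY_BIT` values (not the driver's ADXRS450_* macros):

```c
#include <linux/bitops.h>
#include <linux/types.h>
#include <asm/byteorder.h>

#define CMD_READ	0x80000000U	/* placeholder "read" opcode bit */
#define PARITY_BIT	0x00000001U	/* placeholder P0 bit */

static __be32 build_read_cmd(u8 reg_address)
{
	u32 tx = CMD_READ | ((u32)reg_address << 17);

	if (!(hweight32(tx) & 1))	/* even number of ones so far... */
		tx |= PARITY_BIT;	/* ...set P to make the count odd */

	return cpu_to_be32(tx);		/* wire format is big-endian */
}
```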
|
/linux-4.19.296/drivers/i2c/busses/ |
D | i2c-dln2.c |
     61  } tx;  in dln2_i2c_enable() local
     63  tx.port = dln2->port;  in dln2_i2c_enable()
     70  return dln2_transfer_tx(dln2->pdev, cmd, &tx, sizeof(tx));  in dln2_i2c_enable()
     84  } __packed *tx = dln2->buf;  in dln2_i2c_write() local
     87  BUILD_BUG_ON(sizeof(*tx) > DLN2_I2C_BUF_SIZE);  in dln2_i2c_write()
     89  tx->port = dln2->port;  in dln2_i2c_write()
     90  tx->addr = addr;  in dln2_i2c_write()
     91  tx->mem_addr_len = 0;  in dln2_i2c_write()
     92  tx->mem_addr = 0;  in dln2_i2c_write()
     93  tx->buf_len = cpu_to_le16(data_len);  in dln2_i2c_write()
    [all …]
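The dln2 transmit buffers are `__packed` structs with explicitly sized little-endian fields, filled in and then handed to the USB transfer helper. A sketch of that layout with illustrative struct and field names (it is not the real DLN2 wire format, which the driver defines privately):

```c
#include <linux/string.h>
#include <linux/types.h>
#include <asm/byteorder.h>

struct my_i2c_write_cmd {
	u8 port;
	u8 addr;
	u8 mem_addr_len;
	__le32 mem_addr;
	__le16 buf_len;
	u8 buf[256];
} __packed;

static void fill_write_cmd(struct my_i2c_write_cmd *tx, u8 port, u8 addr,
			   const u8 *data, u16 data_len)
{
	tx->port = port;
	tx->addr = addr;
	tx->mem_addr_len = 0;			/* plain write, no register prefix */
	tx->mem_addr = 0;
	tx->buf_len = cpu_to_le16(data_len);	/* endianness fixed at the boundary */
	memcpy(tx->buf, data, data_len);
}
```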
|
/linux-4.19.296/drivers/iio/imu/ |
D | adis.c |
     38  .tx_buf = adis->tx,  in adis_write_reg()
     44  .tx_buf = adis->tx + 2,  in adis_write_reg()
     50  .tx_buf = adis->tx + 4,  in adis_write_reg()
     56  .tx_buf = adis->tx + 6,  in adis_write_reg()
     61  .tx_buf = adis->tx + 8,  in adis_write_reg()
     73  adis->tx[0] = ADIS_WRITE_REG(ADIS_REG_PAGE_ID);  in adis_write_reg()
     74  adis->tx[1] = page;  in adis_write_reg()
     80  adis->tx[8] = ADIS_WRITE_REG(reg + 3);  in adis_write_reg()
     81  adis->tx[9] = (value >> 24) & 0xff;  in adis_write_reg()
     82  adis->tx[6] = ADIS_WRITE_REG(reg + 2);  in adis_write_reg()
    [all …]
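The adis core splits a register write into per-byte SPI frames, each carrying a write-flagged register address plus one data byte (lower addresses carry the lower bytes). A simplified sketch of that byte layout using spi_sync_transfer() and a placeholder `MY_WRITE_REG()` macro; the real driver chains a longer spi_transfer list, including the page-select write visible above, off a pre-allocated `adis->tx` buffer.

```c
#include <linux/kernel.h>
#include <linux/spi/spi.h>

#define MY_WRITE_REG(reg)	(0x80 | (reg))	/* placeholder write flag */

static int write_reg_16(struct spi_device *spi, u8 reg, u16 value)
{
	u8 tx[4];
	struct spi_transfer xfers[2] = {
		{ .tx_buf = &tx[0], .len = 2, .cs_change = 1 },
		{ .tx_buf = &tx[2], .len = 2 },
	};

	tx[0] = MY_WRITE_REG(reg);		/* low byte first ... */
	tx[1] = value & 0xff;
	tx[2] = MY_WRITE_REG(reg + 1);		/* ... then the high byte */
	tx[3] = (value >> 8) & 0xff;

	return spi_sync_transfer(spi, xfers, ARRAY_SIZE(xfers));
}
```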
|
D | adis_buffer.c |
     30  __be16 *tx, *rx;  in adis_update_scan_mode() local
     49  tx = rx + scan_count;  in adis_update_scan_mode()
     60  adis->xfer[j].tx_buf = &tx[j];  in adis_update_scan_mode()
     71  *tx++ = cpu_to_be16((chan->address + 2) << 8);  in adis_update_scan_mode()
     72  *tx++ = cpu_to_be16(chan->address << 8);  in adis_update_scan_mode()
     89  adis->tx[0] = ADIS_WRITE_REG(ADIS_REG_PAGE_ID);  in adis_trigger_handler()
     90  adis->tx[1] = 0;  in adis_trigger_handler()
     91  spi_write(adis->spi, adis->tx, 2);  in adis_trigger_handler()
|
D | adis16400_buffer.c |
     23  u8 *tx;  in adis16400_update_scan_mode() local
     47  tx = adis->buffer + burst_length;  in adis16400_update_scan_mode()
     48  tx[0] = ADIS_READ_REG(ADIS16400_GLOB_CMD);  in adis16400_update_scan_mode()
     49  tx[1] = 0;  in adis16400_update_scan_mode()
     51  adis->xfer[0].tx_buf = tx;  in adis16400_update_scan_mode()
|
/linux-4.19.296/drivers/media/radio/wl128x/ |
D | fmdrv_tx.c |
    234  struct fmtx_data *tx = &fmdev->tx_data;  in set_audio_io() local
    239  payload = tx->audio_io;  in set_audio_io()
    252  struct fmtx_data *tx = &fmdev->tx_data;  in enable_xmit() local
    282  tx->xmit_state = new_xmit_state;  in enable_xmit()
    291  struct fmtx_data *tx = &fmdev->tx_data;  in fm_tx_set_pwr_lvl() local
    300  tx->pwr_lvl = new_pwr_lvl;  in fm_tx_set_pwr_lvl()
    318  tx->pwr_lvl = new_pwr_lvl;  in fm_tx_set_pwr_lvl()
    329  struct fmtx_data *tx = &fmdev->tx_data;  in fm_tx_set_preemph_filter() local
    353  tx->preemph = payload;  in fm_tx_set_preemph_filter()
    381  struct fmtx_data *tx = &fmdev->tx_data;  in fm_tx_set_freq() local
    [all …]
|
/linux-4.19.296/drivers/iio/pressure/ |
D | mpl115_spi.c |
     22  u8 tx[4];  member
     45  .tx_buf = buf->tx,  in mpl115_spi_read()
     51  buf->tx[0] = MPL115_SPI_READ(address);  in mpl115_spi_read()
     52  buf->tx[2] = MPL115_SPI_READ(address + 1);  in mpl115_spi_read()
     66  .tx_buf = buf->tx,  in mpl115_spi_write()
     70  buf->tx[0] = MPL115_SPI_WRITE(address);  in mpl115_spi_write()
     71  buf->tx[1] = value;  in mpl115_spi_write()
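mpl115_spi keeps its tx/rx buffers in a small driver-owned struct and runs one full-duplex transfer in which reply bytes interleave with command bytes. A hedged sketch of that pattern; `MY_SPI_READ()`, the struct name and the byte positions are illustrative, and `buf` is expected to live in DMA-safe memory (e.g. the driver's kmalloc'd state struct), as in the driver above.

```c
#include <linux/spi/spi.h>

#define MY_SPI_READ(addr)	(((addr) << 1) | 0x80)	/* placeholder encoding */

struct my_spi_buf {
	u8 tx[4];
	u8 rx[4];
};

static int read_reg_pair(struct spi_device *spi, struct my_spi_buf *buf,
			 u8 address, u16 *value)
{
	struct spi_transfer xfer = {
		.tx_buf = buf->tx,
		.rx_buf = buf->rx,
		.len = 4,
	};
	int ret;

	/* two read commands back to back; each reply arrives one byte later */
	buf->tx[0] = MY_SPI_READ(address);
	buf->tx[1] = 0;
	buf->tx[2] = MY_SPI_READ(address + 1);
	buf->tx[3] = 0;

	ret = spi_sync_transfer(spi, &xfer, 1);
	if (ret)
		return ret;

	*value = (buf->rx[1] << 8) | buf->rx[3];
	return 0;
}
```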
|
/linux-4.19.296/include/linux/ |
D | async_tx.h |
    101  static inline void async_tx_issue_pending(struct dma_async_tx_descriptor *tx)  in async_tx_issue_pending() argument
    103  if (likely(tx)) {  in async_tx_issue_pending()
    104  struct dma_chan *chan = tx->chan;  in async_tx_issue_pending()
    125  static inline void async_tx_issue_pending(struct dma_async_tx_descriptor *tx)  in async_tx_issue_pending() argument
    160  struct dma_async_tx_descriptor *tx,  in init_async_submit() argument
    165  args->depend_tx = tx;  in init_async_submit()
    171  void async_tx_submit(struct dma_chan *chan, struct dma_async_tx_descriptor *tx,
    207  void async_tx_quiesce(struct dma_async_tx_descriptor **tx);
|
D | dmaengine.h |
    513  dma_cookie_t (*tx_submit)(struct dma_async_tx_descriptor *tx);
    514  int (*desc_free)(struct dma_async_tx_descriptor *tx);
    527  static inline void dma_set_unmap(struct dma_async_tx_descriptor *tx,  in dma_set_unmap() argument
    531  tx->unmap = unmap;  in dma_set_unmap()
    538  static inline void dma_set_unmap(struct dma_async_tx_descriptor *tx,  in dma_set_unmap() argument
    552  static inline void dma_descriptor_unmap(struct dma_async_tx_descriptor *tx)  in dma_descriptor_unmap() argument
    554  if (tx->unmap) {  in dma_descriptor_unmap()
    555  dmaengine_unmap_put(tx->unmap);  in dma_descriptor_unmap()
    556  tx->unmap = NULL;  in dma_descriptor_unmap()
   1196  void dma_async_tx_descriptor_init(struct dma_async_tx_descriptor *tx,
    [all …]
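dmaengine.h is where the tx_submit() hook and dma_set_unmap() seen throughout the listings are declared. As a reference point, a minimal client sketch that prepares, submits and polls a memcpy descriptor; `copy_with_dma()` is an illustrative wrapper and `dst`/`src` are assumed to be DMA addresses already mapped by the caller.

```c
#include <linux/dmaengine.h>
#include <linux/errno.h>

static int copy_with_dma(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src,
			 size_t len)
{
	struct dma_device *dev = chan->device;
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	tx = dev->device_prep_dma_memcpy(chan, dst, src, len,
					 DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
	if (!tx)
		return -ENOMEM;

	cookie = dmaengine_submit(tx);		/* calls tx->tx_submit(tx) */
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);		/* kick the channel */
	if (dma_sync_wait(chan, cookie) != DMA_COMPLETE)
		return -EIO;

	return 0;
}
```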
|
/linux-4.19.296/drivers/misc/mic/scif/ |
D | scif_dma.c |
    427  struct dma_async_tx_descriptor *tx = NULL;  in scif_sync_dma() local
    440  tx = ddev->device_prep_dma_memcpy(chan, 0, 0, 0, flags);  in scif_sync_dma()
    441  if (!tx) {  in scif_sync_dma()
    447  cookie = tx->tx_submit(tx);  in scif_sync_dma()
    493  struct dma_async_tx_descriptor *tx = NULL;  in scif_async_dma() local
    507  tx = ddev->device_prep_dma_memcpy(chan, 0, 0, 0, flags);  in scif_async_dma()
    508  if (!tx) {  in scif_async_dma()
    515  tx->callback = scif_dma_callback;  in scif_async_dma()
    516  tx->callback_param = &done_wait;  in scif_async_dma()
    517  cookie = tx->tx_submit(tx);  in scif_async_dma()
    [all …]
|
D | scif_fence.c |
    210  struct dma_async_tx_descriptor *tx;  in _scif_prog_signal() local
    216  tx = ddev->device_prep_dma_memcpy(chan, 0, 0, 0, DMA_PREP_FENCE);  in _scif_prog_signal()
    217  if (!tx) {  in _scif_prog_signal()
    223  cookie = tx->tx_submit(tx);  in _scif_prog_signal()
    236  tx = ddev->device_prep_dma_imm_data(chan, dst, val, 0);  in _scif_prog_signal()
    250  tx = ddev->device_prep_dma_memcpy(chan, dst, src, sizeof(val),  in _scif_prog_signal()
    253  if (!tx) {  in _scif_prog_signal()
    260  tx->callback = scif_prog_signal_cb;  in _scif_prog_signal()
    261  tx->callback_param = status;  in _scif_prog_signal()
    263  cookie = tx->tx_submit(tx);  in _scif_prog_signal()
    [all …]
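The SCIF code attaches a callback and its argument to the descriptor before tx_submit(), so completion is signalled asynchronously. A generic sketch of that idiom with illustrative names, using an on-stack completion instead of the SCIF status bookkeeping:

```c
#include <linux/completion.h>
#include <linux/dmaengine.h>
#include <linux/errno.h>

static void dma_done_cb(void *arg)
{
	complete(arg);
}

static int memcpy_and_wait(struct dma_chan *chan, dma_addr_t dst,
			   dma_addr_t src, size_t len)
{
	DECLARE_COMPLETION_ONSTACK(done);
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	tx = chan->device->device_prep_dma_memcpy(chan, dst, src, len,
						  DMA_PREP_INTERRUPT | DMA_PREP_FENCE);
	if (!tx)
		return -ENOMEM;

	tx->callback = dma_done_cb;		/* runs when the copy completes */
	tx->callback_param = &done;
	cookie = tx->tx_submit(tx);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);
	wait_for_completion(&done);
	return 0;
}
```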
|
/linux-4.19.296/drivers/media/dvb-frontends/cxd2880/ |
D | cxd2880_spi_device.c |
     19  struct spi_transfer tx;  in cxd2880_spi_device_write() local
     27  memset(&tx, 0, sizeof(tx));  in cxd2880_spi_device_write()
     28  tx.tx_buf = data;  in cxd2880_spi_device_write()
     29  tx.len = size;  in cxd2880_spi_device_write()
     32  spi_message_add_tail(&tx, &msg);  in cxd2880_spi_device_write()
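The cxd2880 write path is the canonical three-step spi_message composition. The same steps as a self-contained sketch (hedged; the driver's own wrapper types and error reporting are omitted):

```c
#include <linux/spi/spi.h>
#include <linux/string.h>

static int spi_send_buffer(struct spi_device *spi, const void *data, size_t size)
{
	struct spi_message msg;
	struct spi_transfer tx;

	memset(&tx, 0, sizeof(tx));	/* unused fields must be zero */
	tx.tx_buf = data;
	tx.len = size;

	spi_message_init(&msg);
	spi_message_add_tail(&tx, &msg);

	return spi_sync(spi, &msg);	/* blocking transfer */
}
```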
|
/linux-4.19.296/drivers/slimbus/ |
D | qcom-ctrl.c |
    110  struct slim_ctrl_buf tx;  member
    155  spin_lock_irqsave(&ctrl->tx.lock, flags);  in slim_ack_txn()
    156  idx = ctrl->tx.head;  in slim_ack_txn()
    157  ctrl->tx.head = (ctrl->tx.head + 1) % ctrl->tx.n;  in slim_ack_txn()
    158  spin_unlock_irqrestore(&ctrl->tx.lock, flags);  in slim_ack_txn()
    309  spin_lock_irqsave(&ctrl->tx.lock, flags);  in slim_alloc_txbuf()
    310  if (((ctrl->tx.head + 1) % ctrl->tx.n) == ctrl->tx.tail) {  in slim_alloc_txbuf()
    311  spin_unlock_irqrestore(&ctrl->tx.lock, flags);  in slim_alloc_txbuf()
    315  idx = ctrl->tx.tail;  in slim_alloc_txbuf()
    317  ctrl->tx.tail = (ctrl->tx.tail + 1) % ctrl->tx.n;  in slim_alloc_txbuf()
    [all …]
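The qcom-ctrl hits are circular TX-buffer bookkeeping: one index advances as slots are handed out, the other as the hardware acknowledges them, all under an IRQ-safe spinlock. A generic sketch of that scheme with illustrative names; the full/empty convention below is the common "tail chases head" form and is not copied from the driver.

```c
#include <linux/errno.h>
#include <linux/spinlock.h>

struct tx_ring {
	spinlock_t lock;
	int head;		/* next slot to be acknowledged/freed */
	int tail;		/* next slot to be handed out */
	int n;			/* number of slots */
};

static int tx_ring_alloc_slot(struct tx_ring *r)
{
	unsigned long flags;
	int idx;

	spin_lock_irqsave(&r->lock, flags);
	if (((r->tail + 1) % r->n) == r->head) {
		spin_unlock_irqrestore(&r->lock, flags);
		return -EAGAIN;			/* ring full */
	}
	idx = r->tail;
	r->tail = (r->tail + 1) % r->n;
	spin_unlock_irqrestore(&r->lock, flags);

	return idx;
}

static void tx_ring_ack_slot(struct tx_ring *r)
{
	unsigned long flags;

	spin_lock_irqsave(&r->lock, flags);
	r->head = (r->head + 1) % r->n;		/* oldest slot is now free */
	spin_unlock_irqrestore(&r->lock, flags);
}
```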
|
/linux-4.19.296/drivers/misc/echo/ |
D | echo.c |
    274  int16_t oslec_update(struct oslec_state *ec, int16_t tx, int16_t rx)  in oslec_update() argument
    287  ec->tx = tx;  in oslec_update()
    289  tx >>= 1;  in oslec_update()
    345  new = (int)tx * (int)tx;  in oslec_update()
    356  ec->ltxacc += abs(tx) - ec->ltx;  in oslec_update()
    364  echo_value = fir16(&ec->fir_state, tx);  in oslec_update()
    371  echo_value = fir16(&ec->fir_state_bg, tx);  in oslec_update()
    566  int16_t oslec_hpf_tx(struct oslec_state *ec, int16_t tx)  in oslec_hpf_tx() argument
    572  tmp = tx << 15;  in oslec_hpf_tx()
    590  tx = tmp1;  in oslec_hpf_tx()
    [all …]
|
D | oslec.h |
     83  int16_t oslec_update(struct oslec_state *ec, int16_t tx, int16_t rx);
     92  int16_t oslec_hpf_tx(struct oslec_state *ec, int16_t tx);
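Given the two prototypes above, here is a hedged per-sample usage sketch, assuming an echo-canceller context `ec` already obtained from the constructor declared in the same header: the TX sample is high-pass filtered first, then both directions feed the adaptive filter, which returns the echo-cancelled RX sample.

```c
#include <linux/types.h>
#include "oslec.h"	/* path relative to drivers/misc/echo/ assumed */

static void cancel_block(struct oslec_state *ec,
			 int16_t *tx_samples, int16_t *rx_samples, int n)
{
	int i;

	for (i = 0; i < n; i++) {
		/* DC-block the transmit sample before it goes to the line */
		int16_t tx = oslec_hpf_tx(ec, tx_samples[i]);

		/* adapt on tx/rx and get back the echo-cancelled rx sample */
		rx_samples[i] = oslec_update(ec, tx, rx_samples[i]);
		tx_samples[i] = tx;
	}
}
```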
|
/linux-4.19.296/drivers/ptp/ |
D | ptp_clock.c |
    125  static int ptp_clock_adjtime(struct posix_clock *pc, struct timex *tx)  in ptp_clock_adjtime() argument
    133  if (tx->modes & ADJ_SETOFFSET) {  in ptp_clock_adjtime()
    138  ts.tv_sec = tx->time.tv_sec;  in ptp_clock_adjtime()
    139  ts.tv_nsec = tx->time.tv_usec;  in ptp_clock_adjtime()
    141  if (!(tx->modes & ADJ_NANO))  in ptp_clock_adjtime()
    150  } else if (tx->modes & ADJ_FREQUENCY) {  in ptp_clock_adjtime()
    151  long ppb = scaled_ppm_to_ppb(tx->freq);  in ptp_clock_adjtime()
    155  err = ops->adjfine(ops, tx->freq);  in ptp_clock_adjtime()
    158  ptp->dialed_frequency = tx->freq;  in ptp_clock_adjtime()
    159  } else if (tx->modes == 0) {  in ptp_clock_adjtime()
    [all …]
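On the driver side of the ADJ_FREQUENCY branch above, `.adjfine()` receives the scaled-ppm value from `timex.freq`, and a driver that thinks in parts per billion can convert it with scaled_ppm_to_ppb(). A minimal hedged sketch; the hardware frequency-trim write is left as a comment and `my_ptp_adjfine` is an illustrative name.

```c
#include <linux/ptp_clock_kernel.h>

static int my_ptp_adjfine(struct ptp_clock_info *info, long scaled_ppm)
{
	long ppb = scaled_ppm_to_ppb(scaled_ppm);

	/* program the hardware frequency-trim register with 'ppb' here */
	pr_debug("adjusting clock by %ld ppb\n", ppb);
	return 0;
}

/* hooked up via: .adjfine = my_ptp_adjfine in the driver's ptp_clock_info */
```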
|
/linux-4.19.296/drivers/rtc/ |
D | rtc-mcp795.c |
     68  u8 tx[2];  in mcp795_rtcc_read() local
     70  tx[0] = MCP795_READ;  in mcp795_rtcc_read()
     71  tx[1] = addr;  in mcp795_rtcc_read()
     72  ret = spi_write_then_read(spi, tx, sizeof(tx), buf, count);  in mcp795_rtcc_read()
     85  u8 tx[257];  in mcp795_rtcc_write() local
     87  tx[0] = MCP795_WRITE;  in mcp795_rtcc_write()
     88  tx[1] = addr;  in mcp795_rtcc_write()
     89  memcpy(&tx[2], data, count);  in mcp795_rtcc_write()
     91  ret = spi_write(spi, tx, 2 + count);  in mcp795_rtcc_write()
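rtc-mcp795 uses the half-duplex spi_write_then_read()/spi_write() helpers with a two-byte opcode-plus-address header. A generic version of the same accessors with placeholder opcodes (0x03/0x02 below, not the MCP795 command set):

```c
#include <linux/spi/spi.h>
#include <linux/string.h>

#define CMD_READ	0x03	/* placeholder opcodes */
#define CMD_WRITE	0x02

static int regs_read(struct spi_device *spi, u8 addr, u8 *buf, size_t count)
{
	u8 tx[2] = { CMD_READ, addr };

	/* half-duplex: send the 2-byte header, then clock in 'count' bytes */
	return spi_write_then_read(spi, tx, sizeof(tx), buf, count);
}

static int regs_write(struct spi_device *spi, u8 addr, const u8 *data, size_t count)
{
	u8 tx[2 + 255];

	tx[0] = CMD_WRITE;
	tx[1] = addr;
	memcpy(&tx[2], data, count);	/* caller keeps count <= 255 */

	return spi_write(spi, tx, 2 + count);
}
```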
|
/linux-4.19.296/drivers/clk/tegra/ |
D | clk-bpmp.c |
     53  } tx;  member
     80  memcpy(req + 4, clk->tx.data, clk->tx.size);  in tegra_bpmp_clk_transfer()
     84  msg.tx.data = &request;  in tegra_bpmp_clk_transfer()
     85  msg.tx.size = sizeof(request);  in tegra_bpmp_clk_transfer()
    158  msg.tx.data = &request;  in tegra_bpmp_clk_recalc_rate()
    159  msg.tx.size = sizeof(request);  in tegra_bpmp_clk_recalc_rate()
    185  msg.tx.data = &request;  in tegra_bpmp_clk_round_rate()
    186  msg.tx.size = sizeof(request);  in tegra_bpmp_clk_round_rate()
    211  msg.tx.data = &request;  in tegra_bpmp_clk_set_parent()
    212  msg.tx.size = sizeof(request);  in tegra_bpmp_clk_set_parent()
    [all …]
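clk-bpmp.c packages each clock request as a message whose tx side points at the request payload, as the msg.tx.data/msg.tx.size assignments above show. Below is a heavily hedged sketch, assuming the tegra_bpmp_message layout and the tegra_bpmp_transfer() helper from soc/tegra/bpmp.h; the mrq/rx fields and the generic wrapper are assumptions, not taken from the clock driver itself.

```c
#include <linux/string.h>
#include <soc/tegra/bpmp.h>

static int bpmp_send_request(struct tegra_bpmp *bpmp, unsigned int mrq,
			     const void *req, size_t req_size,
			     void *resp, size_t resp_size)
{
	struct tegra_bpmp_message msg;

	memset(&msg, 0, sizeof(msg));
	msg.mrq = mrq;			/* request number (assumed field) */
	msg.tx.data = req;		/* outbound payload, as in the listing */
	msg.tx.size = req_size;
	msg.rx.data = resp;		/* optional response buffer (assumed) */
	msg.rx.size = resp_size;

	return tegra_bpmp_transfer(bpmp, &msg);
}
```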
|