
Searched refs:queue (Results 1 – 25 of 199) sorted by relevance


/linux-4.19.296/drivers/iio/buffer/
industrialio-buffer-dma.c
102 dma_free_coherent(block->queue->dev, PAGE_ALIGN(block->size), in iio_buffer_block_release()
105 iio_buffer_put(&block->queue->buffer); in iio_buffer_block_release()
168 struct iio_dma_buffer_queue *queue, size_t size) in iio_dma_buffer_alloc_block() argument
176 block->vaddr = dma_alloc_coherent(queue->dev, PAGE_ALIGN(size), in iio_dma_buffer_alloc_block()
185 block->queue = queue; in iio_dma_buffer_alloc_block()
189 iio_buffer_get(&queue->buffer); in iio_dma_buffer_alloc_block()
196 struct iio_dma_buffer_queue *queue = block->queue; in _iio_dma_buffer_block_done() local
204 list_add_tail(&block->head, &queue->outgoing); in _iio_dma_buffer_block_done()
217 struct iio_dma_buffer_queue *queue = block->queue; in iio_dma_buffer_block_done() local
220 spin_lock_irqsave(&queue->list_lock, flags); in iio_dma_buffer_block_done()
[all …]
industrialio-buffer-dmaengine.c
32 struct iio_dma_buffer_queue queue; member
44 return container_of(buffer, struct dmaengine_buffer, queue.buffer); in iio_buffer_to_dmaengine_buffer()
52 spin_lock_irqsave(&block->queue->list_lock, flags); in iio_dmaengine_buffer_block_done()
54 spin_unlock_irqrestore(&block->queue->list_lock, flags); in iio_dmaengine_buffer_block_done()
58 static int iio_dmaengine_buffer_submit_block(struct iio_dma_buffer_queue *queue, in iio_dmaengine_buffer_submit_block() argument
62 iio_buffer_to_dmaengine_buffer(&queue->buffer); in iio_dmaengine_buffer_submit_block()
83 spin_lock_irq(&dmaengine_buffer->queue.list_lock); in iio_dmaengine_buffer_submit_block()
85 spin_unlock_irq(&dmaengine_buffer->queue.list_lock); in iio_dmaengine_buffer_submit_block()
92 static void iio_dmaengine_buffer_abort(struct iio_dma_buffer_queue *queue) in iio_dmaengine_buffer_abort() argument
95 iio_buffer_to_dmaengine_buffer(&queue->buffer); in iio_dmaengine_buffer_abort()
[all …]
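
The dmaengine buffer above embeds the generic queue (struct iio_dma_buffer_queue queue; member) and recovers the wrapper from &queue.buffer via container_of(). A minimal userspace sketch of this embed-and-recover pattern, with hypothetical struct names:

    #include <stddef.h>
    #include <stdio.h>

    /* Userspace stand-in for the kernel's container_of(). */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct buffer { int id; };            /* the generic object */

    struct dmaengine_buffer {
            struct buffer base;           /* generic part handed to core code */
            int chan;                     /* driver-private state */
    };

    static struct dmaengine_buffer *to_dmaengine_buffer(struct buffer *b)
    {
            return container_of(b, struct dmaengine_buffer, base);
    }

    int main(void)
    {
            struct dmaengine_buffer db = { .base = { .id = 7 }, .chan = 3 };
            struct buffer *b = &db.base;  /* only the generic view escapes */

            printf("chan=%d\n", to_dmaengine_buffer(b)->chan); /* chan=3 */
            return 0;
    }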
/linux-4.19.296/drivers/misc/genwqe/
card_ddcb.c
91 static int queue_empty(struct ddcb_queue *queue) in queue_empty() argument
93 return queue->ddcb_next == queue->ddcb_act; in queue_empty()
96 static int queue_enqueued_ddcbs(struct ddcb_queue *queue) in queue_enqueued_ddcbs() argument
98 if (queue->ddcb_next >= queue->ddcb_act) in queue_enqueued_ddcbs()
99 return queue->ddcb_next - queue->ddcb_act; in queue_enqueued_ddcbs()
101 return queue->ddcb_max - (queue->ddcb_act - queue->ddcb_next); in queue_enqueued_ddcbs()
104 static int queue_free_ddcbs(struct ddcb_queue *queue) in queue_free_ddcbs() argument
106 int free_ddcbs = queue->ddcb_max - queue_enqueued_ddcbs(queue) - 1; in queue_free_ddcbs()
172 static void print_ddcb_info(struct genwqe_dev *cd, struct ddcb_queue *queue) in print_ddcb_info() argument
183 cd->card_idx, queue->ddcb_act, queue->ddcb_next); in print_ddcb_info()
[all …]
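
queue_enqueued_ddcbs() and queue_free_ddcbs() are standard circular-buffer index arithmetic, with one slot kept free so that ddcb_act == ddcb_next can only mean "empty". A self-contained sketch of the same invariant, using hypothetical field names:

    #include <assert.h>
    #include <stdio.h>

    /* Hypothetical stand-in for struct ddcb_queue's index fields: slots in
     * [0, max); "act" chases "next"; one slot stays free so act == next is
     * unambiguously "empty". */
    struct ring_idx {
            int act;   /* oldest in-flight slot */
            int next;  /* next slot to fill */
            int max;   /* total slots */
    };

    static int ring_enqueued(const struct ring_idx *r)
    {
            if (r->next >= r->act)
                    return r->next - r->act;
            return r->max - (r->act - r->next); /* wrapped case */
    }

    static int ring_free(const struct ring_idx *r)
    {
            return r->max - ring_enqueued(r) - 1; /* -1: reserved empty slot */
    }

    int main(void)
    {
            struct ring_idx r = { .act = 30, .next = 2, .max = 32 };

            assert(ring_enqueued(&r) == 4);  /* wrapped: slots 30,31,0,1 */
            assert(ring_free(&r) == 27);
            printf("enqueued=%d free=%d\n", ring_enqueued(&r), ring_free(&r));
            return 0;
    }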
card_debugfs.c
236 struct ddcb_queue *queue; in genwqe_ddcb_info_show() local
239 queue = &cd->queue; in genwqe_ddcb_info_show()
250 queue->ddcb_max, (long long)queue->ddcb_daddr, in genwqe_ddcb_info_show()
251 (long long)queue->ddcb_daddr + in genwqe_ddcb_info_show()
252 (queue->ddcb_max * DDCB_LENGTH), in genwqe_ddcb_info_show()
253 (long long)queue->ddcb_vaddr, queue->ddcbs_in_flight, in genwqe_ddcb_info_show()
254 queue->ddcbs_max_in_flight, queue->ddcbs_completed, in genwqe_ddcb_info_show()
255 queue->return_on_busy, queue->wait_on_busy, in genwqe_ddcb_info_show()
268 queue->IO_QUEUE_CONFIG, in genwqe_ddcb_info_show()
269 __genwqe_readq(cd, queue->IO_QUEUE_CONFIG), in genwqe_ddcb_info_show()
[all …]
/linux-4.19.296/include/drm/
spsc_queue.h
48 static inline void spsc_queue_init(struct spsc_queue *queue) in spsc_queue_init() argument
50 queue->head = NULL; in spsc_queue_init()
51 atomic_long_set(&queue->tail, (long)&queue->head); in spsc_queue_init()
52 atomic_set(&queue->job_count, 0); in spsc_queue_init()
55 static inline struct spsc_node *spsc_queue_peek(struct spsc_queue *queue) in spsc_queue_peek() argument
57 return queue->head; in spsc_queue_peek()
60 static inline int spsc_queue_count(struct spsc_queue *queue) in spsc_queue_count() argument
62 return atomic_read(&queue->job_count); in spsc_queue_count()
65 static inline bool spsc_queue_push(struct spsc_queue *queue, struct spsc_node *node) in spsc_queue_push() argument
73 tail = (struct spsc_node **)atomic_long_xchg(&queue->tail, (long)&node->next); in spsc_queue_push()
[all …]
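
spsc_queue_push() claims the old tail slot with a single atomic exchange and then writes the new node into it. A userspace C11 sketch of that push, with hypothetical names; in real concurrent use the final store needs release semantics and the consumer must tolerate briefly observing a claimed-but-unlinked node, which the kernel code accounts for:

    #include <stdatomic.h>
    #include <stddef.h>
    #include <stdio.h>

    struct node {
            struct node *next;
            int val;
    };

    struct queue {
            struct node *head;
            _Atomic(struct node **) tail; /* points at the last ->next slot */
    };

    static void queue_init(struct queue *q)
    {
            q->head = NULL;
            atomic_store(&q->tail, &q->head); /* mirrors spsc_queue_init() */
    }

    static void queue_push(struct queue *q, struct node *n)
    {
            n->next = NULL;
            /* Claim the old tail slot in one atomic step; each pusher gets a
             * distinct old slot back, so slots are never written twice. */
            struct node **tail = atomic_exchange(&q->tail, &n->next);
            *tail = n; /* publish the node to the consumer */
    }

    int main(void)
    {
            struct queue q;
            struct node a = { .val = 1 }, b = { .val = 2 };

            queue_init(&q);
            queue_push(&q, &a);
            queue_push(&q, &b);
            for (struct node *n = q.head; n; n = n->next)
                    printf("%d\n", n->val); /* 1 then 2 */
            return 0;
    }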
/linux-4.19.296/include/net/
request_sock.h
179 void reqsk_queue_alloc(struct request_sock_queue *queue);
184 static inline bool reqsk_queue_empty(const struct request_sock_queue *queue) in reqsk_queue_empty() argument
186 return READ_ONCE(queue->rskq_accept_head) == NULL; in reqsk_queue_empty()
189 static inline struct request_sock *reqsk_queue_remove(struct request_sock_queue *queue, in reqsk_queue_remove() argument
194 spin_lock_bh(&queue->rskq_lock); in reqsk_queue_remove()
195 req = queue->rskq_accept_head; in reqsk_queue_remove()
198 WRITE_ONCE(queue->rskq_accept_head, req->dl_next); in reqsk_queue_remove()
199 if (queue->rskq_accept_head == NULL) in reqsk_queue_remove()
200 queue->rskq_accept_tail = NULL; in reqsk_queue_remove()
202 spin_unlock_bh(&queue->rskq_lock); in reqsk_queue_remove()
[all …]
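
reqsk_queue_remove() pops the head of a singly linked accept queue under a spinlock, clearing the tail pointer when the last entry goes. A minimal pthread sketch of the same discipline, types hypothetical:

    #include <pthread.h>
    #include <stdio.h>

    struct req { struct req *next; int id; };

    struct accept_queue {
            pthread_mutex_t lock;
            struct req *head, *tail;
    };

    static void queue_add(struct accept_queue *q, struct req *r)
    {
            pthread_mutex_lock(&q->lock);
            r->next = NULL;
            if (q->tail)
                    q->tail->next = r;
            else
                    q->head = r;
            q->tail = r;
            pthread_mutex_unlock(&q->lock);
    }

    static struct req *queue_remove(struct accept_queue *q)
    {
            pthread_mutex_lock(&q->lock);
            struct req *req = q->head;
            if (req) {
                    q->head = req->next;
                    if (!q->head)
                            q->tail = NULL; /* drained: reset tail as well */
            }
            pthread_mutex_unlock(&q->lock);
            return req;
    }

    int main(void)
    {
            struct accept_queue q = { .lock = PTHREAD_MUTEX_INITIALIZER };
            struct req r = { .id = 1 };

            queue_add(&q, &r);
            printf("%d\n", queue_remove(&q)->id); /* 1 */
            return 0;
    }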
/linux-4.19.296/drivers/s390/crypto/
zcrypt_error.h
85 int card = AP_QID_CARD(zq->queue->qid); in convert_error()
86 int queue = AP_QID_QUEUE(zq->queue->qid); in convert_error() local
103 card, queue, ehdr->reply_code); in convert_error()
115 card, queue); in convert_error()
118 card, queue, ehdr->reply_code); in convert_error()
127 card, queue); in convert_error()
130 card, queue, ehdr->reply_code); in convert_error()
135 card, queue); in convert_error()
138 card, queue, ehdr->reply_code); in convert_error()
zcrypt_queue.c
66 AP_QID_CARD(zq->queue->qid), in online_store()
67 AP_QID_QUEUE(zq->queue->qid), in online_store()
71 ap_flush_queue(zq->queue); in online_store()
102 ap_flush_queue(zq->queue); in zcrypt_queue_force_online()
164 zc = zq->queue->card->private; in zcrypt_queue_register()
170 AP_QID_CARD(zq->queue->qid), AP_QID_QUEUE(zq->queue->qid)); in zcrypt_queue_register()
176 rc = sysfs_create_group(&zq->queue->ap_dev.device.kobj, in zcrypt_queue_register()
180 get_device(&zq->queue->ap_dev.device); in zcrypt_queue_register()
190 sysfs_remove_group(&zq->queue->ap_dev.device.kobj, in zcrypt_queue_register()
192 put_device(&zq->queue->ap_dev.device); in zcrypt_queue_register()
[all …]
zcrypt_msgtype6.c
300 msg->cprbx.domain = AP_QID_QUEUE(zq->queue->qid); in ICAMEX_msg_to_type6MEX_msgX()
370 msg->cprbx.domain = AP_QID_QUEUE(zq->queue->qid); in ICACRT_msg_to_type6CRT_msgX()
656 AP_QID_CARD(zq->queue->qid), in convert_type86_ica()
657 AP_QID_QUEUE(zq->queue->qid), in convert_type86_ica()
666 AP_QID_CARD(zq->queue->qid), in convert_type86_ica()
667 AP_QID_QUEUE(zq->queue->qid), in convert_type86_ica()
673 AP_QID_CARD(zq->queue->qid), in convert_type86_ica()
674 AP_QID_QUEUE(zq->queue->qid)); in convert_type86_ica()
677 AP_QID_CARD(zq->queue->qid), in convert_type86_ica()
678 AP_QID_QUEUE(zq->queue->qid), in convert_type86_ica()
[all …]
zcrypt_api.c
159 if (!zq || !try_module_get(zq->queue->ap_dev.drv->driver.owner)) in zcrypt_pick_queue()
162 get_device(&zq->queue->ap_dev.device); in zcrypt_pick_queue()
173 struct module *mod = zq->queue->ap_dev.drv->driver.owner; in zcrypt_drop_queue()
178 put_device(&zq->queue->ap_dev.device); in zcrypt_drop_queue()
208 return zq->queue->total_request_count > in zcrypt_queue_compare()
209 pref_zq->queue->total_request_count; in zcrypt_queue_compare()
278 qid = pref_zq->queue->qid; in zcrypt_rsa_modexpo()
353 qid = pref_zq->queue->qid; in zcrypt_rsa_crt()
403 (*domain != AP_QID_QUEUE(zq->queue->qid)))) in zcrypt_send_cprb()
422 qid = pref_zq->queue->qid; in zcrypt_send_cprb()
[all …]
/linux-4.19.296/drivers/isdn/i4l/
isdn_net.h
85 lp = nd->queue; /* get lp on top of queue */ in isdn_net_get_locked_lp()
86 while (isdn_net_lp_busy(nd->queue)) { in isdn_net_get_locked_lp()
87 nd->queue = nd->queue->next; in isdn_net_get_locked_lp()
88 if (nd->queue == lp) { /* not found -- should never happen */ in isdn_net_get_locked_lp()
93 lp = nd->queue; in isdn_net_get_locked_lp()
94 nd->queue = nd->queue->next; in isdn_net_get_locked_lp()
114 lp = nd->queue; in isdn_net_add_to_bundle()
121 nd->queue = nlp; in isdn_net_add_to_bundle()
141 if (master_lp->netdev->queue == lp) { in isdn_net_rm_from_bundle()
142 master_lp->netdev->queue = lp->next; in isdn_net_rm_from_bundle()
[all …]
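
isdn_net_get_locked_lp() round-robins over a circular ring of channels, skipping busy ones and giving up after one full lap. A sketch of that walk with hypothetical names:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    struct chan {
            struct chan *next; /* circular singly linked ring */
            bool busy;
            int id;
    };

    static struct chan *pick_idle(struct chan **queue)
    {
            struct chan *start = *queue;

            while ((*queue)->busy) {
                    *queue = (*queue)->next;
                    if (*queue == start)
                            return NULL; /* full lap: everyone busy */
            }
            struct chan *lp = *queue;
            *queue = (*queue)->next; /* advance for fairness next time */
            return lp;
    }

    int main(void)
    {
            struct chan a = { .busy = true,  .id = 0 };
            struct chan b = { .busy = false, .id = 1 };
            a.next = &b;
            b.next = &a;

            struct chan *q = &a;
            struct chan *lp = pick_idle(&q);
            printf("picked %d\n", lp ? lp->id : -1); /* picked 1 */
            return 0;
    }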
/linux-4.19.296/block/
blk-mq-rdma.c
36 unsigned int queue, cpu; in blk_mq_rdma_map_queues() local
38 for (queue = 0; queue < set->nr_hw_queues; queue++) { in blk_mq_rdma_map_queues()
39 mask = ib_get_vector_affinity(dev, first_vec + queue); in blk_mq_rdma_map_queues()
44 set->mq_map[cpu] = queue; in blk_mq_rdma_map_queues()
blk-mq-pci.c
38 unsigned int queue, cpu; in blk_mq_pci_map_queues() local
40 for (queue = 0; queue < set->nr_hw_queues; queue++) { in blk_mq_pci_map_queues()
41 mask = pci_irq_get_affinity(pdev, queue + offset); in blk_mq_pci_map_queues()
46 set->mq_map[cpu] = queue; in blk_mq_pci_map_queues()
blk-mq-virtio.c
36 unsigned int queue, cpu; in blk_mq_virtio_map_queues() local
41 for (queue = 0; queue < set->nr_hw_queues; queue++) { in blk_mq_virtio_map_queues()
42 mask = vdev->config->get_vq_affinity(vdev, first_vec + queue); in blk_mq_virtio_map_queues()
47 set->mq_map[cpu] = queue; in blk_mq_virtio_map_queues()
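
blk-mq-rdma.c, blk-mq-pci.c, and blk-mq-virtio.c all instantiate the same loop: for each hardware queue, fetch the affinity mask of its interrupt vector and point every CPU in that mask at the queue. A userspace sketch of the shared pattern, with a hypothetical stand-in for the affinity lookup (ib_get_vector_affinity() / pci_irq_get_affinity() / get_vq_affinity in the real code):

    #include <stdio.h>

    #define NR_CPUS 8

    struct tag_set {
            unsigned int nr_hw_queues;
            unsigned int mq_map[NR_CPUS]; /* cpu -> hardware queue */
    };

    /* Hypothetical affinity lookup: bitmask of CPUs served by the queue's
     * interrupt vector; here, two CPUs per vector. */
    static unsigned long fake_affinity(unsigned int queue)
    {
            return 0x3UL << (2 * queue);
    }

    static void map_queues(struct tag_set *set)
    {
            for (unsigned int queue = 0; queue < set->nr_hw_queues; queue++) {
                    unsigned long mask = fake_affinity(queue);

                    for (unsigned int cpu = 0; cpu < NR_CPUS; cpu++)
                            if (mask & (1UL << cpu))
                                    set->mq_map[cpu] = queue;
            }
    }

    int main(void)
    {
            struct tag_set set = { .nr_hw_queues = 4 };

            map_queues(&set);
            for (unsigned int cpu = 0; cpu < NR_CPUS; cpu++)
                    printf("cpu%u -> hwq%u\n", cpu, set.mq_map[cpu]);
            return 0;
    }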
noop-iosched.c
12 struct list_head queue; member
26 rq = list_first_entry_or_null(&nd->queue, struct request, queuelist); in noop_dispatch()
39 list_add_tail(&rq->queuelist, &nd->queue); in noop_add_request()
47 if (rq->queuelist.prev == &nd->queue) in noop_former_request()
57 if (rq->queuelist.next == &nd->queue) in noop_latter_request()
78 INIT_LIST_HEAD(&nd->queue); in noop_init_queue()
90 BUG_ON(!list_empty(&nd->queue)); in noop_exit_queue()
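
noop-iosched.c is little more than a list_head FIFO: requests are appended with list_add_tail() and dispatched with list_first_entry_or_null(). A minimal userspace rendition of that intrusive list, names hypothetical; note that an empty list (or a freshly initialized node) points at itself, which is also why code elsewhere calls INIT_LIST_HEAD() just so a later list_del() is safe:

    #include <stddef.h>
    #include <stdio.h>

    struct list_head { struct list_head *prev, *next; };

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    static void list_init(struct list_head *h) { h->prev = h->next = h; }

    static void list_add_tail(struct list_head *n, struct list_head *h)
    {
            n->prev = h->prev;
            n->next = h;
            h->prev->next = n;
            h->prev = n;
    }

    struct request {
            struct list_head queuelist;
            int id;
    };

    int main(void)
    {
            struct list_head queue;
            struct request r1 = { .id = 1 }, r2 = { .id = 2 };

            list_init(&queue);
            list_add_tail(&r1.queuelist, &queue);
            list_add_tail(&r2.queuelist, &queue);

            /* noop_dispatch()'s list_first_entry_or_null, spelled out: */
            struct request *rq = queue.next == &queue ? NULL :
                    container_of(queue.next, struct request, queuelist);
            printf("first: %d\n", rq ? rq->id : -1); /* first: 1 */
            return 0;
    }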
/linux-4.19.296/include/linux/iio/
buffer-dma.h
65 struct iio_dma_buffer_queue *queue; member
127 int (*submit)(struct iio_dma_buffer_queue *queue,
129 void (*abort)(struct iio_dma_buffer_queue *queue);
133 void iio_dma_buffer_block_list_abort(struct iio_dma_buffer_queue *queue,
147 int iio_dma_buffer_init(struct iio_dma_buffer_queue *queue,
149 void iio_dma_buffer_exit(struct iio_dma_buffer_queue *queue);
150 void iio_dma_buffer_release(struct iio_dma_buffer_queue *queue);
/linux-4.19.296/include/linux/
ptr_ring.h
44 void **queue; member
55 return r->queue[r->producer]; in __ptr_ring_full()
110 if (unlikely(!r->size) || r->queue[r->producer]) in __ptr_ring_produce()
117 WRITE_ONCE(r->queue[r->producer++], ptr); in __ptr_ring_produce()
176 return READ_ONCE(r->queue[r->consumer_head]); in __ptr_ring_peek()
201 return !r->queue[READ_ONCE(r->consumer_head)]; in __ptr_ring_empty()
284 r->queue[head--] = NULL; in __ptr_ring_discard_one()
481 r->batch = SMP_CACHE_BYTES * 2 / sizeof(*(r->queue)); in __ptr_ring_set_size()
493 r->queue = __ptr_ring_init_queue_alloc(size, gfp); in ptr_ring_init()
494 if (!r->queue) in ptr_ring_init()
[all …]
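
ptr_ring's core trick is that a free slot holds NULL: the producer tests fullness by reading its own slot and the consumer tests emptiness the same way, so neither side needs the other's index. A single-threaded sketch, names hypothetical (the kernel version adds READ_ONCE/WRITE_ONCE, locking, and batched consumption):

    #include <stdio.h>
    #include <string.h>

    #define RING_SIZE 4

    struct ring {
            void *queue[RING_SIZE];
            int producer;
            int consumer;
    };

    static int ring_produce(struct ring *r, void *ptr) /* ptr must be non-NULL */
    {
            if (r->queue[r->producer])
                    return -1; /* slot still owned by consumer: ring full */
            r->queue[r->producer] = ptr;
            r->producer = (r->producer + 1) % RING_SIZE;
            return 0;
    }

    static void *ring_consume(struct ring *r)
    {
            void *ptr = r->queue[r->consumer];
            if (!ptr)
                    return NULL; /* empty */
            r->queue[r->consumer] = NULL; /* hand slot back to producer */
            r->consumer = (r->consumer + 1) % RING_SIZE;
            return ptr;
    }

    int main(void)
    {
            struct ring r;
            int x = 42;

            memset(&r, 0, sizeof(r));
            ring_produce(&r, &x);
            printf("%d\n", *(int *)ring_consume(&r)); /* 42 */
            return 0;
    }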
/linux-4.19.296/drivers/media/common/videobuf2/
videobuf2-v4l2.c
705 return vdev->queue->owner && vdev->queue->owner != file->private_data; in vb2_queue_is_busy()
714 int res = vb2_verify_memory_type(vdev->queue, p->memory, p->type); in vb2_ioctl_reqbufs()
720 res = vb2_core_reqbufs(vdev->queue, p->memory, &p->count); in vb2_ioctl_reqbufs()
724 vdev->queue->owner = p->count ? file->private_data : NULL; in vb2_ioctl_reqbufs()
733 int res = vb2_verify_memory_type(vdev->queue, p->memory, in vb2_ioctl_create_bufs()
736 p->index = vdev->queue->num_buffers; in vb2_ioctl_create_bufs()
748 res = vb2_create_bufs(vdev->queue, p); in vb2_ioctl_create_bufs()
750 vdev->queue->owner = file->private_data; in vb2_ioctl_create_bufs()
762 return vb2_prepare_buf(vdev->queue, p); in vb2_ioctl_prepare_buf()
771 return vb2_querybuf(vdev->queue, p); in vb2_ioctl_querybuf()
[all …]
/linux-4.19.296/crypto/
mcryptd.c
45 struct mcryptd_queue *queue; member
68 static int mcryptd_init_queue(struct mcryptd_queue *queue, in mcryptd_init_queue() argument
74 queue->cpu_queue = alloc_percpu(struct mcryptd_cpu_queue); in mcryptd_init_queue()
75 pr_debug("mqueue:%p mcryptd_cpu_queue %p\n", queue, queue->cpu_queue); in mcryptd_init_queue()
76 if (!queue->cpu_queue) in mcryptd_init_queue()
79 cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu); in mcryptd_init_queue()
80 pr_debug("cpu_queue #%d %p\n", cpu, queue->cpu_queue); in mcryptd_init_queue()
81 crypto_init_queue(&cpu_queue->queue, max_cpu_qlen); in mcryptd_init_queue()
88 static void mcryptd_fini_queue(struct mcryptd_queue *queue) in mcryptd_fini_queue() argument
94 cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu); in mcryptd_fini_queue()
[all …]
cryptd.c
40 struct crypto_queue queue; member
50 struct cryptd_queue *queue; member
55 struct cryptd_queue *queue; member
60 struct cryptd_queue *queue; member
65 struct cryptd_queue *queue; member
107 static int cryptd_init_queue(struct cryptd_queue *queue, in cryptd_init_queue() argument
113 queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue); in cryptd_init_queue()
114 if (!queue->cpu_queue) in cryptd_init_queue()
117 cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu); in cryptd_init_queue()
118 crypto_init_queue(&cpu_queue->queue, max_cpu_qlen); in cryptd_init_queue()
[all …]
/linux-4.19.296/include/crypto/
algapi.h
191 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
192 int crypto_enqueue_request(struct crypto_queue *queue,
194 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
195 int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
196 static inline unsigned int crypto_queue_len(struct crypto_queue *queue) in crypto_queue_len() argument
198 return queue->qlen; in crypto_queue_len()
354 struct crypto_queue *queue) in crypto_get_backlog() argument
356 return queue->backlog == &queue->list ? NULL : in crypto_get_backlog()
357 container_of(queue->backlog, struct crypto_async_request, list); in crypto_get_backlog()
360 static inline int ablkcipher_enqueue_request(struct crypto_queue *queue, in ablkcipher_enqueue_request() argument
[all …]
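
crypto_get_backlog() parks its backlog cursor on the list head itself to mean "nothing backlogged", and otherwise maps the list node back to the request with container_of(). A sketch of that sentinel test, names hypothetical:

    #include <stddef.h>
    #include <stdio.h>

    struct list_head { struct list_head *prev, *next; };

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct async_request { struct list_head list; int id; };

    struct queue {
            struct list_head list;     /* all pending requests */
            struct list_head *backlog; /* first backlogged one, or &list */
    };

    static struct async_request *get_backlog(struct queue *q)
    {
            /* Cursor on the head sentinel means the backlog is empty. */
            return q->backlog == &q->list ? NULL :
                    container_of(q->backlog, struct async_request, list);
    }

    int main(void)
    {
            struct queue q = { .backlog = &q.list };

            printf("%p\n", (void *)get_backlog(&q)); /* null: none backlogged */
            return 0;
    }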
/linux-4.19.296/include/crypto/internal/
aead.h
131 static inline void aead_init_queue(struct aead_queue *queue, in aead_init_queue() argument
134 crypto_init_queue(&queue->base, max_qlen); in aead_init_queue()
137 static inline int aead_enqueue_request(struct aead_queue *queue, in aead_enqueue_request() argument
140 return crypto_enqueue_request(&queue->base, &request->base); in aead_enqueue_request()
144 struct aead_queue *queue) in aead_dequeue_request() argument
148 req = crypto_dequeue_request(&queue->base); in aead_dequeue_request()
153 static inline struct aead_request *aead_get_backlog(struct aead_queue *queue) in aead_get_backlog() argument
157 req = crypto_get_backlog(&queue->base); in aead_get_backlog()
/linux-4.19.296/include/uapi/sound/
asequencer.h
252 unsigned char queue; /* affected queue */ member
278 unsigned char queue; /* schedule queue */ member
291 struct snd_seq_ev_queue_control queue; member
397 unsigned char queue; /* Queue for REMOVE_DEST */ member
476 int queue; /* queue id */ member
492 int queue; /* queue id */ member
504 int queue; /* sequencer queue */ member
520 int queue; /* sequencer queue */ member
533 int queue; /* sequencer queue */ member
551 unsigned char queue; /* input time-stamp queue (optional) */ member
[all …]
/linux-4.19.296/virt/kvm/
async_pf.c
68 INIT_LIST_HEAD(&vcpu->async_pf.queue); in kvm_async_pf_vcpu_init()
121 while (!list_empty(&vcpu->async_pf.queue)) { in kvm_clear_async_pf_completion_queue()
123 list_first_entry(&vcpu->async_pf.queue, in kvm_clear_async_pf_completion_queue()
124 typeof(*work), queue); in kvm_clear_async_pf_completion_queue()
125 list_del(&work->queue); in kvm_clear_async_pf_completion_queue()
174 list_del(&work->queue); in kvm_check_async_pf_completion()
216 list_add_tail(&work->queue, &vcpu->async_pf.queue); in kvm_setup_async_pf()
239 INIT_LIST_HEAD(&work->queue); /* for list_del to work */ in kvm_async_pf_wakeup_all()
/linux-4.19.296/drivers/misc/vmw_vmci/
vmci_queue_pair.c
256 struct vmci_queue *queue = q; in qp_free_queue() local
258 if (queue) { in qp_free_queue()
264 queue->kernel_if->u.g.vas[i], in qp_free_queue()
265 queue->kernel_if->u.g.pas[i]); in qp_free_queue()
268 vfree(queue); in qp_free_queue()
280 struct vmci_queue *queue; in qp_alloc_queue() local
283 size_t queue_size = sizeof(*queue) + sizeof(*queue->kernel_if); in qp_alloc_queue()
291 (sizeof(*queue->kernel_if->u.g.pas) + in qp_alloc_queue()
292 sizeof(*queue->kernel_if->u.g.vas))) in qp_alloc_queue()
295 pas_size = num_pages * sizeof(*queue->kernel_if->u.g.pas); in qp_alloc_queue()
[all …]
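
qp_alloc_queue() guards its header-plus-arrays size computation against overflow by dividing before it multiplies. A userspace sketch of the same guard, names hypothetical:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct queue_hdr { size_t num_pages; }; /* hypothetical header */

    static void *alloc_queue(size_t num_pages)
    {
            const size_t header = sizeof(struct queue_hdr);
            const size_t per_page = sizeof(uint64_t) + sizeof(void *); /* pa + va */

            /* Same division trick as the kernel code: reject any num_pages
             * for which header + num_pages * per_page would wrap SIZE_MAX. */
            if (num_pages > (SIZE_MAX - header) / per_page)
                    return NULL;

            return calloc(1, header + num_pages * per_page);
    }

    int main(void)
    {
            printf("%s\n", alloc_queue(SIZE_MAX / 2) ? "ok" : "rejected");
            printf("%s\n", alloc_queue(16) ? "ok" : "rejected");
            return 0;
    }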
