Lines Matching refs:m2m_dev

228 void *v4l2_m2m_get_curr_priv(struct v4l2_m2m_dev *m2m_dev)  in v4l2_m2m_get_curr_priv()  argument
233 spin_lock_irqsave(&m2m_dev->job_spinlock, flags); in v4l2_m2m_get_curr_priv()
234 if (m2m_dev->curr_ctx) in v4l2_m2m_get_curr_priv()
235 ret = m2m_dev->curr_ctx->priv; in v4l2_m2m_get_curr_priv()
236 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags); in v4l2_m2m_get_curr_priv()
248 static void v4l2_m2m_try_run(struct v4l2_m2m_dev *m2m_dev) in v4l2_m2m_try_run() argument
252 spin_lock_irqsave(&m2m_dev->job_spinlock, flags); in v4l2_m2m_try_run()
253 if (NULL != m2m_dev->curr_ctx) { in v4l2_m2m_try_run()
254 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags); in v4l2_m2m_try_run()
259 if (list_empty(&m2m_dev->job_queue)) { in v4l2_m2m_try_run()
260 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags); in v4l2_m2m_try_run()
265 m2m_dev->curr_ctx = list_first_entry(&m2m_dev->job_queue, in v4l2_m2m_try_run()
267 m2m_dev->curr_ctx->job_flags |= TRANS_RUNNING; in v4l2_m2m_try_run()
268 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags); in v4l2_m2m_try_run()
270 dprintk("Running job on m2m_ctx: %p\n", m2m_dev->curr_ctx); in v4l2_m2m_try_run()
271 m2m_dev->m2m_ops->device_run(m2m_dev->curr_ctx->priv); in v4l2_m2m_try_run()
283 static void __v4l2_m2m_try_queue(struct v4l2_m2m_dev *m2m_dev, in __v4l2_m2m_try_queue() argument
296 spin_lock_irqsave(&m2m_dev->job_spinlock, flags_job); in __v4l2_m2m_try_queue()
300 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags_job); in __v4l2_m2m_try_queue()
306 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags_job); in __v4l2_m2m_try_queue()
316 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags_job); in __v4l2_m2m_try_queue()
327 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags_job); in __v4l2_m2m_try_queue()
334 if (m2m_dev->m2m_ops->job_ready in __v4l2_m2m_try_queue()
335 && (!m2m_dev->m2m_ops->job_ready(m2m_ctx->priv))) { in __v4l2_m2m_try_queue()
336 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags_job); in __v4l2_m2m_try_queue()
341 list_add_tail(&m2m_ctx->queue, &m2m_dev->job_queue); in __v4l2_m2m_try_queue()
344 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags_job); in __v4l2_m2m_try_queue()
361 struct v4l2_m2m_dev *m2m_dev = m2m_ctx->m2m_dev; in v4l2_m2m_try_schedule() local
363 __v4l2_m2m_try_queue(m2m_dev, m2m_ctx); in v4l2_m2m_try_schedule()
364 v4l2_m2m_try_run(m2m_dev); in v4l2_m2m_try_schedule()
379 struct v4l2_m2m_dev *m2m_dev; in v4l2_m2m_cancel_job() local
382 m2m_dev = m2m_ctx->m2m_dev; in v4l2_m2m_cancel_job()
383 spin_lock_irqsave(&m2m_dev->job_spinlock, flags); in v4l2_m2m_cancel_job()
387 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags); in v4l2_m2m_cancel_job()
388 if (m2m_dev->m2m_ops->job_abort) in v4l2_m2m_cancel_job()
389 m2m_dev->m2m_ops->job_abort(m2m_ctx->priv); in v4l2_m2m_cancel_job()
396 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags); in v4l2_m2m_cancel_job()
401 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags); in v4l2_m2m_cancel_job()
405 void v4l2_m2m_job_finish(struct v4l2_m2m_dev *m2m_dev, in v4l2_m2m_job_finish() argument
410 spin_lock_irqsave(&m2m_dev->job_spinlock, flags); in v4l2_m2m_job_finish()
411 if (!m2m_dev->curr_ctx || m2m_dev->curr_ctx != m2m_ctx) { in v4l2_m2m_job_finish()
412 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags); in v4l2_m2m_job_finish()
417 list_del(&m2m_dev->curr_ctx->queue); in v4l2_m2m_job_finish()
418 m2m_dev->curr_ctx->job_flags &= ~(TRANS_QUEUED | TRANS_RUNNING); in v4l2_m2m_job_finish()
419 wake_up(&m2m_dev->curr_ctx->finished); in v4l2_m2m_job_finish()
420 m2m_dev->curr_ctx = NULL; in v4l2_m2m_job_finish()
422 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags); in v4l2_m2m_job_finish()
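
v4l2_m2m_job_finish() above is what removes the running context from the job queue and clears curr_ctx, so a driver must call it exactly once per completed job, typically from its interrupt handler, after which v4l2_m2m_try_run() can start the next queued context. A hedged sketch of that completion path, reusing the hypothetical my_* names from the sketch above (the IRQ wiring itself is assumed):

#include <linux/interrupt.h>

/* Hedged sketch: hardware-done interrupt completes the current job. */
static irqreturn_t my_irq_handler(int irq, void *data)
{
	struct my_dev *dev = data;
	struct my_ctx *ctx;
	struct vb2_v4l2_buffer *src, *dst;

	/* v4l2_m2m_get_curr_priv() (lines 228-236 above) returns the priv
	 * pointer of the context whose job is currently running. */
	ctx = v4l2_m2m_get_curr_priv(dev->m2m_dev);
	if (!ctx)
		return IRQ_HANDLED;

	/* Hand the processed buffers back to vb2 ... */
	src = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
	dst = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
	vb2_buffer_done(&src->vb2_buf, VB2_BUF_STATE_DONE);
	vb2_buffer_done(&dst->vb2_buf, VB2_BUF_STATE_DONE);

	/* ... then let the scheduler move on (lines 405-422 above). */
	v4l2_m2m_job_finish(dev->m2m_dev, ctx->m2m_ctx);

	return IRQ_HANDLED;
}
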
579 struct v4l2_m2m_dev *m2m_dev; in v4l2_m2m_streamoff() local
592 m2m_dev = m2m_ctx->m2m_dev; in v4l2_m2m_streamoff()
593 spin_lock_irqsave(&m2m_dev->job_spinlock, flags_job); in v4l2_m2m_streamoff()
606 if (m2m_dev->curr_ctx == m2m_ctx) { in v4l2_m2m_streamoff()
607 m2m_dev->curr_ctx = NULL; in v4l2_m2m_streamoff()
610 spin_unlock_irqrestore(&m2m_dev->job_spinlock, flags_job); in v4l2_m2m_streamoff()
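
v4l2_m2m_streamoff() above also takes job_spinlock so it can drop the context from the job queue and, if it was the one running, reset curr_ctx. Drivers usually just forward their VIDIOC_STREAMOFF handling to it; a minimal sketch continuing the hypothetical my_ctx from above:

/* Hedged sketch: forwarding the streamoff ioctl to the m2m helper. */
static int my_vidioc_streamoff(struct file *file, void *fh,
			       enum v4l2_buf_type type)
{
	struct my_ctx *ctx = file->private_data;

	return v4l2_m2m_streamoff(file, ctx->m2m_ctx, type);
}
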
712 void v4l2_m2m_unregister_media_controller(struct v4l2_m2m_dev *m2m_dev) in v4l2_m2m_unregister_media_controller() argument
714 media_remove_intf_links(&m2m_dev->intf_devnode->intf); in v4l2_m2m_unregister_media_controller()
715 media_devnode_remove(m2m_dev->intf_devnode); in v4l2_m2m_unregister_media_controller()
717 media_entity_remove_links(m2m_dev->source); in v4l2_m2m_unregister_media_controller()
718 media_entity_remove_links(&m2m_dev->sink); in v4l2_m2m_unregister_media_controller()
719 media_entity_remove_links(&m2m_dev->proc); in v4l2_m2m_unregister_media_controller()
720 media_device_unregister_entity(m2m_dev->source); in v4l2_m2m_unregister_media_controller()
721 media_device_unregister_entity(&m2m_dev->sink); in v4l2_m2m_unregister_media_controller()
722 media_device_unregister_entity(&m2m_dev->proc); in v4l2_m2m_unregister_media_controller()
723 kfree(m2m_dev->source->name); in v4l2_m2m_unregister_media_controller()
724 kfree(m2m_dev->sink.name); in v4l2_m2m_unregister_media_controller()
725 kfree(m2m_dev->proc.name); in v4l2_m2m_unregister_media_controller()
730 struct v4l2_m2m_dev *m2m_dev, enum v4l2_m2m_entity_type type, in v4l2_m2m_register_entity() argument
742 entity = m2m_dev->source; in v4l2_m2m_register_entity()
743 pads = &m2m_dev->source_pad; in v4l2_m2m_register_entity()
748 entity = &m2m_dev->sink; in v4l2_m2m_register_entity()
749 pads = &m2m_dev->sink_pad; in v4l2_m2m_register_entity()
754 entity = &m2m_dev->proc; in v4l2_m2m_register_entity()
755 pads = m2m_dev->proc_pads; in v4l2_m2m_register_entity()
787 int v4l2_m2m_register_media_controller(struct v4l2_m2m_dev *m2m_dev, in v4l2_m2m_register_media_controller() argument
803 m2m_dev->source = &vdev->entity; in v4l2_m2m_register_media_controller()
804 ret = v4l2_m2m_register_entity(mdev, m2m_dev, in v4l2_m2m_register_media_controller()
808 ret = v4l2_m2m_register_entity(mdev, m2m_dev, in v4l2_m2m_register_media_controller()
812 ret = v4l2_m2m_register_entity(mdev, m2m_dev, in v4l2_m2m_register_media_controller()
818 ret = media_create_pad_link(m2m_dev->source, 0, &m2m_dev->proc, 0, in v4l2_m2m_register_media_controller()
823 ret = media_create_pad_link(&m2m_dev->proc, 1, &m2m_dev->sink, 0, in v4l2_m2m_register_media_controller()
829 m2m_dev->intf_devnode = media_devnode_create(mdev, in v4l2_m2m_register_media_controller()
832 if (!m2m_dev->intf_devnode) { in v4l2_m2m_register_media_controller()
838 link = media_create_intf_link(m2m_dev->source, in v4l2_m2m_register_media_controller()
839 &m2m_dev->intf_devnode->intf, in v4l2_m2m_register_media_controller()
846 link = media_create_intf_link(&m2m_dev->sink, in v4l2_m2m_register_media_controller()
847 &m2m_dev->intf_devnode->intf, in v4l2_m2m_register_media_controller()
856 media_remove_intf_links(&m2m_dev->intf_devnode->intf); in v4l2_m2m_register_media_controller()
858 media_devnode_remove(m2m_dev->intf_devnode); in v4l2_m2m_register_media_controller()
860 media_entity_remove_links(&m2m_dev->sink); in v4l2_m2m_register_media_controller()
862 media_entity_remove_links(&m2m_dev->proc); in v4l2_m2m_register_media_controller()
863 media_entity_remove_links(m2m_dev->source); in v4l2_m2m_register_media_controller()
865 media_device_unregister_entity(&m2m_dev->proc); in v4l2_m2m_register_media_controller()
866 kfree(m2m_dev->proc.name); in v4l2_m2m_register_media_controller()
868 media_device_unregister_entity(&m2m_dev->sink); in v4l2_m2m_register_media_controller()
869 kfree(m2m_dev->sink.name); in v4l2_m2m_register_media_controller()
871 media_device_unregister_entity(m2m_dev->source); in v4l2_m2m_register_media_controller()
872 kfree(m2m_dev->source->name); in v4l2_m2m_register_media_controller()
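
The registration block above creates the source, proc and sink entities, the two pad links and the interface devnode, and its error path unwinds them in reverse order. From a driver the whole topology is driven by a single register/unregister pair; a hedged sketch (the MEDIA_ENT_F_PROC_VIDEO_SCALER function code is only an example value, and the my_* names remain hypothetical):

#include <media/media-entity.h>

#ifdef CONFIG_MEDIA_CONTROLLER
/* Hedged sketch: probe-time registration and remove-time teardown. */
static int my_register_mc(struct my_dev *dev)
{
	/* Builds the source -> proc -> sink topology shown above;
	 * pick the MEDIA_ENT_F_* code that matches the hardware. */
	return v4l2_m2m_register_media_controller(dev->m2m_dev, dev->vfd,
						  MEDIA_ENT_F_PROC_VIDEO_SCALER);
}

static void my_unregister_mc(struct my_dev *dev)
{
	v4l2_m2m_unregister_media_controller(dev->m2m_dev);
}
#endif /* CONFIG_MEDIA_CONTROLLER */
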
881 struct v4l2_m2m_dev *m2m_dev; in v4l2_m2m_init() local
886 m2m_dev = kzalloc(sizeof *m2m_dev, GFP_KERNEL); in v4l2_m2m_init()
887 if (!m2m_dev) in v4l2_m2m_init()
890 m2m_dev->curr_ctx = NULL; in v4l2_m2m_init()
891 m2m_dev->m2m_ops = m2m_ops; in v4l2_m2m_init()
892 INIT_LIST_HEAD(&m2m_dev->job_queue); in v4l2_m2m_init()
893 spin_lock_init(&m2m_dev->job_spinlock); in v4l2_m2m_init()
895 return m2m_dev; in v4l2_m2m_init()
899 void v4l2_m2m_release(struct v4l2_m2m_dev *m2m_dev) in v4l2_m2m_release() argument
901 kfree(m2m_dev); in v4l2_m2m_release()
905 struct v4l2_m2m_ctx *v4l2_m2m_ctx_init(struct v4l2_m2m_dev *m2m_dev, in v4l2_m2m_ctx_init() argument
918 m2m_ctx->m2m_dev = m2m_dev; in v4l2_m2m_ctx_init()
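
The final entries, v4l2_m2m_init(), v4l2_m2m_release() and v4l2_m2m_ctx_init(), are the lifecycle entry points: one m2m_dev per device at probe time, one m2m_ctx per open file. A hedged end-to-end sketch tying together the hypothetical my_* pieces from the earlier snippets (the queue_init body is driver-specific and only stubbed; real code must configure both vb2 queues and call vb2_queue_init() on them):

#include <linux/err.h>
#include <linux/slab.h>

/* Hedged sketch: per-device setup/teardown around the listed helpers. */
static int my_create_m2m(struct my_dev *dev)
{
	dev->m2m_dev = v4l2_m2m_init(&my_m2m_ops);
	return PTR_ERR_OR_ZERO(dev->m2m_dev);
}

static void my_destroy_m2m(struct my_dev *dev)
{
	v4l2_m2m_release(dev->m2m_dev);	/* just kfree()s it, lines 899-901 */
}

/* Stub queue_init; a real driver fills in src_vq/dst_vq and calls
 * vb2_queue_init() on both before returning. */
static int my_queue_init(void *priv, struct vb2_queue *src_vq,
			 struct vb2_queue *dst_vq)
{
	return 0;
}

/* Hedged sketch of the open/release pair; real m2m drivers normally embed
 * a struct v4l2_fh rather than using private_data directly. */
static int my_open(struct file *file)
{
	struct my_dev *dev = video_drvdata(file);
	struct my_ctx *ctx;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;
	ctx->dev = dev;

	/* Creates the per-open context and its two vb2 queues. */
	ctx->m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx, my_queue_init);
	if (IS_ERR(ctx->m2m_ctx)) {
		int ret = PTR_ERR(ctx->m2m_ctx);

		kfree(ctx);
		return ret;
	}

	file->private_data = ctx;
	return 0;
}

static int my_release(struct file *file)
{
	struct my_ctx *ctx = file->private_data;

	v4l2_m2m_ctx_release(ctx->m2m_ctx);
	kfree(ctx);
	return 0;
}
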