
Searched refs:atomic_dec_return (Results 1 – 25 of 39) sorted by relevance

/linux-4.19.296/include/linux/
atomic.h 173 #ifndef atomic_dec_return
174 #define atomic_dec_return(v) atomic_sub_return(1, (v)) macro
179 #define atomic_dec_return_relaxed atomic_dec_return
180 #define atomic_dec_return_acquire atomic_dec_return
181 #define atomic_dec_return_release atomic_dec_return
188 __atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
193 __atomic_op_release(atomic_dec_return, __VA_ARGS__)
196 #ifndef atomic_dec_return
197 #define atomic_dec_return(...) \ macro
198 __atomic_op_fence(atomic_dec_return, __VA_ARGS__)
[all …]
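The atomic.h excerpt above is the generic fallback layer: when an architecture supplies no atomic_dec_return of its own, it is defined as atomic_sub_return(1, (v)), and the _relaxed/_acquire/_release names either alias the fully ordered op or are built from the relaxed one via __atomic_op_acquire/__atomic_op_release/__atomic_op_fence. Below is a minimal userspace sketch of that layering using C11 <stdatomic.h>; the helper names are hypothetical and only mimic the kernel API, they are not part of it.

```c
#include <stdatomic.h>
#include <stdio.h>

/* dec_return is just sub_return of 1; the flavours differ only in the
 * memory order passed to the underlying read-modify-write. */
static int sub_return(atomic_int *v, int i, memory_order mo)
{
    /* fetch_sub returns the old value; the kernel's *_return ops hand
     * back the new value, hence the "- i". */
    return atomic_fetch_sub_explicit(v, i, mo) - i;
}

static int dec_return(atomic_int *v)         { return sub_return(v, 1, memory_order_seq_cst); }
static int dec_return_relaxed(atomic_int *v) { return sub_return(v, 1, memory_order_relaxed); }
static int dec_return_acquire(atomic_int *v) { return sub_return(v, 1, memory_order_acquire); }
static int dec_return_release(atomic_int *v) { return sub_return(v, 1, memory_order_release); }

int main(void)
{
    atomic_int v = 4;
    printf("%d\n", dec_return(&v));          /* 3 */
    printf("%d\n", dec_return_relaxed(&v));  /* 2 */
    printf("%d\n", dec_return_acquire(&v));  /* 1 */
    printf("%d\n", dec_return_release(&v));  /* 0 */
    return 0;
}
```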
mbcache.h 51 unsigned int cnt = atomic_dec_return(&entry->e_refcnt); in mb_cache_entry_put()
page_ref.h 148 int ret = atomic_dec_return(&page->_refcount); in page_ref_dec_return()
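Most hits in this listing (mb_cache_entry_put, page_ref_dec_return, raw3270_put_view, afs_put_call, cachefiles_put_object, dasd_put_device, ...) follow the same release pattern: drop one reference and tear the object down only when the returned count reaches zero. A minimal userspace sketch of that pattern; struct object and object_put() are hypothetical names, and C11 atomic_fetch_sub stands in for the kernel's atomic_dec_return.

```c
#include <stdatomic.h>
#include <stdlib.h>

struct object {
    atomic_int refcnt;      /* number of outstanding references */
    /* ... payload ... */
};

static void object_put(struct object *obj)
{
    /* fetch_sub returns the old value, so "old - 1" is the new count,
     * matching what atomic_dec_return() returns in the kernel. */
    if (atomic_fetch_sub(&obj->refcnt, 1) - 1 == 0)
        free(obj);          /* last reference: release the object */
}
```

Because the decrement and the returned value come from one atomic read-modify-write, exactly one caller observes zero, so the teardown runs once even when several threads drop references concurrently.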
/linux-4.19.296/include/asm-generic/
atomic-instrumented.h 220 #define atomic_dec_return atomic_dec_return macro
221 static __always_inline int atomic_dec_return(atomic_t *v) in atomic_dec_return() function
/linux-4.19.296/drivers/misc/sgi-xp/
xpc_main.c 752 if (atomic_dec_return(&ch->kthreads_assigned) == 0 && in xpc_kthread_start()
753 atomic_dec_return(&part->nchannels_engaged) == 0) { in xpc_kthread_start()
829 if (atomic_dec_return(&ch->kthreads_assigned) == 0 && in xpc_create_kthreads()
830 atomic_dec_return(&part->nchannels_engaged) == 0) { in xpc_create_kthreads()
xpnet.c 352 if (atomic_dec_return(&queued_msg->use_count) == 0) { in xpnet_send_completed()
487 if (atomic_dec_return(&queued_msg->use_count) == 0) { in xpnet_dev_hard_start_xmit()
xpc.h 956 s32 refs = atomic_dec_return(&ch->references); in xpc_msgqueue_deref()
973 s32 refs = atomic_dec_return(&part->references); in xpc_part_deref()
/linux-4.19.296/drivers/s390/char/
raw3270.h 187 if (atomic_dec_return(&view->ref_count) == 0) in raw3270_put_view()
/linux-4.19.296/drivers/misc/sgi-gru/
grutlbpurge.c 325 if (atomic_dec_return(&gms->ms_refcnt) == 0) { in gru_drop_mmu_notifier()
/linux-4.19.296/fs/afs/
cell.c 491 if (atomic_dec_return(&cell->usage) > 1) in afs_put_cell()
730 usage = atomic_dec_return(&cell->usage); in afs_manage_cells()
rxrpc.c 159 int n = atomic_dec_return(&call->usage); in afs_put_call()
185 o = atomic_dec_return(&net->nr_outstanding_calls); in afs_put_call()
server.c 354 usage = atomic_dec_return(&server->usage); in afs_put_server()
/linux-4.19.296/block/
blk-iolatency.c 588 inflight = atomic_dec_return(&rqw->inflight); in blkcg_iolatency_done_bio()
772 if (atomic_dec_return(&blkiolat->enable_cnt) == 0) in iolatency_set_min_lat_nsec()
blk-wbt.c 132 inflight = atomic_dec_return(&rqw->inflight); in wbt_rqw_done()
/linux-4.19.296/include/linux/sunrpc/
xprt.h 484 if (atomic_dec_return(&xprt->inject_disconnect)) in xprt_inject_disconnect()
/linux-4.19.296/drivers/edac/
edac_pci_sysfs.c 415 if (atomic_dec_return(&edac_pci_sysfs_refcount) == 0) { in edac_pci_main_kobj_teardown()
ghes_edac.c 541 if (atomic_dec_return(&ghes_init)) in ghes_edac_unregister()
/linux-4.19.296/lib/
sbitmap.c 471 wait_cnt = atomic_dec_return(&ws->wait_cnt); in __sbq_wake_up()
/linux-4.19.296/drivers/cpufreq/
arm_big_little.c 330 if (atomic_dec_return(&cluster_usage[cluster])) in put_cluster_clk_and_freq_table()
/linux-4.19.296/fs/proc/
inode.c 146 if (unlikely(atomic_dec_return(&pde->in_use) == BIAS)) in unuse_pde()
/linux-4.19.296/drivers/reset/
core.c 320 if (atomic_dec_return(&rstc->deassert_count) != 0) in reset_control_assert()
/linux-4.19.296/fs/cachefiles/
interface.c 347 u = atomic_dec_return(&object->usage); in cachefiles_put_object()
/linux-4.19.296/fs/nfsd/
nfssvc.c 401 if (atomic_dec_return(&nfsd_notifier_refcount) == 0) { in nfsd_last_thread()
/linux-4.19.296/fs/fscache/
cookie.c 857 usage = atomic_dec_return(&cookie->usage); in fscache_cookie_put()
/linux-4.19.296/drivers/s390/block/
dasd_int.h 596 if (atomic_dec_return(&device->ref_count) == 0) in dasd_put_device()
