Lines Matching refs:gts
83 struct gru_thread_state *gts = NULL; in gru_find_lock_gts() local
88 gts = gru_find_thread_state(vma, TSID(vaddr, vma)); in gru_find_lock_gts()
89 if (gts) in gru_find_lock_gts()
90 mutex_lock(&gts->ts_ctxlock); in gru_find_lock_gts()
93 return gts; in gru_find_lock_gts()
100 struct gru_thread_state *gts = ERR_PTR(-EINVAL); in gru_alloc_locked_gts() local
107 gts = gru_alloc_thread_state(vma, TSID(vaddr, vma)); in gru_alloc_locked_gts()
108 if (IS_ERR(gts)) in gru_alloc_locked_gts()
110 mutex_lock(&gts->ts_ctxlock); in gru_alloc_locked_gts()
112 return gts; in gru_alloc_locked_gts()
116 return gts; in gru_alloc_locked_gts()
122 static void gru_unlock_gts(struct gru_thread_state *gts) in gru_unlock_gts() argument
124 mutex_unlock(&gts->ts_ctxlock); in gru_unlock_gts()
265 static int gru_vtop(struct gru_thread_state *gts, unsigned long vaddr, in gru_vtop() argument
268 struct mm_struct *mm = gts->ts_mm; in gru_vtop()
323 struct gru_thread_state *gts, int atomic, in gru_preload_tlb() argument
345 ret = gru_vtop(gts, vaddr, write, atomic, &gpa, &pageshift); in gru_preload_tlb()
351 atomic ? "atomic" : "non-atomic", gru->gs_gid, gts, tfh, in gru_preload_tlb()
369 struct gru_thread_state *gts, in gru_try_dropin() argument
374 unsigned char tlb_preload_count = gts->ts_tlb_preload_count; in gru_try_dropin()
423 if (atomic_read(&gts->ts_gms->ms_range_active)) in gru_try_dropin()
426 ret = gru_vtop(gts, vaddr, write, atomic, &gpa, &pageshift); in gru_try_dropin()
432 if (!(gts->ts_sizeavail & GRU_SIZEAVAIL(pageshift))) { in gru_try_dropin()
433 gts->ts_sizeavail |= GRU_SIZEAVAIL(pageshift); in gru_try_dropin()
434 if (atomic || !gru_update_cch(gts)) { in gru_try_dropin()
435 gts->ts_force_cch_reload = 1; in gru_try_dropin()
441 gru_preload_tlb(gru, gts, atomic, vaddr, asid, write, tlb_preload_count, tfh, cbe); in gru_try_dropin()
446 gts->ustats.tlbdropin++; in gru_try_dropin()
452 atomic ? "atomic" : "non-atomic", gru->gs_gid, gts, tfh, vaddr, asid, in gru_try_dropin()
536 struct gru_thread_state *gts; in gru_intr() local
577 gts = gru->gs_gts[ctxnum]; in gru_intr()
580 if (!gts) { in gru_intr()
589 gts->ustats.fmm_tlbmiss++; in gru_intr()
590 if (!gts->ts_force_cch_reload && in gru_intr()
591 down_read_trylock(&gts->ts_mm->mmap_sem)) { in gru_intr()
592 gru_try_dropin(gru, gts, tfh, NULL); in gru_intr()
593 up_read(&gts->ts_mm->mmap_sem); in gru_intr()
626 static int gru_user_dropin(struct gru_thread_state *gts, in gru_user_dropin() argument
630 struct gru_mm_struct *gms = gts->ts_gms; in gru_user_dropin()
633 gts->ustats.upm_tlbmiss++; in gru_user_dropin()
638 ret = gru_try_dropin(gts->ts_gru, gts, tfh, cb); in gru_user_dropin()
653 struct gru_thread_state *gts; in gru_handle_user_call_os() local
665 gts = gru_find_lock_gts(cb); in gru_handle_user_call_os()
666 if (!gts) in gru_handle_user_call_os()
668 gru_dbg(grudev, "address 0x%lx, gid %d, gts 0x%p\n", cb, gts->ts_gru ? gts->ts_gru->gs_gid : -1, gts); in gru_handle_user_call_os()
670 if (ucbnum >= gts->ts_cbr_au_count * GRU_CBR_AU_SIZE) in gru_handle_user_call_os()
673 if (gru_check_context_placement(gts)) { in gru_handle_user_call_os()
674 gru_unlock_gts(gts); in gru_handle_user_call_os()
675 gru_unload_context(gts, 1); in gru_handle_user_call_os()
682 if (gts->ts_gru && gts->ts_force_cch_reload) { in gru_handle_user_call_os()
683 gts->ts_force_cch_reload = 0; in gru_handle_user_call_os()
684 gru_update_cch(gts); in gru_handle_user_call_os()
688 cbrnum = thread_cbr_number(gts, ucbnum); in gru_handle_user_call_os()
689 if (gts->ts_gru) { in gru_handle_user_call_os()
690 tfh = get_tfh_by_index(gts->ts_gru, cbrnum); in gru_handle_user_call_os()
691 cbk = get_gseg_base_address_cb(gts->ts_gru->gs_gru_base_vaddr, in gru_handle_user_call_os()
692 gts->ts_ctxnum, ucbnum); in gru_handle_user_call_os()
693 ret = gru_user_dropin(gts, tfh, cbk); in gru_handle_user_call_os()
696 gru_unlock_gts(gts); in gru_handle_user_call_os()
708 struct gru_thread_state *gts; in gru_get_exception_detail() local
715 gts = gru_find_lock_gts(excdet.cb); in gru_get_exception_detail()
716 if (!gts) in gru_get_exception_detail()
719 gru_dbg(grudev, "address 0x%lx, gid %d, gts 0x%p\n", excdet.cb, gts->ts_gru ? gts->ts_gru->gs_gid : -1, gts); in gru_get_exception_detail()
721 if (ucbnum >= gts->ts_cbr_au_count * GRU_CBR_AU_SIZE) { in gru_get_exception_detail()
723 } else if (gts->ts_gru) { in gru_get_exception_detail()
724 cbrnum = thread_cbr_number(gts, ucbnum); in gru_get_exception_detail()
725 cbe = get_cbe_by_index(gts->ts_gru, cbrnum); in gru_get_exception_detail()
740 gru_unlock_gts(gts); in gru_get_exception_detail()
757 struct gru_thread_state *gts; in gru_unload_all_contexts() local
767 gts = gru->gs_gts[ctxnum]; in gru_unload_all_contexts()
768 if (gts && mutex_trylock(&gts->ts_ctxlock)) { in gru_unload_all_contexts()
770 gru_unload_context(gts, 1); in gru_unload_all_contexts()
771 mutex_unlock(&gts->ts_ctxlock); in gru_unload_all_contexts()
782 struct gru_thread_state *gts; in gru_user_unload_context() local
794 gts = gru_find_lock_gts(req.gseg); in gru_user_unload_context()
795 if (!gts) in gru_user_unload_context()
798 if (gts->ts_gru) in gru_user_unload_context()
799 gru_unload_context(gts, 1); in gru_user_unload_context()
800 gru_unlock_gts(gts); in gru_user_unload_context()
811 struct gru_thread_state *gts; in gru_user_flush_tlb() local
822 gts = gru_find_lock_gts(req.gseg); in gru_user_flush_tlb()
823 if (!gts) in gru_user_flush_tlb()
826 gms = gts->ts_gms; in gru_user_flush_tlb()
827 gru_unlock_gts(gts); in gru_user_flush_tlb()
838 struct gru_thread_state *gts; in gru_get_gseg_statistics() local
849 gts = gru_find_lock_gts(req.gseg); in gru_get_gseg_statistics()
850 if (gts) { in gru_get_gseg_statistics()
851 memcpy(&req.stats, &gts->ustats, sizeof(gts->ustats)); in gru_get_gseg_statistics()
852 gru_unlock_gts(gts); in gru_get_gseg_statistics()
854 memset(&req.stats, 0, sizeof(gts->ustats)); in gru_get_gseg_statistics()
869 struct gru_thread_state *gts; in gru_set_context_option() local
878 gts = gru_find_lock_gts(req.gseg); in gru_set_context_option()
879 if (!gts) { in gru_set_context_option()
880 gts = gru_alloc_locked_gts(req.gseg); in gru_set_context_option()
881 if (IS_ERR(gts)) in gru_set_context_option()
882 return PTR_ERR(gts); in gru_set_context_option()
893 gts->ts_user_blade_id = req.val1; in gru_set_context_option()
894 gts->ts_user_chiplet_id = req.val0; in gru_set_context_option()
895 if (gru_check_context_placement(gts)) { in gru_set_context_option()
896 gru_unlock_gts(gts); in gru_set_context_option()
897 gru_unload_context(gts, 1); in gru_set_context_option()
904 gts->ts_tgid_owner = current->tgid; in gru_set_context_option()
908 gts->ts_cch_req_slice = req.val1 & 3; in gru_set_context_option()
913 gru_unlock_gts(gts); in gru_set_context_option()