Lines matching references to bs (struct gru_blade_state)

140 static void gru_load_kernel_context(struct gru_blade_state *bs, int blade_id)  in gru_load_kernel_context()  argument
147 up_read(&bs->bs_kgts_sema); in gru_load_kernel_context()
148 down_write(&bs->bs_kgts_sema); in gru_load_kernel_context()
150 if (!bs->bs_kgts) { in gru_load_kernel_context()
152 bs->bs_kgts = gru_alloc_gts(NULL, 0, 0, 0, 0, 0); in gru_load_kernel_context()
153 if (!IS_ERR(bs->bs_kgts)) in gru_load_kernel_context()
157 bs->bs_kgts->ts_user_blade_id = blade_id; in gru_load_kernel_context()
159 kgts = bs->bs_kgts; in gru_load_kernel_context()
165 GRU_NUM_KERNEL_CBR * ncpus + bs->bs_async_cbrs); in gru_load_kernel_context()
168 bs->bs_async_dsr_bytes); in gru_load_kernel_context()
174 gru = bs->bs_kgts->ts_gru; in gru_load_kernel_context()
177 bs->kernel_cb = get_gseg_base_address_cb(vaddr, ctxnum, 0); in gru_load_kernel_context()
178 bs->kernel_dsr = get_gseg_base_address_ds(vaddr, ctxnum, 0); in gru_load_kernel_context()
180 downgrade_write(&bs->bs_kgts_sema); in gru_load_kernel_context()
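
The lines above show gru_load_kernel_context() being entered with bs_kgts_sema already held for read: it drops the read lock, takes the write lock to allocate and load the per-blade kernel context, and finishes with downgrade_write() so the caller still holds a shared lock on return. A minimal sketch of that drop/upgrade/downgrade dance, assuming only the kernel rwsem API; blade_ctx and the kzalloc() stand-in are illustrative, not the driver's types:

#include <linux/rwsem.h>
#include <linux/slab.h>

/* Illustrative stand-in for the per-blade state; only the pieces the
 * locking pattern needs.  bs_kgts_sema in the driver plays the role
 * of 'sema' here. */
struct blade_ctx {
        struct rw_semaphore sema;       /* initialised with init_rwsem() at setup */
        void *ctx;                      /* lazily allocated kernel context */
};

/* Called with bc->sema held for read; returns with it held for read. */
static void blade_ctx_load(struct blade_ctx *bc)
{
        /* An rwsem cannot be upgraded in place: drop the read lock and
         * take the write lock, accepting that another CPU may get in
         * between. */
        up_read(&bc->sema);
        down_write(&bc->sema);

        /* Re-check under the write lock; the race above means the
         * context may already exist by now. */
        if (!bc->ctx)
                bc->ctx = kzalloc(64, GFP_KERNEL);      /* stand-in allocation */

        /* Keep the context stable for the caller while letting other
         * readers back in. */
        downgrade_write(&bc->sema);
}
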
189 struct gru_blade_state *bs; in gru_free_kernel_contexts() local
194 bs = gru_base[bid]; in gru_free_kernel_contexts()
195 if (!bs) in gru_free_kernel_contexts()
199 if (down_write_trylock(&bs->bs_kgts_sema)) { in gru_free_kernel_contexts()
200 kgts = bs->bs_kgts; in gru_free_kernel_contexts()
203 bs->bs_kgts = NULL; in gru_free_kernel_contexts()
204 up_write(&bs->bs_kgts_sema); in gru_free_kernel_contexts()
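
gru_free_kernel_contexts() walks the per-blade table and only frees a context whose semaphore it can take with down_write_trylock(), so a blade whose kernel context is currently in use is skipped rather than blocked on. A sketch of that non-blocking teardown, reusing the hypothetical blade_ctx above; returning a count of busy nodes is an assumption, not taken from the fragments:

/* Tear down contexts without blocking: a node whose semaphore is busy
 * is simply skipped.  'table' is an illustrative stand-in for the
 * driver's per-blade array. */
static int blade_ctx_free_all(struct blade_ctx **table, int nr)
{
        int i, busy = 0;

        for (i = 0; i < nr; i++) {
                struct blade_ctx *bc = table[i];
                void *ctx;

                if (!bc)
                        continue;
                if (!down_write_trylock(&bc->sema)) {
                        busy++;                 /* in use: leave it alone */
                        continue;
                }
                ctx = bc->ctx;
                bc->ctx = NULL;                 /* detach under the write lock */
                up_write(&bc->sema);
                kfree(ctx);                     /* free outside the lock */
        }
        return busy;
}
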
218 struct gru_blade_state *bs; in gru_lock_kernel_context() local
224 bs = gru_base[bid]; in gru_lock_kernel_context()
227 down_read(&bs->bs_kgts_sema); in gru_lock_kernel_context()
229 up_read(&bs->bs_kgts_sema); in gru_lock_kernel_context()
232 if (!bs->bs_kgts || !bs->bs_kgts->ts_gru) in gru_lock_kernel_context()
233 gru_load_kernel_context(bs, bid); in gru_lock_kernel_context()
234 return bs; in gru_lock_kernel_context()
244 struct gru_blade_state *bs; in gru_unlock_kernel_context() local
246 bs = gru_base[blade_id]; in gru_unlock_kernel_context()
247 up_read(&bs->bs_kgts_sema); in gru_unlock_kernel_context()
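
gru_lock_kernel_context() and gru_unlock_kernel_context() bracket every use of the kernel context with a shared hold on bs_kgts_sema, loading the context lazily the first time it is needed. A sketch of that pairing in terms of the hypothetical blade_ctx above:

/* Returns with bc->sema held for read and bc->ctx valid. */
static struct blade_ctx *blade_ctx_lock(struct blade_ctx *bc)
{
        down_read(&bc->sema);
        if (!bc->ctx)
                blade_ctx_load(bc);     /* drops and re-takes the lock internally */
        return bc;
}

static void blade_ctx_unlock(struct blade_ctx *bc)
{
        up_read(&bc->sema);
}
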
257 struct gru_blade_state *bs; in gru_get_cpu_resources() local
261 bs = gru_lock_kernel_context(-1); in gru_get_cpu_resources()
263 *cb = bs->kernel_cb + lcpu * GRU_HANDLE_STRIDE; in gru_get_cpu_resources()
264 *dsr = bs->kernel_dsr + lcpu * GRU_NUM_KERNEL_DSR_BYTES; in gru_get_cpu_resources()
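
gru_get_cpu_resources() hands each local CPU its own control block and data-segment slice by offsetting the shared kernel_cb/kernel_dsr bases by the local CPU index times a fixed per-CPU stride. A sketch of that arithmetic; the stride values are made up, standing in for GRU_HANDLE_STRIDE and GRU_NUM_KERNEL_DSR_BYTES:

#define SLOT_CB_STRIDE          64      /* stand-in for GRU_HANDLE_STRIDE */
#define SLOT_DSR_BYTES          256     /* stand-in for GRU_NUM_KERNEL_DSR_BYTES */

/* Per-CPU carve-out: local CPU 'lcpu' owns one fixed-size slot in each region. */
static void cpu_slot_addresses(void *cb_base, void *dsr_base, int lcpu,
                               void **cb, void **dsr)
{
        *cb  = (char *)cb_base  + lcpu * SLOT_CB_STRIDE;
        *dsr = (char *)dsr_base + lcpu * SLOT_DSR_BYTES;
}
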
291 struct gru_blade_state *bs; in gru_reserve_async_resources() local
295 bs = gru_base[blade_id]; in gru_reserve_async_resources()
297 down_write(&bs->bs_kgts_sema); in gru_reserve_async_resources()
300 if (bs->bs_async_dsr_bytes + bs->bs_async_cbrs) in gru_reserve_async_resources()
302 bs->bs_async_dsr_bytes = dsr_bytes; in gru_reserve_async_resources()
303 bs->bs_async_cbrs = cbrs; in gru_reserve_async_resources()
304 bs->bs_async_wq = cmp; in gru_reserve_async_resources()
305 kgts = bs->bs_kgts; in gru_reserve_async_resources()
313 up_write(&bs->bs_kgts_sema); in gru_reserve_async_resources()
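
gru_reserve_async_resources() records at most one outstanding async reservation per blade, all under the write side of bs_kgts_sema, and hands back an opaque handle. A sketch of that single-reservation pattern; async_blade, its field names, and the node+1 handle encoding are illustrative assumptions, not the driver's ASYNC_BID_TO_HAN():

#include <linux/rwsem.h>
#include <linux/completion.h>

/* Illustrative per-node async bookkeeping; the fields mirror the
 * bs_async_* members seen in the fragments but this is not the
 * driver's struct. */
struct async_blade {
        struct rw_semaphore sema;
        int async_cbrs;                 /* CBRs reserved for async use */
        int async_dsr_bytes;            /* DSR bytes reserved for async use */
        struct completion *async_wq;    /* signalled when an async CBR finishes */
};

/* Allow a single outstanding reservation per node; return a nonzero
 * opaque handle on success, 0 if something is already reserved. */
static unsigned long blade_reserve_async(struct async_blade *ab, int node,
                                         int cbrs, int dsr_bytes,
                                         struct completion *cmp)
{
        unsigned long han = 0;

        down_write(&ab->sema);
        if (ab->async_cbrs || ab->async_dsr_bytes)
                goto out;               /* already reserved */
        ab->async_cbrs = cbrs;
        ab->async_dsr_bytes = dsr_bytes;
        ab->async_wq = cmp;
        han = node + 1;                 /* encode the node, keep 0 as "failed" */
out:
        up_write(&ab->sema);
        return han;
}
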
325 struct gru_blade_state *bs = ASYNC_HAN_TO_BS(han); in gru_release_async_resources() local
327 down_write(&bs->bs_kgts_sema); in gru_release_async_resources()
328 bs->bs_async_dsr_bytes = 0; in gru_release_async_resources()
329 bs->bs_async_cbrs = 0; in gru_release_async_resources()
330 bs->bs_async_wq = NULL; in gru_release_async_resources()
331 up_write(&bs->bs_kgts_sema); in gru_release_async_resources()
342 struct gru_blade_state *bs = ASYNC_HAN_TO_BS(han); in gru_wait_async_cbr() local
344 wait_for_completion(bs->bs_async_wq); in gru_wait_async_cbr()
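
Release and wait are the two ends of that reservation: gru_release_async_resources() clears the bookkeeping under the write lock, while gru_wait_async_cbr() simply sleeps on the completion supplied at reservation time. A sketch of both, continuing the async_blade assumption above; the complete() call that wakes the waiter would come from the driver's interrupt path:

static void blade_release_async(struct async_blade *ab)
{
        down_write(&ab->sema);
        ab->async_cbrs = 0;
        ab->async_dsr_bytes = 0;
        ab->async_wq = NULL;
        up_write(&ab->sema);
}

static void blade_wait_async(struct async_blade *ab)
{
        wait_for_completion(ab->async_wq);      /* woken by complete() elsewhere */
}
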
359 struct gru_blade_state *bs = ASYNC_HAN_TO_BS(han); in gru_lock_async_resource() local
366 *cb = bs->kernel_cb + ncpus * GRU_HANDLE_STRIDE; in gru_lock_async_resource()
368 *dsr = bs->kernel_dsr + ncpus * GRU_NUM_KERNEL_DSR_BYTES; in gru_lock_async_resource()
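
The last two fragments place the async CB/DSR area immediately after the per-CPU kernel slots: the offset is the number of possible CPUs times the per-slot stride, i.e. the first slot past everything gru_get_cpu_resources() can hand out. A sketch reusing the illustrative strides from the per-CPU example above:

/* The async area starts one slot past the last per-CPU slot. */
static void async_slot_addresses(void *cb_base, void *dsr_base, int ncpus,
                                 void **cb, void **dsr)
{
        if (cb)
                *cb  = (char *)cb_base  + ncpus * SLOT_CB_STRIDE;
        if (dsr)
                *dsr = (char *)dsr_base + ncpus * SLOT_DSR_BYTES;
}
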