Lines Matching +full:77 +full:v
262  static inline unsigned long kvmppc_actual_pgsz(unsigned long v, unsigned long r)
264          int shift = kvmppc_hpte_actual_page_shift(v, r);
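Only the declaration and the shift lookup of kvmppc_actual_pgsz() match the
query. A minimal sketch of how the function plausibly completes, assuming a
shift of zero means the HPTE pair encodes no valid page size:

    /* Sketch: turn the page shift encoded in the HPTE pair (v, r)
     * into a size in bytes; 0 signals an invalid encoding. */
    static inline unsigned long kvmppc_actual_pgsz(unsigned long v, unsigned long r)
    {
            int shift = kvmppc_hpte_actual_page_shift(v, r);

            if (shift)
                    return 1ul << shift;    /* e.g. shift 16 -> 64kB */
            return 0;
    }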
298  static inline unsigned long compute_tlbie_rb(unsigned long v, unsigned long r,
304          b_pgshift = a_pgshift = kvmppc_hpte_page_shifts(v, r);
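Base and actual page shift start out equal here. A sketch of the decode that
plausibly follows, assuming kvmppc_hpte_page_shifts() packs the two shifts as
(actual << 8) | base when the HPTE mixes page sizes (say, a 16MB actual page
backed by 4kB base PTEs):

    int b_pgshift, a_pgshift;

    b_pgshift = a_pgshift = kvmppc_hpte_page_shifts(v, r);
    if (a_pgshift >= 0x100) {       /* packed (actual << 8) | base? */
            b_pgshift &= 0xff;      /* base page shift: low byte    */
            a_pgshift >>= 8;        /* actual page shift: high byte */
    }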
312           * v has its top two bits covering the segment size, hence move
314           * The AVA field in v also has its lower 23 bits ignored.
320          rb = (v & ~0x7fUL) << 16;               /* AVA field */
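The masked shift does two jobs: it clears the 7 low non-AVA bits of v, and the
left shift by 16 pushes the two segment-size bits (used for the B field below)
off the top while the AVPN lands in the AVA position of rb. With a hypothetical
HPTE value, for illustration only:

    unsigned long v  = 0x4000123456789a91UL;    /* hypothetical HPTE[0] */
    unsigned long rb = (v & ~0x7fUL) << 16;     /* low bits gone, AVA aligned */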
323           * The AVA in v has its lower 23 bits cleared. We need to derive
327          if (v & HPTE_V_SECONDARY)
331           * In v we have the va with its low 23 bits dropped and then left-shifted
335          if (!(v & HPTE_V_1TB_SEG))
336                  va_low ^= v >> (SID_SHIFT - 16);
338                  va_low ^= v >> (SID_SHIFT_1T - 16);
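The hits skip the glue between these lines. A sketch of how the derivation
plausibly fits together, assuming the usual segment shifts (SID_SHIFT = 28 for
256MB segments, SID_SHIFT_1T = 40 for 1TB) and HPTE_V_AVPN_SHIFT = 7, so the
VSID in v sits 16 bits higher than in the raw VA (23 dropped bits minus the
7-bit left shift):

    /* Sketch: rebuild the low VA bits the AVPN dropped, starting
     * from the hash-group index, then undo the vsid ^ page-index
     * hash to recover the page-index bits. */
    unsigned long va_low = pte_index >> 3;      /* PTEG index, 8 PTEs/group */

    if (v & HPTE_V_SECONDARY)
            va_low = ~va_low;                   /* secondary hash is complemented */

    if (!(v & HPTE_V_1TB_SEG))
            va_low ^= v >> (SID_SHIFT - 16);    /* 256MB segment */
    else
            va_low ^= v >> (SID_SHIFT_1T - 16); /* 1TB segment */
    va_low &= 0x7ff;                            /* keep the 11 bits of interest */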
359           * AVAL field 58..77 - base_page_shift bits of va
363                  aval_shift = 64 - (77 - b_pgshift) + 1;
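Per that comment, the AVAL field must carry VA bits 58 through
(77 - base_page_shift), but there is only space for bits 58..64, and the +1
steps over the L bit. Worked numbers for two plausible base page shifts:

    /*   b_pgshift = 16 (64kB base): aval_shift = 64 - (77-16) + 1 = 4
     *   b_pgshift = 24 (16MB base): aval_shift = 64 - (77-24) + 1 = 12
     * Bits that do not fit are zero-filled; the mask leaves bit 0
     * clear for the L field. */
    int aval_shift = 64 - (77 - b_pgshift) + 1;
    rb |= (va_low << aval_shift) & 0xfe;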
373          rb |= (v >> HPTE_V_SSIZE_SHIFT) << 8;   /* B field */
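The last hit sets the B (segment size) field. A sketch of the plausible tail
of the function, assuming HPTE_V_SSIZE_SHIFT is 62 so the top two bits of v
hold the segment size (the 1TB bit having been copied from AVA bit 0 when the
HPTE was built):

    rb |= (v >> HPTE_V_SSIZE_SHIFT) << 8;   /* B: 0 = 256MB, 1 = 1TB segment */
    return rb;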