Lines Matching +full:0xfffffffe

18 		*ptr = *(&regs->exregs[0] + index - 16);  in csky_insn_reg_get_val()
47 		*(&regs->exregs[0] + index - 16) = val; in csky_insn_reg_set_val()
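
The two matches above are from the register accessor helpers: for register numbers above 15 the value lives in regs->exregs[], rebased by 16. Below is a standalone restatement of that mapping; struct fake_regs, its low[] array, and the exact range check are stand-ins of mine, since only the exregs assignments themselves appear in the matches.

#include <stdio.h>

struct fake_regs {
	unsigned long low[16];		/* stand-in for the first sixteen registers */
	unsigned long exregs[15];	/* r16..r30, matching the "index - 16" rebase */
};

static unsigned long *reg_slot(struct fake_regs *regs, unsigned long index)
{
	if (index < 16)
		return &regs->low[index];
	if (index < 31)
		return &regs->exregs[0] + index - 16;	/* as in the matched lines */
	return NULL;					/* out of range for this sketch */
}

int main(void)
{
	struct fake_regs regs = { 0 };

	*reg_slot(&regs, 20) = 0x1234;	/* register 20 lands in exregs[4] */
	printf("exregs[4] = 0x%lx\n", regs.exregs[4]);
	return 0;
}
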
72 addr + sign_extend32((opcode & 0x3ff) << 1, 9)); in simulate_br16()
79 addr + sign_extend32((opcode & 0xffff0000) >> 15, 15)); in simulate_br32()
87 addr + sign_extend32((opcode & 0x3ff) << 1, 9)); in simulate_bt16()
97 addr + sign_extend32((opcode & 0xffff0000) >> 15, 15)); in simulate_bt32()
107 addr + sign_extend32((opcode & 0x3ff) << 1, 9)); in simulate_bf16()
117 addr + sign_extend32((opcode & 0xffff0000) >> 15, 15)); in simulate_bf32()
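
The br/bt/bf matches above all compute the branch target the same way: the 16-bit forms take the low ten opcode bits, double them, and sign-extend; the 32-bit forms take the upper halfword, shift it right by 15 (doubling it), and sign-extend from bit 15. A minimal userspace restatement of that arithmetic, with a local sign_extend32() mirroring the kernel helper's (value, sign-bit index) convention and made-up opcode/address values:

#include <stdint.h>
#include <stdio.h>

/* Local stand-in for the kernel's sign_extend32(value, index),
 * where index is the bit position of the sign bit. */
static int32_t sign_extend32(uint32_t value, int index)
{
	int shift = 31 - index;
	return (int32_t)(value << shift) >> shift;
}

int main(void)
{
	uint32_t op16 = 0x000003f0;		/* hypothetical 16-bit branch encoding */
	uint32_t op32 = 0xfff00000;		/* hypothetical 32-bit branch encoding */
	unsigned long addr = 0x80001000;	/* made-up probed instruction address */

	/* 16-bit form: low ten bits, doubled, sign bit taken at bit 9. */
	unsigned long t16 = addr + sign_extend32((op16 & 0x3ff) << 1, 9);

	/* 32-bit form: upper halfword shifted right by 15 (so it is doubled),
	 * sign bit taken at bit 15. */
	unsigned long t32 = addr + sign_extend32((op32 & 0xffff0000) >> 15, 15);

	printf("16-bit target: 0x%lx\n32-bit target: 0x%lx\n", t16, t32);
	return 0;
}
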
125 unsigned long tmp = (opcode >> 2) & 0xf; in simulate_jmp16()
129 instruction_pointer_set(regs, tmp & 0xfffffffe); in simulate_jmp16()
135 unsigned long tmp = opcode & 0x1f; in simulate_jmp32()
139 instruction_pointer_set(regs, tmp & 0xfffffffe); in simulate_jmp32()
145 unsigned long tmp = (opcode >> 2) & 0xf; in simulate_jsr16()
151 instruction_pointer_set(regs, tmp & 0xfffffffe); in simulate_jsr16()
157 unsigned long tmp = opcode & 0x1f; in simulate_jsr32()
163 instruction_pointer_set(regs, tmp & 0xfffffffe); in simulate_jsr32()
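
The jmp16/jmp32/jsr16/jsr32 matches share one register-indirect pattern: pull a register number out of the opcode (bits 2..5 in the 16-bit form, bits 0..4 in the 32-bit form), fetch that register, and clear bit 0 before installing it as the new instruction pointer. A sketch of that pattern against a plain array rather than pt_regs; gpr[] and jump_target() are illustrative names of mine:

#include <stdint.h>
#include <stdio.h>

static uint32_t jump_target(const uint32_t *gpr, uint32_t opcode, int is_16bit)
{
	/* Register number: bits 2..5 (16-bit form) or bits 0..4 (32-bit form). */
	uint32_t rx = is_16bit ? (opcode >> 2) & 0xf : opcode & 0x1f;

	/* Bit 0 is forced clear, as in "tmp & 0xfffffffe" above. */
	return gpr[rx] & 0xfffffffe;
}

int main(void)
{
	uint32_t gpr[32] = { 0 };

	gpr[3] = 0x80002001;	/* made-up register content with bit 0 set */

	printf("16-bit target: 0x%x\n", jump_target(gpr, 3u << 2, 1));
	printf("32-bit target: 0x%x\n", jump_target(gpr, 3u, 0));
	return 0;
}
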
170 unsigned long tmp = (opcode & 0x300) >> 3; in simulate_lrw16()
171 unsigned long offset = ((opcode & 0x1f) | tmp) << 2; in simulate_lrw16()
173 tmp = (opcode & 0xe0) >> 5; in simulate_lrw16()
184 unsigned long offset = (opcode & 0xffff0000) >> 14; in simulate_lrw32()
185 unsigned long tmp = opcode & 0x0000001f; in simulate_lrw32()
188 ((instruction_pointer(regs) + offset) & 0xfffffffc); in simulate_lrw32()
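
The lrw16/lrw32 matches build a PC-relative literal address: the 16-bit form assembles a 7-bit offset from bits 0..4 and 8..9 and scales it by four, while the 32-bit form scales the upper halfword by four and rounds the address down to a word boundary with 0xfffffffc. The surrounding code (not part of the matches) presumably loads a word from that address into the register whose number is extracted alongside it. A standalone restatement of just the address arithmetic, with made-up opcode and PC values:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t pc = 0x80001002;	/* made-up instruction pointer */
	uint32_t op16 = 0x0000037f;	/* hypothetical 16-bit lrw encoding */
	uint32_t op32 = 0x00120005;	/* hypothetical 32-bit lrw encoding */

	/* 16-bit form: offset from bits 0..4 and 8..9, scaled by four;
	 * destination register number from bits 5..7. */
	uint32_t hi    = (op16 & 0x300) >> 3;
	uint32_t off16 = ((op16 & 0x1f) | hi) << 2;
	uint32_t rz16  = (op16 & 0xe0) >> 5;

	/* 32-bit form: offset is the upper halfword scaled by four,
	 * destination register number from bits 0..4, address word-aligned. */
	uint32_t off32 = (op32 & 0xffff0000) >> 14;
	uint32_t rz32  = op32 & 0x1f;

	printf("lrw16: rz=%u, literal at 0x%x\n", rz16, pc + off16);
	printf("lrw32: rz=%u, literal at 0x%x\n", rz32, (pc + off32) & 0xfffffffc);
	return 0;
}
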
199 for (i = 0; i < (opcode & 0xf); i++) { in simulate_pop16()
204 if (opcode & 0x10) { in simulate_pop16()
220 for (i = 0; i < ((opcode & 0xf0000) >> 16); i++) { in simulate_pop32()
225 if (opcode & 0x100000) { in simulate_pop32()
230 for (i = 0; i < ((opcode & 0xe00000) >> 21); i++) { in simulate_pop32()
235 if (opcode & 0x1000000) { in simulate_pop32()
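
The pop16/pop32 matches are the loop and flag tests that decide how many words come off the user stack: a four-bit count plus one optional extra word in the 16-bit form, and two count fields plus two optional words in the 32-bit form. The loop bodies, which decide which registers receive the popped words, are not part of the matches, so the sketch below only decodes the counts (opcode values are made up):

#include <stdint.h>
#include <stdio.h>

static unsigned int pop16_words(uint32_t opcode)
{
	unsigned int n = opcode & 0xf;		/* four-bit register count */

	if (opcode & 0x10)			/* one optional extra word */
		n += 1;
	return n;
}

static unsigned int pop32_words(uint32_t opcode)
{
	unsigned int n = (opcode & 0xf0000) >> 16;	/* first count field */

	if (opcode & 0x100000)				/* optional extra word */
		n += 1;
	n += (opcode & 0xe00000) >> 21;			/* second count field */
	if (opcode & 0x1000000)				/* optional extra word */
		n += 1;
	return n;
}

int main(void)
{
	printf("pop16 0x%x pops %u words\n", 0x14u, pop16_words(0x14));
	printf("pop32 0x%x pops %u words\n", 0x1730000u, pop32_words(0x1730000));
	return 0;
}
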
248 unsigned long tmp = opcode & 0x1f; in simulate_bez32()
252 if (tmp == 0) { in simulate_bez32()
254 addr + sign_extend32((opcode & 0xffff0000) >> 15, 15)); in simulate_bez32()
262 unsigned long tmp = opcode & 0x1f; in simulate_bnez32()
266 if (tmp != 0) { in simulate_bnez32()
268 addr + sign_extend32((opcode & 0xffff0000) >> 15, 15)); in simulate_bnez32()
276 unsigned long tmp = opcode & 0x1f; in simulate_bnezad32()
283 if (val > 0) { in simulate_bnezad32()
285 addr + sign_extend32((opcode & 0xffff0000) >> 15, 15)); in simulate_bnezad32()
295 unsigned long tmp = opcode & 0x1f; in simulate_bhsz32()
300 if ((long) val >= 0) { in simulate_bhsz32()
302 addr + sign_extend32((opcode & 0xffff0000) >> 15, 15)); in simulate_bhsz32()
310 unsigned long tmp = opcode & 0x1f; in simulate_bhz32()
315 if ((long) val > 0) { in simulate_bhz32()
317 addr + sign_extend32((opcode & 0xffff0000) >> 15, 15)); in simulate_bhz32()
325 unsigned long tmp = opcode & 0x1f; in simulate_blsz32()
330 if ((long) val <= 0) { in simulate_blsz32()
332 addr + sign_extend32((opcode & 0xffff0000) >> 15, 15)); in simulate_blsz32()
340 unsigned long tmp = opcode & 0x1f; in simulate_blz32()
345 if ((long) val < 0) { in simulate_blz32()
347 addr + sign_extend32((opcode & 0xffff0000) >> 15, 15)); in simulate_blz32()
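
The bez/bnez/bnezad/bhsz/bhz/blsz/blz matches read a register selected by bits 0..4, test it (note the explicit signed casts for the ordered comparisons), and reuse the same upper-halfword displacement decode as the unconditional branches; going by its name, bnezad32 also decrements and writes back the register in code that is not part of the matches. A sketch of just the condition tests, written against plain values rather than pt_regs:

#include <stdbool.h>
#include <stdio.h>

enum branch_cond { BEZ, BNEZ, BHSZ, BHZ, BLSZ, BLZ };

static bool branch_taken(enum branch_cond c, unsigned long val)
{
	switch (c) {
	case BEZ:  return val == 0;
	case BNEZ: return val != 0;
	case BHSZ: return (long)val >= 0;	/* signed compares, as in the matches */
	case BHZ:  return (long)val > 0;
	case BLSZ: return (long)val <= 0;
	case BLZ:  return (long)val < 0;
	}
	return false;
}

int main(void)
{
	unsigned long v = (unsigned long)-5;	/* made-up register value */

	printf("bez:%d bnez:%d bhsz:%d bhz:%d blsz:%d blz:%d\n",
	       branch_taken(BEZ, v), branch_taken(BNEZ, v),
	       branch_taken(BHSZ, v), branch_taken(BHZ, v),
	       branch_taken(BLSZ, v), branch_taken(BLZ, v));
	return 0;
}
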
357 tmp = (opcode & 0xffff) << 16; in simulate_bsr32()
358 tmp |= (opcode & 0xffff0000) >> 16; in simulate_bsr32()
361 addr + sign_extend32((tmp & 0x3ffffff) << 1, 15)); in simulate_bsr32()
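
The bsr32 matches swap the two 16-bit halves of the fetched opcode before masking off 26 bits, doubling, and sign-extending, exactly as written above (the swap presumably restores the ordering of the two instruction halfwords as fetched from memory, though the matches do not say so). A standalone restatement with made-up values:

#include <stdint.h>
#include <stdio.h>

static int32_t sign_extend32(uint32_t value, int index)
{
	int shift = 31 - index;
	return (int32_t)(value << shift) >> shift;
}

int main(void)
{
	uint32_t opcode = 0x1234e123;		/* hypothetical bsr32 encoding */
	unsigned long addr = 0x80001000;	/* made-up probed instruction address */
	uint32_t tmp;

	tmp  = (opcode & 0xffff) << 16;		/* low halfword -> high half */
	tmp |= (opcode & 0xffff0000) >> 16;	/* high halfword -> low half */

	unsigned long target =
		addr + sign_extend32((tmp & 0x3ffffff) << 1, 15);

	printf("target: 0x%lx\n", target);
	return 0;
}
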
370 unsigned long offset = ((opcode & 0xffff0000) >> 14); in simulate_jmpi32()
373 ((instruction_pointer(regs) + offset) & 0xfffffffc); in simulate_jmpi32()
382 unsigned long offset = ((opcode & 0xffff0000) >> 14); in simulate_jsri32()
385 ((instruction_pointer(regs) + offset) & 0xfffffffc); in simulate_jsri32()
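
The jmpi32/jsri32 matches compute the same kind of word-aligned, PC-relative literal address as lrw32; the word loaded from it presumably becomes the new instruction pointer (jmpi/jsri being indirect jump/call forms), and for jsri a return address is presumably recorded as well, neither of which is part of the matches. A sketch using a local array as the literal pool so the load is well-defined; ~3 stands in for the 32-bit 0xfffffffc mask so the sketch also behaves on a 64-bit host:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* Fake memory: pretend the probed instruction sits at &pool[0] and the
	 * literal holding the jump target sits a few words after it. */
	uint32_t pool[8] = { 0 };
	uintptr_t pc = (uintptr_t)&pool[0];
	uint32_t opcode = 0x00040000;	/* hypothetical encoding, offset field = 4 */

	pool[4] = 0x80002000;		/* made-up jump target stored in the pool */

	uintptr_t offset = (opcode & 0xffff0000) >> 14;	/* 4 words = 16 bytes */
	uint32_t target = *(uint32_t *)((pc + offset) & ~(uintptr_t)3);

	printf("new pc: 0x%x\n", target);
	return 0;
}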