Lines Matching +full:0 +full:x0f00
52 if ((count == 1) && dst[0] & 0x80) { in sign_extend()
53 dst[1] = 0xff; in sign_extend()
54 dst[2] = 0xff; in sign_extend()
55 dst[3] = 0xff; in sign_extend()
57 if ((count == 2) && dst[1] & 0x80) { in sign_extend()
58 dst[2] = 0xff; in sign_extend()
59 dst[3] = 0xff; in sign_extend()
62 if ((count == 1) && dst[3] & 0x80) { in sign_extend()
63 dst[2] = 0xff; in sign_extend()
64 dst[1] = 0xff; in sign_extend()
65 dst[0] = 0xff; in sign_extend()
67 if ((count == 2) && dst[2] & 0x80) { in sign_extend()
68 dst[1] = 0xff; in sign_extend()
69 dst[0] = 0xff; in sign_extend()
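The sign_extend() hits above are the two byte orders: on little-endian SH the sign bit of a 1-byte load sits in dst[0] and the three upper bytes get padded, while the big-endian branch pads downward from dst[3]. A minimal user-space sketch of the little-endian half (the function name is mine, not the kernel's), just to make the padding concrete:

#include <stdio.h>
#include <string.h>
#include <stdint.h>

/* Stand-in for the little-endian branch above: after `count` low-order
 * bytes of a loaded value have been copied into a 4-byte buffer,
 * replicate the sign bit of the topmost copied byte into the rest. */
static void sign_extend_le(int count, uint8_t dst[4])
{
	if (count == 1 && (dst[0] & 0x80))
		dst[1] = dst[2] = dst[3] = 0xff;
	if (count == 2 && (dst[1] & 0x80))
		dst[2] = dst[3] = 0xff;
}

int main(void)
{
	uint8_t buf[4] = { 0x80, 0, 0, 0 };	/* mov.b just loaded -128 */
	int32_t v;

	sign_extend_le(1, buf);
	memcpy(&v, buf, sizeof(v));
	printf("%d\n", v);	/* -128, not 128 (on a little-endian host) */
	return 0;
}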
101 * - return 0 if emulation okay, -EFAULT on existential error
111 index = (instruction>>8)&15; /* 0x0F00 */ in handle_unaligned_ins()
114 index = (instruction>>4)&15; /* 0x00F0 */ in handle_unaligned_ins()
128 case 0: /* mov.[bwl] to/from memory via r0+rn */ in handle_unaligned_ins()
132 srcu += regs->regs[0]; in handle_unaligned_ins()
134 *(unsigned long *)dst = 0; in handle_unaligned_ins()
150 dstu += regs->regs[0]; in handle_unaligned_ins()
155 ret = 0; in handle_unaligned_ins()
161 dstu += (instruction&0x000F)<<2; in handle_unaligned_ins()
165 ret = 0; in handle_unaligned_ins()
178 ret = 0; in handle_unaligned_ins()
183 srcu += (instruction & 0x000F) << 2; in handle_unaligned_ins()
185 *(unsigned long *)dst = 0; in handle_unaligned_ins()
189 ret = 0; in handle_unaligned_ins()
197 *(unsigned long*)dst = 0; in handle_unaligned_ins()
205 ret = 0; in handle_unaligned_ins()
209 switch ((instruction&0xFF00)>>8) { in handle_unaligned_ins()
210 case 0x81: /* mov.w R0,@(disp,Rn) */ in handle_unaligned_ins()
211 src = (unsigned char *) &regs->regs[0]; in handle_unaligned_ins()
216 dstu += (instruction & 0x000F) << 1; in handle_unaligned_ins()
220 ret = 0; in handle_unaligned_ins()
223 case 0x85: /* mov.w @(disp,Rm),R0 */ in handle_unaligned_ins()
225 srcu += (instruction & 0x000F) << 1; in handle_unaligned_ins()
226 dst = (unsigned char *) &regs->regs[0]; in handle_unaligned_ins()
227 *(unsigned long *)dst = 0; in handle_unaligned_ins()
235 ret = 0; in handle_unaligned_ins()
243 srcu += (instruction & 0x00FF) << 1; in handle_unaligned_ins()
245 *(unsigned long *)dst = 0; in handle_unaligned_ins()
254 ret = 0; in handle_unaligned_ins()
257 case 0xd: /* mov.l @(disp,PC),Rn */ in handle_unaligned_ins()
258 srcu = (unsigned char __user *)(regs->pc & ~0x3); in handle_unaligned_ins()
260 srcu += (instruction & 0x00FF) << 2; in handle_unaligned_ins()
262 *(unsigned long *)dst = 0; in handle_unaligned_ins()
266 ret = 0; in handle_unaligned_ins()
275 die_if_no_fixup("Fault in unaligned fixup", regs, 0); in handle_unaligned_ins()
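Several handle_unaligned_ins() matches above are the displacement scaling the emulator applies: word displacements are shifted left by 1, longword displacements by 2, and the PC-relative mov.l @(disp,PC),Rn form first masks the PC down to a longword boundary. An illustrative stand-alone sketch of that decode and address arithmetic (helper names are mine):

#include <stdio.h>
#include <stdint.h>

/* Register fields, as in the matches above: bits 11..8 (mask 0x0F00)
 * and bits 7..4 (mask 0x00F0) of the 16-bit opcode. */
static int reg_n(uint16_t instruction) { return (instruction >> 8) & 15; }
static int reg_m(uint16_t instruction) { return (instruction >> 4) & 15; }

/* Effective address of mov.l @(disp,PC),Rn (opcode 0xDndd): the 8-bit
 * displacement counts longwords, and the PC is first rounded down to a
 * longword boundary, as in the 0xd case above. */
static uint32_t movl_disp_pc_addr(uint32_t pc, uint16_t instruction)
{
	return (pc & ~0x3u) + ((instruction & 0x00FF) << 2);
}

/* mov.w @(disp,Rm),R0 (0x85xx) scales its 4-bit displacement by 2. */
static uint32_t movw_disp_rm_addr(uint32_t rm, uint16_t instruction)
{
	return rm + ((instruction & 0x000F) << 1);
}

int main(void)
{
	printf("Rn=%d Rm=%d\n", reg_n(0x2142), reg_m(0x2142));
	/* mov.l R4,@R1 -> Rn=1 Rm=4 */
	printf("0x%08x\n", movl_disp_pc_addr(0x8c000102, 0xd003));
	/* (PC & ~3) + 3*4 = 0x8c00010c */
	printf("0x%08x\n", movw_disp_rm_addr(0x8c001000, 0x8502));
	/* Rm + 2*2 = 0x8c001004 */
	return 0;
}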
298 regs, 0); in handle_delayslot()
312 * - return 0 if handled, -EFAULT if failed (may not return if in kernel)
333 index = (instruction>>8)&15; /* 0x0F00 */ in handle_unaligned_access()
350 switch (instruction&0xF000) { in handle_unaligned_access()
351 case 0x0000: in handle_unaligned_access()
352 if (instruction==0x000B) { in handle_unaligned_access()
355 if (ret==0) in handle_unaligned_access()
358 else if ((instruction&0x00FF)==0x0023) { in handle_unaligned_access()
361 if (ret==0) in handle_unaligned_access()
364 else if ((instruction&0x00FF)==0x0003) { in handle_unaligned_access()
367 if (ret==0) { in handle_unaligned_access()
378 case 0x1000: /* mov.l Rm,@(disp,Rn) */ in handle_unaligned_access()
381 case 0x2000: /* mov.[bwl] to memory, possibly with pre-decrement */ in handle_unaligned_access()
384 case 0x4000: in handle_unaligned_access()
385 if ((instruction&0x00FF)==0x002B) { in handle_unaligned_access()
388 if (ret==0) in handle_unaligned_access()
391 else if ((instruction&0x00FF)==0x000B) { in handle_unaligned_access()
394 if (ret==0) { in handle_unaligned_access()
405 case 0x5000: /* mov.l @(disp,Rm),Rn */ in handle_unaligned_access()
408 case 0x6000: /* mov.[bwl] from memory, possibly with post-increment */ in handle_unaligned_access()
411 case 0x8000: /* bf lab, bf/s lab, bt lab, bt/s lab */ in handle_unaligned_access()
412 switch (instruction&0x0F00) { in handle_unaligned_access()
413 case 0x0100: /* mov.w R0,@(disp,Rm) */ in handle_unaligned_access()
415 case 0x0500: /* mov.w @(disp,Rm),R0 */ in handle_unaligned_access()
417 case 0x0B00: /* bf lab - no delayslot */ in handle_unaligned_access()
418 ret = 0; in handle_unaligned_access()
420 case 0x0F00: /* bf/s lab */ in handle_unaligned_access()
422 if (ret==0) { in handle_unaligned_access()
424 if ((regs->sr & 0x00000001) != 0) in handle_unaligned_access()
431 case 0x0900: /* bt lab - no delayslot */ in handle_unaligned_access()
432 ret = 0; in handle_unaligned_access()
434 case 0x0D00: /* bt/s lab */ in handle_unaligned_access()
436 if (ret==0) { in handle_unaligned_access()
438 if ((regs->sr & 0x00000001) == 0) in handle_unaligned_access()
448 case 0x9000: /* mov.w @(disp,Rm),Rn */ in handle_unaligned_access()
451 case 0xA000: /* bra label */ in handle_unaligned_access()
453 if (ret==0) in handle_unaligned_access()
457 case 0xB000: /* bsr label */ in handle_unaligned_access()
459 if (ret==0) { in handle_unaligned_access()
465 case 0xD000: /* mov.l @(disp,Rm),Rn */ in handle_unaligned_access()
473 if (ret==0) in handle_unaligned_access()
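The handle_unaligned_access() matches trace a two-level decode: a switch on the top nibble (instruction & 0xF000), with the 0x8000 group switched again on bits 11..8 to separate the mov.w displacement forms from bf/bt and their delay-slot variants, whose taken/not-taken test is the T bit (sr & 0x00000001). A compressed, purely illustrative sketch of that dispatch shape, returning labels instead of emulating:

#include <stdio.h>
#include <stdint.h>

static const char *classify(uint16_t instruction)
{
	switch (instruction & 0xF000) {
	case 0x1000: return "mov.l Rm,@(disp,Rn)";
	case 0x5000: return "mov.l @(disp,Rm),Rn";
	case 0x8000:
		switch (instruction & 0x0F00) {
		case 0x0100: return "mov.w R0,@(disp,Rm)";
		case 0x0500: return "mov.w @(disp,Rm),R0";
		case 0x0900: return "bt label";
		case 0x0B00: return "bf label";
		case 0x0D00: return "bt/s label";
		case 0x0F00: return "bf/s label";
		}
		return "other 0x8xxx";
	case 0x9000: return "mov.w @(disp,Rm),Rn";
	case 0xA000: return "bra label";
	case 0xB000: return "bsr label";
	case 0xD000: return "mov.l @(disp,Rm),Rn";
	}
	return "not handled here";
}

int main(void)
{
	printf("%s\n", classify(0x8d02));	/* bt/s label */
	printf("%s\n", classify(0x9512));	/* mov.w @(disp,Rm),Rn */
	return 0;
}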
482 * PC >= 0x80000000 in user mode
485 * access to >= 0x80000000 in user mode in do_address_error()
493 unsigned long error_code = 0; in do_address_error()
536 &user_mem_access, 0, in do_address_error()
539 if (tmp == 0) in do_address_error()
558 die("insn faulting in do_address_error", regs, 0); in do_address_error()
564 0, address); in do_address_error()
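do_address_error() is where these fixups start: the CPU raises an address error when a load or store address is not a multiple of the access size. As a rule of thumb (my sketch, not the kernel's code), the misalignment test for a power-of-two access size is address & (size - 1):

#include <stdio.h>
#include <stdint.h>

/* Non-zero when `address` is misaligned for an access of `size` bytes
 * (size must be a power of two: 2 for mov.w, 4 for mov.l). */
static unsigned int misaligned(uint32_t address, uint32_t size)
{
	return address & (size - 1);
}

int main(void)
{
	printf("%u %u\n", misaligned(0x8c001002, 4),	/* 2: faults  */
			  misaligned(0x8c001002, 2));	/* 0: aligned */
	return 0;
}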
574 unsigned short inst = 0; in is_dsp_inst()
581 return 0; in is_dsp_inst()
585 inst &= 0xf000; in is_dsp_inst()
588 if ((inst == 0xf000) || (inst == 0x4000)) in is_dsp_inst()
591 return 0; in is_dsp_inst()
594 static inline int is_dsp_inst(struct pt_regs *regs) { return 0; } in is_dsp_inst()
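is_dsp_inst() peeks at the faulting opcode and treats the 0xFxxx and 0x4xxx classes as potentially DSP-related; the inline stub on the last line is the build without DSP support. The core mask-and-compare, lifted into a stand-alone sketch:

#include <stdio.h>
#include <stdint.h>

/* Same test as the lines above: keep only the top nibble and check
 * for the two opcode classes that can carry DSP or DSP support
 * instructions. */
static int opcode_class_is_dsp(uint16_t inst)
{
	inst &= 0xf000;
	return (inst == 0xf000) || (inst == 0x4000);
}

int main(void)
{
	printf("%d %d\n", opcode_class_is_dsp(0xf123),	/* 1 */
			  opcode_class_is_dsp(0x6002));	/* 0 */
	return 0;
}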
623 unsigned short inst = 0; in do_reserved_inst()
662 * braf:0x23: PC+=Rn+4; in emulate_branch()
663 * bsrf:0x03: PC+=Rn+4 after PR=PC+4; in emulate_branch()
668 if (((inst & 0xf000) == 0xb000) || /* bsr */ in emulate_branch()
669 ((inst & 0xf0ff) == 0x0003) || /* bsrf */ in emulate_branch()
670 ((inst & 0xf0ff) == 0x400b)) /* jsr */ in emulate_branch()
673 if ((inst & 0xfd00) == 0x8d00) { /* bfs, bts */ in emulate_branch()
675 return 0; in emulate_branch()
678 if ((inst & 0xe000) == 0xa000) { /* bra, bsr */ in emulate_branch()
680 return 0; in emulate_branch()
683 if ((inst & 0xf0df) == 0x0003) { /* braf, bsrf */ in emulate_branch()
684 regs->pc += regs->regs[(inst & 0x0f00) >> 8] + 4; in emulate_branch()
685 return 0; in emulate_branch()
688 if ((inst & 0xf0df) == 0x400b) { /* jmp, jsr */ in emulate_branch()
689 regs->pc = regs->regs[(inst & 0x0f00) >> 8]; in emulate_branch()
690 return 0; in emulate_branch()
693 if ((inst & 0xffff) == 0x000b) { /* rts */ in emulate_branch()
695 return 0; in emulate_branch()
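emulate_branch()'s header comment summarizes the PC arithmetic per branch class; for the 12-bit forms bra and bsr the displacement counts 16-bit instruction words, hence the *2, applied from PC+4. A hedged sketch of that computation (sign extension spelled out; the helper name is mine):

#include <stdio.h>
#include <stdint.h>

/* Target of bra/bsr (0xAddd/0xBddd): the 12-bit displacement is a
 * signed count of instruction words, applied from PC+4. */
static uint32_t bra_target(uint32_t pc, uint16_t inst)
{
	int32_t disp = inst & 0x0fff;

	if (disp & 0x800)		/* sign-extend 12 -> 32 bits */
		disp |= ~0x0fff;
	return pc + disp * 2 + 4;
}

int main(void)
{
	/* disp = -2: target = PC - 4 + 4 = PC (a branch-to-self) */
	printf("0x%08x\n", bra_target(0x8c001000, 0xaffe));
	return 0;
}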
707 if (kprobe_handle_illslot(regs->pc) == 0) in do_illegal_slot_inst()
744 asm volatile("ldc %0, vbr" in per_cpu_trap_init()
775 set_exception_table_evt(0x800, do_reserved_inst); in trap_init()
776 set_exception_table_evt(0x820, do_illegal_slot_inst); in trap_init()
778 set_exception_table_evt(0x800, fpu_state_restore_trap_handler); in trap_init()
779 set_exception_table_evt(0x820, fpu_state_restore_trap_handler); in trap_init()
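trap_init() points exception events 0x800 and 0x820 at do_reserved_inst()/do_illegal_slot_inst() or at fpu_state_restore_trap_handler for both, depending on the kernel's FPU/FPU-emulation configuration. If memory serves, set_exception_table_evt() derives the vector slot by shifting the EVT code right 5 bits, since events are 0x20 apart; that mapping is an assumption worth checking against asm/traps_32.h:

#include <stdio.h>

/* Assumed EVT-to-vector mapping (verify in asm/traps_32.h): exception
 * event codes are 0x20 apart, so the table index is evt >> 5. */
#define evt_to_vec(evt)	((evt) >> 5)

int main(void)
{
	printf("0x%02x 0x%02x\n", evt_to_vec(0x800), evt_to_vec(0x820));
	/* prints: 0x40 0x41 */
	return 0;
}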