
Displaced stepping 0002: refactor and create some copy helpers


This patch continues the refactoring, in order to
 1) split the copy functions into separate ARM and Thumb variants, and
 2) define copy helper functions for some ARM and Thumb-2 instructions.
The overall shape of the split is sketched below.
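For context, here is a minimal sketch of the split this series is working
towards: the mode-dependent arm_copy_* routine decodes the ARM encoding and
records the rewritten instruction, then hands the mode-independent
bookkeeping to a shared copy_* core that a later Thumb decoder can reuse.
The names and types below (struct dsc, copy_preload_core,
arm_copy_preload_sketch) are simplified stand-ins for illustration, not the
actual gdb/arm-tdep.c code.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for GDB's displaced-stepping closure; illustration
   only, not the real struct displaced_step_closure.  */
struct dsc
{
  uint32_t modinsn[2];   /* Instructions to execute out of line.  */
  unsigned int rn;       /* Base register, recorded for the cleanup hook.  */
};

/* Mode-independent part: bookkeeping shared between the ARM decoder and a
   future Thumb decoder.  */
static int
copy_preload_core (unsigned int rn, struct dsc *dsc)
{
  dsc->rn = rn;  /* The real code also snapshots register values here.  */
  return 0;
}

/* Mode-dependent part: decode the ARM encoding, rewrite the instruction so
   it no longer reads the PC, then delegate to the shared core.  */
static int
arm_copy_preload_sketch (uint32_t insn, struct dsc *dsc)
{
  unsigned int rn = (insn >> 16) & 0xf;

  if (rn != 15)
    dsc->modinsn[0] = insn;                 /* No PC reference: run as-is.  */
  else
    dsc->modinsn[0] = insn & 0xfff0ffff;    /* Redirect rn to scratch r0.  */

  return copy_preload_core (rn, dsc);
}

int
main (void)
{
  struct dsc dsc = { { 0, 0 }, 0 };

  /* An ARM preload encoding with rn == pc (illustrative value).  */
  arm_copy_preload_sketch (0xf5dff000u, &dsc);
  printf ("recorded insn %.8" PRIx32 ", rn = %u\n", dsc.modinsn[0], dsc.rn);
  return 0;
}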

-- 
Yao (齐尧)
	Refactor code: split the copy_* routines into a mode-dependent part
	(arm_copy_*) and a mode-independent part (copy_*), and define some
	copy helpers.
	
	* arm-tdep.c (THUMB_NOP): New macro.
	(union instruction_instance): New.
	(copy_unmodified): Renamed to arm_copy_unmodified.
	(arm_copy_unmodified): New.
	(copy_preload, copy_preload_reg, copy_b_bl_blx): Move mode-dependent
	part and leave mode-independent part.
	(copy_bx_blx_reg, copy_ldr_str_ldrb_strb, copy_alu_reg): Likewise.
	(copy_block_xfer, copy_copro_load_store): Likewise.
	(arm_copy_preload, arm_copy_preload_reg, arm_copy_b_bl_blx): New;
	mode-dependent part.
	(arm_copy_bx_blx_reg, arm_copy_ldr_str_ldrb_strb): Likewise.
	(arm_copy_alu_reg, arm_copy_block_xfer): Likewise.
	(copy_undef): Renamed to arm_copy_undef.
	(arm_copy_undef): New.
	(copy_unpred): Renamed to arm_copy_unpred.
	(arm_copy_unpred): New.
	(arm_copy_alu_shifted_reg): Renamed from copy_alu_shifted_reg.
	(arm_copy_unmodified_helper, arm_copy_undef_helper): Copy helpers for ARM.
	(arm_copy_copro_load_store_helper, arm_copy_ldm_with_pc_helper): Likewise.
	(arm_copy_svc_helper): Likewise.
	(copy_svc): Delete.
	(decode_misc_memhint_neon, decode_unconditional): Update callers.
	(decode_dp_misc, decode_miscellaneous, decode_ld_st_word_ubyte): Likewise.
	(decode_media, decode_b_bl_ldmstm, decode_ext_reg_ld_st): Likewise.
	(decode_svc_copro, copy_alu_imm, copy_extra_ld_st): Likewise.
	(arm_decode_svc_copro): Renamed from decode_svc_copro.  Call copy
	helper routines.
	(arm_process_displaced_insn): Update call to decode_svc_copro.
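
To illustrate the copy helpers named above, here is a minimal sketch of the
callback arrangement, again with simplified stand-in types rather than the
real gdb/arm-tdep.c interfaces: the mode-independent decoder receives
mode-specific helpers as function pointers, so the same decode logic can
later be driven by Thumb helpers as well.

#include <stdint.h>
#include <stdio.h>

/* Illustration-only stand-in; not the real displaced_step_closure.  */
struct dsc
{
  const char *mode;
};

typedef int (*unmodified_fn) (uint32_t insn, const char *iname,
                              struct dsc *dsc);
typedef int (*undef_fn) (uint32_t insn, struct dsc *dsc);

/* Mode-independent decoder: chooses a copy strategy and calls whichever
   helper the caller supplied.  */
static int
decode_copro_sketch (uint32_t insn, unmodified_fn copy_unmodified,
                     undef_fn copy_undef, struct dsc *dsc)
{
  unsigned int coproc = (insn >> 8) & 0xf;

  if ((coproc & 0xe) == 0xa)
    return copy_unmodified (insn, "vfp/neon", dsc);

  return copy_undef (insn, dsc);
}

/* ARM-flavoured helpers; a Thumb port would supply its own pair.  */
static int
arm_unmodified_sketch (uint32_t insn, const char *iname, struct dsc *dsc)
{
  printf ("%s: copying %s insn %.8x unmodified\n", dsc->mode, iname,
          (unsigned int) insn);
  return 0;
}

static int
arm_undef_sketch (uint32_t insn, struct dsc *dsc)
{
  printf ("%s: copying undefined insn %.8x\n", dsc->mode, (unsigned int) insn);
  return 0;
}

int
main (void)
{
  struct dsc dsc = { "arm" };

  /* An encoding whose coprocessor field (bits 8-11) is 0xa (illustrative).  */
  return decode_copro_sketch (0xeeb00a40u, arm_unmodified_sketch,
                              arm_undef_sketch, &dsc);
}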
---
 gdb/arm-tdep.c |  926 +++++++++++++++++++++++++++++++++-----------------------
 1 files changed, 554 insertions(+), 372 deletions(-)

diff --git a/gdb/arm-tdep.c b/gdb/arm-tdep.c
index d1f5d7b..2d06d8e 100644
--- a/gdb/arm-tdep.c
+++ b/gdb/arm-tdep.c
@@ -5105,6 +5105,7 @@ arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
 
 /* NOP instruction (mov r0, r0).  */
 #define ARM_NOP				0xe1a00000
+#define THUMB_NOP				0x4600
 
 static int displaced_in_arm_mode (struct regcache *regs);
 
@@ -5310,9 +5311,16 @@ insn_references_pc (uint32_t insn, uint32_t bitmask)
 /* The simplest copy function.  Many instructions have the same effect no
    matter what address they are executed at: in those cases, use this.  */
 
+union instruction_instance
+{
+  uint32_t _32_bit;
+  uint16_t _16_bit[2];
+};
+
+/* Copy ARM instruction without any modification.  */
 static int
-copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
-		 const char *iname, struct displaced_step_closure *dsc)
+arm_copy_unmodified (uint32_t insn, const char *iname,
+		     struct displaced_step_closure *dsc)
 {
   if (debug_displaced)
     fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
@@ -5336,20 +5344,12 @@ cleanup_preload (struct gdbarch *gdbarch,
 }
 
 static int
-copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
+copy_preload (struct gdbarch *gdbarch, unsigned int rn, struct regcache *regs,
 	      struct displaced_step_closure *dsc)
 {
-  unsigned int rn = bits (insn, 16, 19);
   ULONGEST rn_val;
   CORE_ADDR from = dsc->insn_addr;
 
-  if (!insn_references_pc (insn, 0x000f0000ul))
-    return copy_unmodified (gdbarch, insn, "preload", dsc);
-
-  if (debug_displaced)
-    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
-			(unsigned long) insn);
-
   /* Preload instructions:
 
      {pli/pld} [rn, #+/-imm]
@@ -5362,32 +5362,39 @@ copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
 
   dsc->u.preload.immed = 1;
 
-  RECORD_ARM_MODE_INSN (0, (insn & 0xfff0ffff));
-
   dsc->cleanup = &cleanup_preload;
 
   return 0;
 }
 
-/* Preload instructions with register offset.  */
-
 static int
-copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
-		  struct regcache *regs,
-		  struct displaced_step_closure *dsc)
+arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
+	      struct displaced_step_closure *dsc)
 {
   unsigned int rn = bits (insn, 16, 19);
-  unsigned int rm = bits (insn, 0, 3);
-  ULONGEST rn_val, rm_val;
-  CORE_ADDR from = dsc->insn_addr;
 
-  if (!insn_references_pc (insn, 0x000f000ful))
-    return copy_unmodified (gdbarch, insn, "preload reg", dsc);
+  if (!insn_references_pc (insn, 0x000f0000ul))
+    return arm_copy_unmodified (insn, "preload", dsc);
 
   if (debug_displaced)
     fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
 			(unsigned long) insn);
 
+  RECORD_ARM_MODE_INSN (0, (insn & 0xfff0ffff));
+
+  return copy_preload (gdbarch, rn, regs, dsc);
+}
+
+/* Preload instructions with register offset.  */
+
+static int
+copy_preload_reg (struct gdbarch *gdbarch, unsigned int rn, unsigned int rm,
+		  struct regcache *regs,
+		  struct displaced_step_closure *dsc)
+{
+  ULONGEST rn_val, rm_val;
+  CORE_ADDR from = dsc->insn_addr;
+
   /* Preload register-offset instructions:
 
      {pli/pld} [rn, rm {, shift}]
@@ -5400,15 +5407,31 @@ copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
   rm_val = displaced_read_reg (regs, from, rm);
   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
   displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
-
   dsc->u.preload.immed = 0;
 
-  RECORD_ARM_MODE_INSN (0, ((insn & 0xfff0fff0) | 0x1));
-
   dsc->cleanup = &cleanup_preload;
 
   return 0;
 }
+static int
+arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
+		      struct regcache *regs,
+		      struct displaced_step_closure *dsc)
+{
+  unsigned int rn = bits (insn, 16, 19);
+  unsigned int rm = bits (insn, 0, 3);
+
+  if (!insn_references_pc (insn, 0x000f000ful))
+    return arm_copy_unmodified (insn, "preload reg", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
+			(unsigned long) insn);
+
+  RECORD_ARM_MODE_INSN (0, ((insn & 0xfff0fff0) | 0x1));
+
+  return copy_preload_reg (gdbarch, rn, rm, regs, dsc);
+}
 
 /* Copy/cleanup coprocessor load and store instructions.  */
 
@@ -5426,21 +5449,14 @@ cleanup_copro_load_store (struct gdbarch *gdbarch,
 }
 
 static int
-copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
+copy_copro_load_store (struct gdbarch *gdbarch, unsigned int rn,
 		       struct regcache *regs,
 		       struct displaced_step_closure *dsc)
 {
-  unsigned int rn = bits (insn, 16, 19);
+
   ULONGEST rn_val;
   CORE_ADDR from = dsc->insn_addr;
 
-  if (!insn_references_pc (insn, 0x000f0000ul))
-    return copy_unmodified (gdbarch, insn, "copro load/store", dsc);
-
-  if (debug_displaced)
-    fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
-			"load/store insn %.8lx\n", (unsigned long) insn);
-
   /* Coprocessor load/store instructions:
 
      {stc/stc2} [<Rn>, #+/-imm]  (and other immediate addressing modes)
@@ -5453,11 +5469,6 @@ copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
   rn_val = displaced_read_reg (regs, from, rn);
   displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
 
-  dsc->u.ldst.writeback = bit (insn, 25);
-  dsc->u.ldst.rn = rn;
-
-  RECORD_ARM_MODE_INSN (0, (insn & 0xfff0ffff));
-
   dsc->cleanup = &cleanup_copro_load_store;
 
   return 0;
@@ -5465,10 +5476,9 @@ copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
 
 /* Clean up branch instructions (actually perform the branch, by setting
    PC).  */
-
 static void
 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
-		struct displaced_step_closure *dsc)
+	       struct displaced_step_closure *dsc)
 {
   ULONGEST from = dsc->insn_addr;
   uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
@@ -5482,29 +5492,25 @@ cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
   if (dsc->u.branch.link)
     {
       ULONGEST pc = displaced_read_reg (regs, from, 15);
-      displaced_write_reg (regs, dsc, 14, pc - 4, CANNOT_WRITE_PC);
+
+      if (displaced_in_arm_mode (regs))
+	displaced_write_reg (regs, dsc, 14, pc - 4, CANNOT_WRITE_PC);
+      else
+	displaced_write_reg (regs, dsc, 14, (pc - 2) | 1u,
+			     CANNOT_WRITE_PC);
     }
 
-  displaced_write_reg (regs, dsc, 15, dsc->u.branch.dest, write_pc);
+  displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
 }
 
 /* Copy B/BL/BLX instructions with immediate destinations.  */
 
 static int
-copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
-	       struct regcache *regs, struct displaced_step_closure *dsc)
+copy_b_bl_blx (struct gdbarch *gdbarch, unsigned int cond, int exchange,
+	       int link, long offset, struct regcache *regs,
+	       struct displaced_step_closure *dsc)
 {
-  unsigned int cond = bits (insn, 28, 31);
-  int exchange = (cond == 0xf);
-  int link = exchange || bit (insn, 24);
   CORE_ADDR from = dsc->insn_addr;
-  long offset;
-
-  if (debug_displaced)
-    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
-			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
-			(unsigned long) insn);
-
   /* Implement "BL<cond> <label>" as:
 
      Preparation: cond <- instruction condition
@@ -5513,6 +5519,29 @@ copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
 
      B<cond> similar, but don't set r14 in cleanup.  */
 
+
+  dsc->u.branch.cond = cond;
+  dsc->u.branch.link = link;
+  dsc->u.branch.exchange = exchange;
+  dsc->cleanup = &cleanup_branch;
+
+  return 0;
+}
+
+/* Copy B/BL/BLX ARM instructions with immediate destinations.  */
+static int
+arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
+		   struct regcache *regs, struct displaced_step_closure *dsc)
+{
+  unsigned int cond = bits (insn, 28, 31);
+  int exchange = (cond == 0xf);
+  int link = exchange || bit (insn, 24);
+  long offset;
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
+			"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
+			(unsigned long) insn);
   if (exchange)
     /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
        then arrange the switch into Thumb mode.  */
@@ -5523,36 +5552,21 @@ copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
   if (bit (offset, 25))
     offset = offset | ~0x3ffffff;
 
-  dsc->u.branch.cond = cond;
-  dsc->u.branch.link = link;
-  dsc->u.branch.exchange = exchange;
-  dsc->u.branch.dest = from + 8 + offset;
-
+  dsc->u.branch.dest = dsc->insn_addr + 8 + offset;
   RECORD_ARM_MODE_INSN (0, ARM_NOP);
 
-  dsc->cleanup = &cleanup_branch;
-
-  return 0;
+  return copy_b_bl_blx (gdbarch, cond, exchange, link, offset, regs, dsc);
 }
 
 /* Copy BX/BLX with register-specified destinations.  */
 
 static int
-copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
-		 struct regcache *regs, struct displaced_step_closure *dsc)
+copy_bx_blx_reg (struct gdbarch *gdbarch, unsigned int cond, int link,
+		 unsigned int rm, struct regcache *regs,
+		 struct displaced_step_closure *dsc)
 {
-  unsigned int cond = bits (insn, 28, 31);
-  /* BX:  x12xxx1x
-     BLX: x12xxx3x.  */
-  int link = bit (insn, 5);
-  unsigned int rm = bits (insn, 0, 3);
   CORE_ADDR from = dsc->insn_addr;
 
-  if (debug_displaced)
-    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s register insn "
-			"%.8lx\n", (link) ? "blx" : "bx",
-			(unsigned long) insn);
-
   /* Implement {BX,BLX}<cond> <reg>" as:
 
      Preparation: cond <- instruction condition
@@ -5563,17 +5577,38 @@ copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
 
   dsc->u.branch.dest = displaced_read_reg (regs, from, rm);
 
-  dsc->u.branch.cond = cond;
   dsc->u.branch.link = link;
   dsc->u.branch.exchange = 1;
 
-  RECORD_ARM_MODE_INSN (0, ARM_NOP);
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, " %s r%d -> 0x%.8lx\n",
+			(link) ? "blx" : "bx", rm, dsc->u.branch.dest);
 
   dsc->cleanup = &cleanup_branch;
 
   return 0;
 }
 
+static int
+arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
+		     struct regcache *regs, struct displaced_step_closure *dsc)
+{
+  unsigned int cond = bits (insn, 28, 31);
+  /* BX:  x12xxx1x
+     BLX: x12xxx3x.  */
+  int link = bit (insn, 5);
+  unsigned int rm = bits (insn, 0, 3);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
+			(unsigned long) insn);
+
+  dsc->u.branch.cond = cond;
+  RECORD_ARM_MODE_INSN (0, ARM_NOP);
+
+  return copy_bx_blx_reg (gdbarch, cond, link, rm, regs, dsc);
+}
+
 /* Copy/cleanup arithmetic/logic instruction with immediate RHS.  */
 
 static void
@@ -5598,7 +5633,7 @@ copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
   CORE_ADDR from = dsc->insn_addr;
 
   if (!insn_references_pc (insn, 0x000ff000ul))
-    return copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
+    return arm_copy_unmodified (insn, "ALU immediate", dsc);
 
   if (debug_displaced)
     fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
@@ -5652,25 +5687,18 @@ cleanup_alu_reg (struct gdbarch *gdbarch,
   displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
 }
 
+
 static int
-copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
-	      struct displaced_step_closure *dsc)
+copy_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
+	      struct displaced_step_closure *dsc, unsigned int reg_ids[])
 {
-  unsigned int rn = bits (insn, 16, 19);
-  unsigned int rm = bits (insn, 0, 3);
-  unsigned int rd = bits (insn, 12, 15);
-  unsigned int op = bits (insn, 21, 24);
-  int is_mov = (op == 0xd);
+  unsigned int rn = reg_ids[1];
+  unsigned int rm = reg_ids[2];
+  unsigned int rd = reg_ids[0];
+
   ULONGEST rd_val, rn_val, rm_val;
   CORE_ADDR from = dsc->insn_addr;
 
-  if (!insn_references_pc (insn, 0x000ff00ful))
-    return copy_unmodified (gdbarch, insn, "ALU reg", dsc);
-
-  if (debug_displaced)
-    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
-			is_mov ? "move" : "ALU", (unsigned long) insn);
-
   /* Instruction is of form:
 
      <op><cond> rd, [rn,] rm [, <shift>]
@@ -5694,14 +5722,35 @@ copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
   displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
   dsc->rd = rd;
 
+  dsc->cleanup = &cleanup_alu_reg;
+
+  return 0;
+}
+
+static int
+arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
+		  struct displaced_step_closure *dsc)
+{
+  unsigned int reg_ids[3];
+  unsigned int op = bits (insn, 21, 24);
+  int is_mov = (op == 0xd);
+
+  reg_ids[1] = bits (insn, 16, 19); /* Rn */
+  reg_ids[2] = bits (insn, 0, 3); /* Rm */
+  reg_ids[0] = bits (insn, 12, 15); /* Rd */
+  if (!insn_references_pc (insn, 0x000ff00ful))
+    return arm_copy_unmodified (insn, "ALU reg", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
+				is_mov ? "move" : "ALU", (unsigned long) insn);
+
   if (is_mov)
     RECORD_ARM_MODE_INSN (0, ((insn & 0xfff00ff0) | 0x2));
   else
     RECORD_ARM_MODE_INSN (0, ((insn & 0xfff00ff0) | 0x10002));
 
-  dsc->cleanup = &cleanup_alu_reg;
-
-  return 0;
+  return copy_alu_reg (gdbarch, regs, dsc, reg_ids);
 }
 
 /* Cleanup/copy arithmetic/logic insns with shifted register RHS.  */
@@ -5721,7 +5770,7 @@ cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
 }
 
 static int
-copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
+arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
 		      struct regcache *regs,
 		      struct displaced_step_closure *dsc)
 {
@@ -5735,7 +5784,7 @@ copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
   CORE_ADDR from = dsc->insn_addr;
 
   if (!insn_references_pc (insn, 0x000fff0ful))
-    return copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
+    return arm_copy_unmodified (insn, "ALU shifted reg", dsc);
 
   if (debug_displaced)
     fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
@@ -5852,7 +5901,7 @@ copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
   CORE_ADDR from = dsc->insn_addr;
 
   if (!insn_references_pc (insn, 0x000ff00ful))
-    return copy_unmodified (gdbarch, insn, "extra load/store", dsc);
+    return arm_copy_unmodified (insn, "extra load/store", dsc);
 
   if (debug_displaced)
     fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
@@ -5911,49 +5960,32 @@ copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
 /* Copy byte/word loads and stores.  */
 
 static int
-copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
-			struct regcache *regs,
+copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, struct regcache *regs,
 			struct displaced_step_closure *dsc, int load, int byte,
-			int usermode)
+			int usermode, int writeback, int rm)
 {
-  int immed = !bit (insn, 25);
-  unsigned int rt = bits (insn, 12, 15);
-  unsigned int rn = bits (insn, 16, 19);
-  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */
   ULONGEST rt_val, rn_val, rm_val = 0;
   CORE_ADDR from = dsc->insn_addr;
 
-  if (!insn_references_pc (insn, 0x000ff00ful))
-    return copy_unmodified (gdbarch, insn, "load/store", dsc);
-
-  if (debug_displaced)
-    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s%s insn %.8lx\n",
-			load ? (byte ? "ldrb" : "ldr")
-			     : (byte ? "strb" : "str"), usermode ? "t" : "",
-			(unsigned long) insn);
-
   dsc->tmp[0] = displaced_read_reg (regs, from, 0);
   dsc->tmp[2] = displaced_read_reg (regs, from, 2);
-  if (!immed)
+  if (!dsc->u.ldst.immed)
     dsc->tmp[3] = displaced_read_reg (regs, from, 3);
   if (!load)
     dsc->tmp[4] = displaced_read_reg (regs, from, 4);
 
-  rt_val = displaced_read_reg (regs, from, rt);
-  rn_val = displaced_read_reg (regs, from, rn);
-  if (!immed)
+  rt_val = displaced_read_reg (regs, from, dsc->rd);
+  rn_val = displaced_read_reg (regs, from, dsc->u.ldst.rn);
+  if (!dsc->u.ldst.immed)
     rm_val = displaced_read_reg (regs, from, rm);
 
   displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
   displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
-  if (!immed)
+  if (!dsc->u.ldst.immed)
     displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
 
-  dsc->rd = rt;
   dsc->u.ldst.xfersize = byte ? 1 : 4;
-  dsc->u.ldst.rn = rn;
-  dsc->u.ldst.immed = immed;
-  dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
+  dsc->u.ldst.writeback = writeback;
 
   /* To write PC we can do:
 
@@ -5976,7 +6008,41 @@ copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
      of this can be found in Section "Saving from r15" in
      http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
 
-  if (load || rt != 15)
+  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
+
+  return 0;
+}
+
+static int
+arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
+			    struct regcache *regs,
+			    struct displaced_step_closure *dsc,
+			    int load, int byte, int usermode)
+{
+  int immed = !bit (insn, 25);
+  unsigned int rt = bits (insn, 12, 15);
+  unsigned int rn = bits (insn, 16, 19);
+  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */
+
+  if (!insn_references_pc (insn, 0x000ff00ful))
+    return arm_copy_unmodified (insn, "load/store", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog,
+			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
+			load ? (byte ? "ldrb" : "ldr")
+			     : (byte ? "strb" : "str"), usermode ? "t" : "",
+			rt, rn,
+			(unsigned long) insn);
+
+  dsc->rd = rt;
+  dsc->u.ldst.rn = rn;
+  dsc->u.ldst.immed = immed;
+
+  copy_ldr_str_ldrb_strb (gdbarch, regs, dsc, load, byte, usermode,
+			  (bit (insn, 24) == 0 || bit (insn, 21) != 0), rm);
+
+    if (load || rt != ARM_PC_REGNUM)
     {
       dsc->u.ldst.restore_r4 = 0;
 
@@ -5995,8 +6061,9 @@ copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
     {
       /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
       dsc->u.ldst.restore_r4 = 1;
-      RECORD_ARM_MODE_INSN (0, 0xe92d8000); /* push {pc} */
-      RECORD_ARM_MODE_INSN (1, 0xe8bd0010); /* pop  {r4} */
+
+      RECORD_ARM_MODE_INSN (0, 0xe92d8000);  /* push {pc} */
+      RECORD_ARM_MODE_INSN (1, 0xe8bd0010);  /* pop  {r4} */
       RECORD_ARM_MODE_INSN (2, 0xe044400f); /* sub r4, r4, pc.  */
       RECORD_ARM_MODE_INSN (3, 0xe2844008); /* add r4, r4, #8.  */
       RECORD_ARM_MODE_INSN (4, 0xe0800004);  /* add r0, r0, r4.  */
@@ -6009,12 +6076,11 @@ copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
 
       RECORD_ARM_MODE_INSN (6, 0x00); /* breakpoint location.  */
       RECORD_ARM_MODE_INSN (7, 0x00); /* scratch space.  */
+
       dsc->numinsns = 6;
     }
 
-  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
-
-  return 0;
+    return 0;
 }
 
 /* Cleanup LDM instructions with fully-populated register list.  This is an
@@ -6228,105 +6294,193 @@ cleanup_block_load_pc (struct gdbarch *gdbarch,
     }
 }
 
+/* Copy helper functions.  */
+typedef int (*copy_undef_helper)(union instruction_instance,
+				 struct displaced_step_closure *);
+typedef int (*copy_unmodified_helper)(union instruction_instance, const char*,
+				      struct displaced_step_closure *);
+typedef int (*copy_copro_load_store_helper)(struct gdbarch *,
+					    union instruction_instance,
+					    struct regcache *,
+					    struct displaced_step_closure *);
+typedef int (*copy_ldm_with_pc_helper) (union instruction_instance,
+					struct displaced_step_closure *,
+					struct regcache *);
+typedef int (*copy_svc_helper)(struct gdbarch *,
+			       union instruction_instance, CORE_ADDR,
+			       struct regcache *,
+			       struct displaced_step_closure *);
+
+/* Define helpers for ARM.  */
+static int
+arm_copy_unmodified_helper (union instruction_instance insn, const char *iname,
+			    struct displaced_step_closure *dsc)
+{
+  return arm_copy_unmodified (insn._32_bit, iname, dsc);
+}
+
+static int arm_copy_undef (uint32_t insn, struct displaced_step_closure *dsc);
+
+static int
+arm_copy_undef_helper (union instruction_instance ii,
+		       struct displaced_step_closure *dsc)
+{
+  return arm_copy_undef (ii._32_bit, dsc);
+}
+
+static int
+arm_copy_copro_load_store_helper (struct gdbarch *gdbarch,
+				  union instruction_instance insn,
+				  struct regcache *regs,
+				  struct displaced_step_closure *dsc)
+{
+  unsigned int rn = bits (insn._32_bit, 16, 19);
+  if (rn != ARM_PC_REGNUM)
+    return arm_copy_unmodified (insn._32_bit, "copro load/store", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
+			"load/store insn %.8lx\n",
+			(unsigned long) insn._32_bit);
+
+  dsc->u.ldst.writeback = bit (insn._32_bit, 25);
+  dsc->u.ldst.rn = rn;
+
+  RECORD_ARM_MODE_INSN (0, (insn._32_bit & 0xfff0ffff));
+
+  return copy_copro_load_store (gdbarch, rn, regs, dsc);
+}
+
+static int
+arm_copy_ldm_with_pc_helper(union instruction_instance insn,
+			    struct displaced_step_closure *dsc,
+			    struct regcache *regs)
+{
+  /* LDM of a list of registers which includes PC.  Implement by
+     rewriting the list of registers to be transferred into a
+     contiguous chunk r0...rX before doing the transfer, then shuffling
+     registers into the correct places in the cleanup routine.  */
+  unsigned int regmask = dsc->u.block.regmask;
+  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
+  unsigned int to = 0, from = 0, i, new_rn;
+
+  for (i = 0; i < num_in_list; i++)
+    dsc->tmp[i] = displaced_read_reg (regs, from, i);
+
+  /* Writeback makes things complicated.  We need to avoid clobbering
+     the base register with one of the registers in our modified
+     register list, but just using a different register can't work in
+     all cases, e.g.:
+
+     ldm r14!, {r0-r13,pc}
+
+     which would need to be rewritten as:
+
+     ldm rN!, {r0-r14}
+
+     but that can't work, because there's no free register for N.
+
+     Solve this by turning off the writeback bit, and emulating
+     writeback manually in the cleanup routine.  */
+
+  if (dsc->u.block.writeback )
+    insn._32_bit &= ~(1 << 21);
+
+  new_regmask = (1 << num_in_list) - 1;
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
+			"{..., pc}: original reg list %.4x, modified "
+			"list %.4x\n"), dsc->u.block.rn,
+					   dsc->u.block.writeback ? "!" : "",
+					   (int) dsc->u.block.regmask,
+					   new_regmask);
+
+  /* In Thumb encoding, bit 13 should be always zero.  */
+  if (displaced_in_arm_mode (regs))
+    new_regmask &= 0xffff;
+  else
+    new_regmask &= 0xdfff;
+
+  RECORD_ARM_MODE_INSN (0,
+			 ((insn._32_bit & ~0xffff) | (new_regmask & 0xffff)));
+
+  return 0;
+}
+
+static void cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
+			 struct displaced_step_closure *dsc);
+static int
+arm_copy_svc_helper (struct gdbarch *gdbarch, union instruction_instance insn,
+		     CORE_ADDR to, struct regcache *regs,
+		     struct displaced_step_closure *dsc)
+{
+  CORE_ADDR from = dsc->insn_addr;
+
+  /* Allow OS-specific code to override SVC handling.  */
+  if (dsc->u.svc.copy_svc_os)
+    return dsc->u.svc.copy_svc_os (gdbarch, insn._32_bit, to, regs, dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
+			(unsigned long) insn._32_bit);
+
+  /* Preparation: none.
+     Insn: unmodified svc.
+     Cleanup: pc <- insn_addr + 4.  */
+
+  RECORD_ARM_MODE_INSN (0, insn._32_bit);
+
+  dsc->cleanup = &cleanup_svc;
+  /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
+     instruction.  */
+  dsc->wrote_to_pc = 1;
+
+  return 0;
+}
+
+
+/* Helper definition is done.  */
+
+
 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
    in user-level code (in particular exception return, ldm rn, {...pc}^).  */
 
 static int
-copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
+copy_block_xfer (struct gdbarch *gdbarch, union instruction_instance insn,
+		 struct regcache *regs,
+		 copy_unmodified_helper copy_unmodified,
+		 copy_ldm_with_pc_helper copy_ldm_with_pc,
 		 struct displaced_step_closure *dsc)
 {
-  int load = bit (insn, 20);
-  int user = bit (insn, 22);
-  int increment = bit (insn, 23);
-  int before = bit (insn, 24);
-  int writeback = bit (insn, 21);
-  int rn = bits (insn, 16, 19);
   CORE_ADDR from = dsc->insn_addr;
 
-  /* Block transfers which don't mention PC can be run directly
-     out-of-line.  */
-  if (rn != 15 && (insn & 0x8000) == 0)
-    return copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
-
-  if (rn == 15)
+  if (dsc->u.block.rn == 15)
     {
       warning (_("displaced: Unpredictable LDM or STM with "
 		 "base register r15"));
-      return copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
+      return copy_unmodified (insn, "unpredictable ldm/stm", dsc);
     }
 
-  if (debug_displaced)
-    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
-			"%.8lx\n", (unsigned long) insn);
 
-  dsc->u.block.xfer_addr = displaced_read_reg (regs, from, rn);
-  dsc->u.block.rn = rn;
-
-  dsc->u.block.load = load;
-  dsc->u.block.user = user;
-  dsc->u.block.increment = increment;
-  dsc->u.block.before = before;
-  dsc->u.block.writeback = writeback;
-  dsc->u.block.cond = bits (insn, 28, 31);
-
-  dsc->u.block.regmask = insn & 0xffff;
-
-  if (load)
+  dsc->u.block.xfer_addr = displaced_read_reg (regs, from, dsc->u.block.rn);
+  if (dsc->u.block.load)
     {
-      if ((insn & 0xffff) == 0xffff)
+      if (dsc->u.block.regmask == 0xffff)
 	{
 	  /* LDM with a fully-populated register list.  This case is
 	     particularly tricky.  Implement for now by fully emulating the
 	     instruction (which might not behave perfectly in all cases, but
 	     these instructions should be rare enough for that not to matter
-	     too much).  */
+	     too much).  This case is only valid in ARM encoding, so no need
+	     to worry about Thumb encoding here.  */
 	  RECORD_ARM_MODE_INSN (0, ARM_NOP);
 
 	  dsc->cleanup = &cleanup_block_load_all;
 	}
       else
 	{
-	  /* LDM of a list of registers which includes PC.  Implement by
-	     rewriting the list of registers to be transferred into a
-	     contiguous chunk r0...rX before doing the transfer, then shuffling
-	     registers into the correct places in the cleanup routine.  */
-	  unsigned int regmask = insn & 0xffff;
-	  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
-	  unsigned int to = 0, from = 0, i, new_rn;
-
-	  for (i = 0; i < num_in_list; i++)
-	    dsc->tmp[i] = displaced_read_reg (regs, from, i);
-
-	  /* Writeback makes things complicated.  We need to avoid clobbering
-	     the base register with one of the registers in our modified
-	     register list, but just using a different register can't work in
-	     all cases, e.g.:
-
-	       ldm r14!, {r0-r13,pc}
-
-	     which would need to be rewritten as:
-
-	       ldm rN!, {r0-r14}
-
-	     but that can't work, because there's no free register for N.
-
-	     Solve this by turning off the writeback bit, and emulating
-	     writeback manually in the cleanup routine.  */
-
-	  if (writeback)
-	    insn &= ~(1 << 21);
-
-	  new_regmask = (1 << num_in_list) - 1;
-
-	  if (debug_displaced)
-	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
-				"{..., pc}: original reg list %.4x, modified "
-				"list %.4x\n"), rn, writeback ? "!" : "",
-				(int) insn & 0xffff, new_regmask);
-
-	  RECORD_ARM_MODE_INSN (0,
-				 ((insn & ~0xffff) | (new_regmask & 0xffff)));
-
+	  copy_ldm_with_pc (insn, dsc, regs);
 	  dsc->cleanup = &cleanup_block_load_pc;
 	}
     }
@@ -6338,7 +6492,7 @@ copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
 	 Doing things this way has the advantage that we can auto-detect
 	 the offset of the PC write (which is architecture-dependent) in
 	 the cleanup routine.  */
-      RECORD_ARM_MODE_INSN (0, insn);
+      RECORD_ARM_MODE_INSN (0, insn._32_bit);
 
       dsc->cleanup = &cleanup_block_store_pc;
     }
@@ -6346,56 +6500,64 @@ copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
   return 0;
 }
 
+static int
+arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
+		     struct regcache *regs, struct displaced_step_closure *dsc)
+{
+  int load = bit (insn, 20);
+  int user = bit (insn, 22);
+  int increment = bit (insn, 23);
+  int before = bit (insn, 24);
+  int writeback = bit (insn, 21);
+  int rn = bits (insn, 16, 19);
+  union instruction_instance ii;
+
+  /* Block transfers which don't mention PC can be run directly
+     out-of-line.  */
+  if (rn != 15 && (insn & 0x8000) == 0)
+    return arm_copy_unmodified (insn, "ldm/stm", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
+			"%.8lx\n", (unsigned long) insn);
+
+  dsc->u.block.rn = rn;
+
+  dsc->u.block.load = load;
+  dsc->u.block.user = user;
+  dsc->u.block.increment = increment;
+  dsc->u.block.before = before;
+  dsc->u.block.writeback = writeback;
+
+  dsc->u.block.cond = bits (insn, 28, 31);
+  dsc->u.block.regmask = insn & 0xffff;
+
+  ii._32_bit = insn;
+  return copy_block_xfer (gdbarch, ii, regs, arm_copy_unmodified_helper,
+			  arm_copy_ldm_with_pc_helper, dsc);
+
+}
 /* Cleanup/copy SVC (SWI) instructions.  These two functions are overridden
    for Linux, where some SVC instructions must be treated specially.  */
 
 static void
 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
-	     struct displaced_step_closure *dsc)
+            struct displaced_step_closure *dsc)
 {
   CORE_ADDR from = dsc->insn_addr;
   CORE_ADDR resume_addr = from + 4;
 
   if (debug_displaced)
     fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
-			"%.8lx\n", (unsigned long) resume_addr);
+                       "%.8lx\n", (unsigned long) resume_addr);
 
   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
 }
 
-static int
-copy_svc (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
-	  struct regcache *regs, struct displaced_step_closure *dsc)
-{
-  CORE_ADDR from = dsc->insn_addr;
-
-  /* Allow OS-specific code to override SVC handling.  */
-  if (dsc->u.svc.copy_svc_os)
-    return dsc->u.svc.copy_svc_os (gdbarch, insn, to, regs, dsc);
-
-  if (debug_displaced)
-    fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
-			(unsigned long) insn);
-
-  /* Preparation: none.
-     Insn: unmodified svc.
-     Cleanup: pc <- insn_addr + 4.  */
-
-  RECORD_ARM_MODE_INSN (0, insn);
-
-  dsc->cleanup = &cleanup_svc;
-  /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
-     instruction.  */
-  dsc->wrote_to_pc = 1;
-
-  return 0;
-}
-
 /* Copy undefined instructions.  */
 
 static int
-copy_undef (struct gdbarch *gdbarch, uint32_t insn,
-	    struct displaced_step_closure *dsc)
+arm_copy_undef (uint32_t insn, struct displaced_step_closure *dsc)
 {
   if (debug_displaced)
     fprintf_unfiltered (gdb_stdlog,
@@ -6410,12 +6572,12 @@ copy_undef (struct gdbarch *gdbarch, uint32_t insn,
 /* Copy unpredictable instructions.  */
 
 static int
-copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
+arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
 	     struct displaced_step_closure *dsc)
 {
   if (debug_displaced)
     fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
-			"%.8lx\n", (unsigned long) insn);
+                       "%.8lx\n", (unsigned long) insn);
 
   RECORD_ARM_MODE_INSN (0, insn);
 
@@ -6434,54 +6596,54 @@ decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
   unsigned int rn = bits (insn, 16, 19);
 
   if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
-    return copy_unmodified (gdbarch, insn, "cps", dsc);
+    return arm_copy_unmodified (insn, "cps", dsc);
   else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
-    return copy_unmodified (gdbarch, insn, "setend", dsc);
+    return arm_copy_unmodified (insn, "setend", dsc);
   else if ((op1 & 0x60) == 0x20)
-    return copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
+    return arm_copy_unmodified (insn, "neon dataproc", dsc);
   else if ((op1 & 0x71) == 0x40)
-    return copy_unmodified (gdbarch, insn, "neon elt/struct load/store", dsc);
+    return arm_copy_unmodified (insn, "neon elt/struct load/store", dsc);
   else if ((op1 & 0x77) == 0x41)
-    return copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
+    return arm_copy_unmodified (insn, "unallocated mem hint", dsc);
   else if ((op1 & 0x77) == 0x45)
-    return copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
+    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
   else if ((op1 & 0x77) == 0x51)
     {
       if (rn != 0xf)
-	return copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
+	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
       else
-	return copy_unpred (gdbarch, insn, dsc);
+	return arm_copy_unpred (gdbarch, insn, dsc);
     }
   else if ((op1 & 0x77) == 0x55)
-    return copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
+    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
   else if (op1 == 0x57)
     switch (op2)
       {
-      case 0x1: return copy_unmodified (gdbarch, insn, "clrex", dsc);
-      case 0x4: return copy_unmodified (gdbarch, insn, "dsb", dsc);
-      case 0x5: return copy_unmodified (gdbarch, insn, "dmb", dsc);
-      case 0x6: return copy_unmodified (gdbarch, insn, "isb", dsc);
-      default: return copy_unpred (gdbarch, insn, dsc);
+      case 0x1: return arm_copy_unmodified (insn, "clrex", dsc);
+      case 0x4: return arm_copy_unmodified (insn, "dsb", dsc);
+      case 0x5: return arm_copy_unmodified (insn, "dmb", dsc);
+      case 0x6: return arm_copy_unmodified (insn, "isb", dsc);
+      default: return arm_copy_unpred (gdbarch, insn, dsc);
       }
   else if ((op1 & 0x63) == 0x43)
-    return copy_unpred (gdbarch, insn, dsc);
+    return arm_copy_unpred (gdbarch, insn, dsc);
   else if ((op2 & 0x1) == 0x0)
     switch (op1 & ~0x80)
       {
       case 0x61:
-	return copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
+	return arm_copy_unmodified (insn, "unallocated mem hint", dsc);
       case 0x65:
-	return copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
+	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
       case 0x71: case 0x75:
         /* pld/pldw reg.  */
-	return copy_preload_reg (gdbarch, insn, regs, dsc);
+	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
       case 0x63: case 0x67: case 0x73: case 0x77:
-	return copy_unpred (gdbarch, insn, dsc);
+	return arm_copy_unpred (gdbarch, insn, dsc);
       default:
-	return copy_undef (gdbarch, insn, dsc);
+	return arm_copy_undef (insn, dsc);
       }
   else
-    return copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
+    return arm_copy_undef (insn, dsc);  /* Probably unreachable.  */
 }
 
 static int
@@ -6495,26 +6657,28 @@ decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
   else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
     {
     case 0x0: case 0x2:
-      return copy_unmodified (gdbarch, insn, "srs", dsc);
+      return arm_copy_unmodified (insn, "srs", dsc);
 
     case 0x1: case 0x3:
-      return copy_unmodified (gdbarch, insn, "rfe", dsc);
+      return arm_copy_unmodified (insn, "rfe", dsc);
 
     case 0x4: case 0x5: case 0x6: case 0x7:
-      return copy_b_bl_blx (gdbarch, insn, regs, dsc);
+      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
 
     case 0x8:
       switch ((insn & 0xe00000) >> 21)
 	{
 	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
 	  /* stc/stc2.  */
-	  return copy_copro_load_store (gdbarch, insn, regs, dsc);
+	  return arm_copy_copro_load_store_helper (gdbarch,
+						   (union instruction_instance)insn,
+						   regs, dsc);
 
 	case 0x2:
-	  return copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
+	  return arm_copy_unmodified (insn, "mcrr/mcrr2", dsc);
 
 	default:
-	  return copy_undef (gdbarch, insn, dsc);
+	  return arm_copy_undef (insn, dsc);
 	}
 
     case 0x9:
@@ -6524,46 +6688,46 @@ decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
 	  {
 	  case 0x1: case 0x3:
 	    /* ldc/ldc2 imm (undefined for rn == pc).  */
-	    return rn_f ? copy_undef (gdbarch, insn, dsc)
-			: copy_copro_load_store (gdbarch, insn, regs, dsc);
+	    return rn_f ? arm_copy_undef (insn, dsc)
+			: arm_copy_copro_load_store_helper (gdbarch, (union instruction_instance)insn, regs, dsc);
 
 	  case 0x2:
-	    return copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
+	    return arm_copy_unmodified (insn, "mrrc/mrrc2", dsc);
 
 	  case 0x4: case 0x5: case 0x6: case 0x7:
 	    /* ldc/ldc2 lit (undefined for rn != pc).  */
-	    return rn_f ? copy_copro_load_store (gdbarch, insn, regs, dsc)
-			: copy_undef (gdbarch, insn, dsc);
+	    return rn_f ? arm_copy_copro_load_store_helper (gdbarch, (union instruction_instance)insn, regs, dsc)
+			: arm_copy_undef (insn, dsc);
 
 	  default:
-	    return copy_undef (gdbarch, insn, dsc);
+	    return arm_copy_undef (insn, dsc);
 	  }
       }
 
     case 0xa:
-      return copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
+      return arm_copy_unmodified (insn, "stc/stc2", dsc);
 
     case 0xb:
       if (bits (insn, 16, 19) == 0xf)
         /* ldc/ldc2 lit.  */
-	return copy_copro_load_store (gdbarch, insn, regs, dsc);
+	return arm_copy_copro_load_store_helper (gdbarch, (union instruction_instance)insn, regs, dsc);
       else
-	return copy_undef (gdbarch, insn, dsc);
+	return arm_copy_undef (insn, dsc);
 
     case 0xc:
       if (bit (insn, 4))
-	return copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
+	return arm_copy_unmodified (insn, "mcr/mcr2", dsc);
       else
-	return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
+	return arm_copy_unmodified (insn, "cdp/cdp2", dsc);
 
     case 0xd:
       if (bit (insn, 4))
-	return copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
+	return arm_copy_unmodified (insn, "mrc/mrc2", dsc);
       else
-	return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
+	return arm_copy_unmodified (insn, "cdp/cdp2", dsc);
 
     default:
-      return copy_undef (gdbarch, insn, dsc);
+      return arm_copy_undef (insn, dsc);
     }
 }
 
@@ -6581,42 +6745,41 @@ decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
   switch (op2)
     {
     case 0x0:
-      return copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
+      return arm_copy_unmodified (insn, "mrs/msr", dsc);
 
     case 0x1:
       if (op == 0x1)  /* bx.  */
-	return copy_bx_blx_reg (gdbarch, insn, regs, dsc);
+	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
       else if (op == 0x3)
-	return copy_unmodified (gdbarch, insn, "clz", dsc);
+	return arm_copy_unmodified (insn, "clz", dsc);
       else
-	return copy_undef (gdbarch, insn, dsc);
+	return arm_copy_undef (insn, dsc);
 
     case 0x2:
       if (op == 0x1)
         /* Not really supported.  */
-	return copy_unmodified (gdbarch, insn, "bxj", dsc);
+	return arm_copy_unmodified (insn, "bxj", dsc);
       else
-	return copy_undef (gdbarch, insn, dsc);
+	return arm_copy_undef (insn, dsc);
 
     case 0x3:
-      if (op == 0x1)
-	return copy_bx_blx_reg (gdbarch, insn,
-				regs, dsc);  /* blx register.  */
+      if (op == 0x1) /* blx register.  */
+	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
       else
-	return copy_undef (gdbarch, insn, dsc);
+	return arm_copy_undef (insn, dsc);
 
     case 0x5:
-      return copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
+      return arm_copy_unmodified (insn, "saturating add/sub", dsc);
 
     case 0x7:
       if (op == 0x1)
-	return copy_unmodified (gdbarch, insn, "bkpt", dsc);
+	return arm_copy_unmodified (insn, "bkpt", dsc);
       else if (op == 0x3)
         /* Not really supported.  */
-	return copy_unmodified (gdbarch, insn, "smc", dsc);
+	return arm_copy_unmodified (insn, "smc", dsc);
 
     default:
-      return copy_undef (gdbarch, insn, dsc);
+      return arm_copy_undef (insn, dsc);
     }
 }
 
@@ -6628,13 +6791,13 @@ decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
     switch (bits (insn, 20, 24))
       {
       case 0x10:
-	return copy_unmodified (gdbarch, insn, "movw", dsc);
+	return arm_copy_unmodified (insn, "movw", dsc);
 
       case 0x14:
-	return copy_unmodified (gdbarch, insn, "movt", dsc);
+	return arm_copy_unmodified (insn, "movt", dsc);
 
       case 0x12: case 0x16:
-	return copy_unmodified (gdbarch, insn, "msr imm", dsc);
+	return arm_copy_unmodified (insn, "msr imm", dsc);
 
       default:
 	return copy_alu_imm (gdbarch, insn, regs, dsc);
@@ -6644,17 +6807,17 @@ decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
       uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
 
       if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
-	return copy_alu_reg (gdbarch, insn, regs, dsc);
+	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
       else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
-	return copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
+	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
       else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
 	return decode_miscellaneous (gdbarch, insn, regs, dsc);
       else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
-	return copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
+	return arm_copy_unmodified (insn, "halfword mul/mla", dsc);
       else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
-	return copy_unmodified (gdbarch, insn, "mul/mla", dsc);
+	return arm_copy_unmodified (insn, "mul/mla", dsc);
       else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
-	return copy_unmodified (gdbarch, insn, "synch", dsc);
+	return arm_copy_unmodified (insn, "synch", dsc);
       else if (op2 == 0xb || (op2 & 0xd) == 0xd)
 	/* 2nd arg means "unpriveleged".  */
 	return copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
@@ -6676,28 +6839,28 @@ decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
 
   if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
       || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
-    return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 0);
+    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 0);
   else if ((!a && (op1 & 0x17) == 0x02)
 	    || (a && (op1 & 0x17) == 0x02 && !b))
-    return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 1);
+    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 1);
   else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
 	    || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
-    return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 0);
+    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 0);
   else if ((!a && (op1 & 0x17) == 0x03)
 	   || (a && (op1 & 0x17) == 0x03 && !b))
-    return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 1);
+    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 1);
   else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
 	    || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
-    return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
+    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
   else if ((!a && (op1 & 0x17) == 0x06)
 	   || (a && (op1 & 0x17) == 0x06 && !b))
-    return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
+    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
   else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
 	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
-    return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
+    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
   else if ((!a && (op1 & 0x17) == 0x07)
 	   || (a && (op1 & 0x17) == 0x07 && !b))
-    return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
+    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
 
   /* Should be unreachable.  */
   return 1;
@@ -6710,49 +6873,49 @@ decode_media (struct gdbarch *gdbarch, uint32_t insn,
   switch (bits (insn, 20, 24))
     {
     case 0x00: case 0x01: case 0x02: case 0x03:
-      return copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
+      return arm_copy_unmodified (insn, "parallel add/sub signed", dsc);
 
     case 0x04: case 0x05: case 0x06: case 0x07:
-      return copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
+      return arm_copy_unmodified (insn, "parallel add/sub unsigned", dsc);
 
     case 0x08: case 0x09: case 0x0a: case 0x0b:
     case 0x0c: case 0x0d: case 0x0e: case 0x0f:
-      return copy_unmodified (gdbarch, insn,
+      return arm_copy_unmodified (insn,
 			      "decode/pack/unpack/saturate/reverse", dsc);
 
     case 0x18:
       if (bits (insn, 5, 7) == 0)  /* op2.  */
 	 {
 	  if (bits (insn, 12, 15) == 0xf)
-	    return copy_unmodified (gdbarch, insn, "usad8", dsc);
+	    return arm_copy_unmodified (insn, "usad8", dsc);
 	  else
-	    return copy_unmodified (gdbarch, insn, "usada8", dsc);
+	    return arm_copy_unmodified (insn, "usada8", dsc);
 	}
       else
-	 return copy_undef (gdbarch, insn, dsc);
+	 return arm_copy_undef (insn, dsc);
 
     case 0x1a: case 0x1b:
       if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
-	return copy_unmodified (gdbarch, insn, "sbfx", dsc);
+	return arm_copy_unmodified (insn, "sbfx", dsc);
       else
-	return copy_undef (gdbarch, insn, dsc);
+	return arm_copy_undef (insn, dsc);
 
     case 0x1c: case 0x1d:
       if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
 	 {
 	  if (bits (insn, 0, 3) == 0xf)
-	    return copy_unmodified (gdbarch, insn, "bfc", dsc);
+	    return arm_copy_unmodified (insn, "bfc", dsc);
 	  else
-	    return copy_unmodified (gdbarch, insn, "bfi", dsc);
+	    return arm_copy_unmodified (insn, "bfi", dsc);
 	}
       else
-	return copy_undef (gdbarch, insn, dsc);
+	return arm_copy_undef (insn, dsc);
 
     case 0x1e: case 0x1f:
       if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
-	return copy_unmodified (gdbarch, insn, "ubfx", dsc);
+	return arm_copy_unmodified (insn, "ubfx", dsc);
       else
-	return copy_undef (gdbarch, insn, dsc);
+	return arm_copy_undef (insn, dsc);
     }
 
   /* Should be unreachable.  */
@@ -6764,53 +6927,49 @@ decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
 		    struct regcache *regs, struct displaced_step_closure *dsc)
 {
   if (bit (insn, 25))
-    return copy_b_bl_blx (gdbarch, insn, regs, dsc);
+    return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
   else
-    return copy_block_xfer (gdbarch, insn, regs, dsc);
+    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
 }
 
 static int
-decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
-		      struct regcache *regs,
-		      struct displaced_step_closure *dsc)
-{
-  unsigned int opcode = bits (insn, 20, 24);
+decode_svc_copro (struct gdbarch *gdbarch, union instruction_instance insn,
+		  copy_unmodified_helper copy_unmodified,
+		  copy_copro_load_store_helper copy_copro_load_store,
+		  copy_undef_helper copy_undef,
+		  copy_svc_helper copy_svc,
+		  struct regcache *regs, struct displaced_step_closure *dsc,
+		  unsigned int ops[])
+{
+  unsigned int op1 = ops[0];
+  unsigned int op = ops[1];
+  unsigned int coproc = ops[2];
+  unsigned int opcode = ops[3];
 
-  switch (opcode)
+  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
     {
-    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
-      return copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
+      switch (opcode)
+	{
+	case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
+	  return copy_unmodified (insn, "vfp/neon mrrc/mcrr", dsc);
 
-    case 0x08: case 0x0a: case 0x0c: case 0x0e:
-    case 0x12: case 0x16:
-      return copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
+	case 0x08: case 0x0a: case 0x0c: case 0x0e:
+	case 0x12: case 0x16:
+	  return copy_unmodified (insn, "vfp/neon vstm/vpush", dsc);
 
-    case 0x09: case 0x0b: case 0x0d: case 0x0f:
-    case 0x13: case 0x17:
-      return copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
+	case 0x09: case 0x0b: case 0x0d: case 0x0f:
+	case 0x13: case 0x17:
+	  return copy_unmodified (insn, "vfp/neon vldm/vpop", dsc);
 
-    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
-    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
-      /* Note: no writeback for these instructions.  Bit 25 will always be
-	 zero though (via caller), so the following works OK.  */
-      return copy_copro_load_store (gdbarch, insn, regs, dsc);
+	case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
+	case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
+	  /* Note: no writeback for these instructions.  Bit 25 will always be
+	     zero though (via caller), so the following works OK.  */
+	  return copy_copro_load_store (gdbarch, insn, regs, dsc);
+	}
+      /* Should be unreachable.  */
+      return 1;
     }
-
-  /* Should be unreachable.  */
-  return 1;
-}
-
-static int
-decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
-		  struct regcache *regs, struct displaced_step_closure *dsc)
-{
-  unsigned int op1 = bits (insn, 20, 25);
-  int op = bit (insn, 4);
-  unsigned int coproc = bits (insn, 8, 11);
-  unsigned int rn = bits (insn, 16, 19);
-
-  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
-    return decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
   else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
 	   && (coproc & 0xe) != 0xa)
     /* stc/stc2.  */
@@ -6819,31 +6978,54 @@ decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
 	   && (coproc & 0xe) != 0xa)
     /* ldc/ldc2 imm/lit.  */
     return copy_copro_load_store (gdbarch, insn, regs, dsc);
+  else if ((op1 & 0x30) == 0x30)
+    return copy_svc (gdbarch, insn, dsc->scratch_base, regs, dsc);
+
   else if ((op1 & 0x3e) == 0x00)
-    return copy_undef (gdbarch, insn, dsc);
+    return copy_undef (insn, dsc);
   else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
-    return copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
+    return copy_unmodified (insn, "neon 64bit xfer", dsc);
   else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
-    return copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
+    return copy_unmodified (insn, "mcrr/mcrr2", dsc);
   else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
-    return copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
+    return copy_unmodified (insn, "mrrc/mrrc2", dsc);
   else if ((op1 & 0x30) == 0x20 && !op)
     {
       if ((coproc & 0xe) == 0xa)
-	return copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
+	return copy_unmodified (insn, "vfp dataproc", dsc);
       else
-	return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
+	return copy_unmodified (insn, "cdp/cdp2", dsc);
     }
   else if ((op1 & 0x30) == 0x20 && op)
-    return copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
+    return copy_unmodified (insn, "neon 8/16/32 bit xfer", dsc);
   else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
-    return copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
+    return copy_unmodified (insn, "mcr/mcr2", dsc);
   else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
-    return copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
-  else if ((op1 & 0x30) == 0x30)
-    return copy_svc (gdbarch, insn, to, regs, dsc);
+    return copy_unmodified (insn, "mrc/mrc2", dsc);
   else
-    return copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
+    return copy_undef (insn, dsc);  /* Possibly unreachable.  */
+}
+
+static int
+arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
+		      struct regcache *regs, struct displaced_step_closure *dsc)
+{
+  unsigned int ops[4];
+  union instruction_instance ii;
+  unsigned int rn = bits (insn, 16, 19);
+
+  ops[0] = bits (insn, 20, 25);
+  ops[1] = bit (insn, 4);
+  ops[2] = bits (insn, 8, 11);
+  ops[3] = bits (insn, 20, 24);
+
+
+  ii._32_bit = insn;
+
+  return decode_svc_copro (gdbarch, ii, arm_copy_unmodified_helper,
+			   arm_copy_copro_load_store_helper,
+			   arm_copy_undef_helper, arm_copy_svc_helper,
+			  regs, dsc, ops);
 }
 
 static void
@@ -6903,7 +7085,7 @@ arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
       break;
 
     case 0xc: case 0xd: case 0xe: case 0xf:
-      err = decode_svc_copro (gdbarch, insn, to, regs, dsc);
+      err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
       break;
     }
 
-- 
1.7.0.4

