This is the mail archive of the gdb-patches@sourceware.org mailing list for the GDB project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Displaced stepping 0004: wip: 32-bit Thumb instructions


This is the last one.  The subject says it all; however, this patch is
*not* for review — it is meant to help you understand my design of the
first 3 patches:

0001: refactor displaced stepping to handle 32-bit and 16-bit,
http://sourceware.org/ml/gdb-patches/2011-02/msg00790.html
0002: refactor and create some copy helpers
http://sourceware.org/ml/gdb-patches/2011-02/msg00792.html
0003: for 16-bit Thumb instructions
http://sourceware.org/ml/gdb-patches/2011-02/msg00866.html

There are still some problems in this patch, and I still need some time
to polish it.

-- 
Yao (齐尧)
	* arm-tdep.c (thumb_copy_unmodified_32bit): New.
	(thumb2_copy_preload, thumb2_copy_preload_reg): New.
	(thumb2_copy_b_bl_blx, thumb2_copy_alu_reg): New.
	(thumb2_copy_alu_shifted_reg): New.
	(thumb2_copy_ldr_str_ldrb_strb): New.
	(thumb_32bit_copy_undef): New.
	(thumb2_copy_unmodified_helper): Copy helpers for Thumb-2 mode.
	(thumb2_copy_undef_helper): Likewise.
	(thumb2_copy_copro_load_store_helper): Likewise.
	(thumb2_copy_ldm_with_pc_helper): Likewise.
	(thumb2_copy_svc_helper): Likewise.
	(thumb2_decode_svc_copro): New.  Call copy helpers.
	(thumb2_copy_block_xfer): Likewise.
	(thumb_decode_pc_relative_32bit): New.
	(decode_thumb_32bit_ld_mem_hints): New.
---
 gdb/arm-tdep.c |  587 +++++++++++++++++++++++++++++++++++++++++++++++++++++++-
 1 files changed, 582 insertions(+), 5 deletions(-)

diff --git a/gdb/arm-tdep.c b/gdb/arm-tdep.c
index 269c583..86bcfaa 100644
--- a/gdb/arm-tdep.c
+++ b/gdb/arm-tdep.c
@@ -5349,6 +5349,23 @@ thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
   return 0;
 }
 
+/* Copy a 32-bit (Thumb-2) instruction without any modification.  INSN1 and
+   INSN2 are the two 16-bit halfwords; INAME names the opcode/class for the
+   debug log only.  Always succeeds (returns 0).  */
+static int
+thumb_copy_unmodified_32bit (unsigned int insn1, unsigned int insn2,
+			     const char *iname,
+			     struct displaced_step_closure *dsc)
+{
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
+			"opcode/class '%s' unmodified\n", insn1, insn2,
+			iname);
+
+  /* Two halfwords make up one 32-bit Thumb-2 instruction.  */
+  RECORD_THUMB2_MODE_INSN (0, insn1, insn2);
+  dsc->numinsns = 2;
+
+  return 0;
+}
+
 /* Preload instructions with immediate offset.  */
 
 static void
@@ -5402,6 +5419,27 @@ arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
   return copy_preload (gdbarch, rn, regs, dsc);
 }
 
+
+/* Copy Thumb-2 preload (PLD/PLI) instructions with immediate offset.  */
+static int
+thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
+		     struct regcache *regs, struct displaced_step_closure *dsc)
+{
+  unsigned int rn = bits (insn1, 0, 3);
+
+  /* Only a PC-relative preload needs rewriting.  */
+  if (rn != ARM_PC_REGNUM)
+    return thumb_copy_unmodified_32bit (insn1, insn2, "preload", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.4x %.4x\n",
+			(unsigned short) insn1, (unsigned short) insn2);
+
+
+  /* Clear the Rn field (low nibble of the first halfword) so the copied
+     insn uses r0.  NOTE(review): presumably copy_preload arranges for r0
+     to hold the original PC value — confirm against copy_preload.  */
+  RECORD_THUMB2_MODE_INSN (0, (insn1 & 0xfff0), insn2);
+  dsc->numinsns = 2;
+
+  return copy_preload (gdbarch, rn, regs, dsc);
+}
+
 /* Preload instructions with register offset.  */
 
 static int
@@ -5450,6 +5488,29 @@ arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
   return copy_preload_reg (gdbarch, rn, rm, regs, dsc);
 }
 
+/* Copy Thumb-2 preload (PLD/PLI) instructions with register offset.  */
+static int
+thumb2_copy_preload_reg (struct gdbarch *gdbarch, uint16_t insn1,
+			 uint16_t insn2, struct regcache *regs,
+			 struct displaced_step_closure *dsc)
+{
+  unsigned int rn = bits (insn1, 0, 3);
+  unsigned int rm = bits (insn2, 0, 3);
+
+  /* Only rewrite when the PC takes part in the address.  */
+  if (rn != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
+    return thumb_copy_unmodified_32bit (insn1, insn2, "preload reg", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.4x %.4x\n",
+			(unsigned short) insn1, (unsigned short) insn2);
+
+  /* Rewrite Rn to r0 and Rm to r1.  NOTE(review): presumably
+     copy_preload_reg loads the original Rn/Rm values into r0/r1 —
+     confirm.  */
+  RECORD_THUMB2_MODE_INSN (0, insn1 & 0xfff0, (insn2 & 0xfff0) | 0x1);
+  dsc->numinsns = 2;
+
+  return copy_preload_reg (gdbarch, rn, rm, regs, dsc);
+}
+
+
+
 /* Copy/cleanup coprocessor load and store instructions.  */
 
 static void
@@ -5575,6 +5636,60 @@ arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
   return copy_b_bl_blx (gdbarch, cond, exchange, link, offset, regs, dsc);
 }
 
+/* Copy B/BL/BLX Thumb-2 instructions with immediate destinations.
+   INSN1/INSN2 are the two halfwords.  The branch offset is reassembled
+   from the encoding's scattered immediate fields, then the generic
+   copy_b_bl_blx performs the PC bookkeeping.  */
+static int
+thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, unsigned short insn1,
+		      unsigned short insn2, struct regcache *regs,
+		      struct displaced_step_closure *dsc)
+{
+  int link = bit (insn2, 14);
+  int exchange = link && !bit (insn2, 12);
+  int cond = INST_AL;
+  long offset = 0;
+
+  if (!link && !exchange) /* B */
+    {
+      int j1 = bit (insn2, 13);
+      int j2 = bit (insn2, 11);
+      int s = sbits (insn1, 10, 10);
+
+      if (bit (insn2, 12))
+	{
+	  /* Encoding T4 (unconditional): imm32 = S:I1:I2:imm10:imm11:'0'.
+	     COND stays INST_AL; bits 6-9 of the first halfword belong to
+	     imm10 here, not to a condition field.  */
+	  int i1 = !(j1 ^ bit (insn1, 10));
+	  int i2 = !(j2 ^ bit (insn1, 10));
+
+	  offset = (bits (insn2, 0, 10) << 1)
+	    | (bits (insn1, 0, 9) << 12)
+	    | (i2 << 22)
+	    | (i1 << 23)
+	    | (s << 24);
+	}
+      else
+	{
+	  /* Encoding T3 (conditional): imm32 = S:J2:J1:imm6:imm11:'0'.  */
+	  cond = bits (insn1, 6, 9);
+	  offset = (bits (insn2, 0, 10) << 1)
+	    | (bits (insn1, 0, 5) << 12)
+	    | (j1 << 18)
+	    | (j2 << 19)
+	    | (s << 20);
+	}
+    }
+  else
+    {
+      /* BL/BLX: sign bit and imm10 come from the first halfword.  BLX
+	 targets are 4-byte aligned, so its low immediate is shifted by 2
+	 instead of 1.  */
+      offset = (sbits (insn1, 0, 9) << 12);
+      offset |= exchange ?
+	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
+    }
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
+			"%.4x %.4x with offset %.8lx\n",
+			link ? (exchange ? "blx" : "bl") : "b",
+			insn1, insn2, offset);
+
+  /* Plus the size of THUMB_NOP and B/BL/BLX.  */
+  dsc->u.branch.dest = dsc->insn_addr + 4 + offset;
+  RECORD_THUMB_MODE_INSN (0, THUMB_NOP);
+
+  return copy_b_bl_blx (gdbarch, cond, exchange, link, offset, regs, dsc);
+}
 /* Copy B Thumb instructions.  */
 static int
 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
@@ -5853,6 +5968,23 @@ thumb_copy_alu_reg (struct gdbarch *gdbarch, unsigned short insn,
   return copy_alu_reg (gdbarch, regs, dsc, reg_ids);
 }
 
+/* Copy Thumb-2 data-processing (register) instructions.  These encodings
+   never permit the PC as an operand, so the instruction can always be
+   copied unmodified.  */
+static int
+thumb2_copy_alu_reg (struct gdbarch *gdbarch, unsigned short insn1,
+		     unsigned short insn2, struct regcache *regs,
+		     struct displaced_step_closure *dsc)
+{
+  unsigned int rn = bits (insn1, 0, 3);
+  unsigned int rm = bits (insn2, 0, 3);
+  unsigned int rd = bits (insn2, 8, 11);
+
+  /* In Thumb-2, rn, rm and rd can't be r15.  */
+  if (rn == ARM_PC_REGNUM || rm == ARM_PC_REGNUM || rd == ARM_PC_REGNUM)
+    internal_error (__FILE__, __LINE__,
+		    _("thumb2_copy_alu_reg: rn, rm or rd shouldn't be r15"));
+
+  return thumb_copy_unmodified_32bit (insn1, insn2, "ALU reg", dsc);
+}
+
 /* Cleanup/copy arithmetic/logic insns with shifted register RHS.  */
 
 static void
@@ -5928,6 +6060,22 @@ arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
   return 0;
 }
 
+/* Copy Thumb-2 data-processing (shifted register) instructions.  These
+   encodings never permit the PC as an operand, so the instruction can
+   always be copied unmodified.  */
+static int
+thumb2_copy_alu_shifted_reg (struct gdbarch *gdbarch, unsigned short insn1,
+			     unsigned short insn2, struct regcache *regs,
+			     struct displaced_step_closure *dsc)
+{
+  unsigned int rm = bits (insn1, 0, 3);
+  unsigned int rd = bits (insn2, 8, 11);
+  unsigned int rs = bits (insn2, 0, 3);
+
+  /* In Thumb-2, rs, rm and rd can't be r15.  */
+  if (rs == ARM_PC_REGNUM || rm == ARM_PC_REGNUM || rd == ARM_PC_REGNUM)
+    internal_error (__FILE__, __LINE__,
+		    _("thumb2_copy_alu_shifted_reg: rs, rm or rd shouldn't "
+		      "be r15"));
+
+  return thumb_copy_unmodified_32bit (insn1, insn2, "ALU shifted reg", dsc);
+}
+
 /* Clean up load instructions.  */
 
 static void
@@ -6114,6 +6262,69 @@ copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, struct regcache *regs,
 }
 
 static int
+thumb2_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, unsigned short insn1,
+			      unsigned short insn2,  struct regcache *regs,
+			      struct displaced_step_closure *dsc,
+			      int load, int byte, int usermode, int writeback)
+{
+  /* Bit 9 of the first halfword distinguishes the immediate-offset form
+     from the register-offset form.  */
+  int immed = !bit (insn1, 9);
+  unsigned int rt = bits (insn2, 12, 15);
+  unsigned int rn = bits (insn1, 0, 3);
+  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
+
+  /* NOTE(review): RM is tested even in the immediate form, where bits 0-3
+     of the second halfword belong to the immediate; a value of 0xf there
+     forces the rewrite path below — confirm this is harmless.  */
+  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
+    return thumb_copy_unmodified_32bit (insn1, insn2, "load/store", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog,
+			"displaced: copying %s%s r%d [r%d] insn %.4x%.4x\n",
+			load ? (byte ? "ldrb" : "ldr")
+			     : (byte ? "strb" : "str"), usermode ? "t" : "",
+			rt, rn, insn1, insn2);
+
+  dsc->rd = rt;
+  dsc->u.ldst.rn = rn;
+  dsc->u.ldst.immed = immed;
+
+  copy_ldr_str_ldrb_strb (gdbarch, regs, dsc, load, byte, usermode,
+			  writeback, rm);
+
+  if (load || rt != ARM_PC_REGNUM)
+    {
+      dsc->u.ldst.restore_r4 = 0;
+
+      if (immed)
+	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
+	   ->
+	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
+	{
+	  /* Set Rn field to r2, clear Rt field to r0.  */
+	  RECORD_THUMB_MODE_INSN (0, (insn1 & 0xfff0) | 0x2);
+	  RECORD_THUMB_MODE_INSN (1, insn2 & 0x0fff);
+	}
+      else
+	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
+	   ->
+	   {ldr,str}[b]<cond> r0, [r2, r3].  */
+	{
+	  /* Set Rn field to r2, Rt to r0, Rm to r3.  */
+	  RECORD_THUMB_MODE_INSN (0, (insn1 & 0xfff0) | 0x2);
+	  RECORD_THUMB_MODE_INSN (1, (insn2 & 0x0ff0) | 0x3);
+	}
+
+      dsc->numinsns = 2;
+    }
+  else
+    {
+      /* In Thumb-32 instructions, the behavior is unpredictable when Rt is
+	 PC, while the behavior is undefined when Rn is PC.  Shortly, neither
+	 Rt nor Rn can be PC.  */
+
+      gdb_assert (0);
+    }
+
+  return 0;
+}
+
+static int
 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
 			    struct regcache *regs,
 			    struct displaced_step_closure *dsc,
@@ -6411,7 +6622,7 @@ typedef int (*copy_svc_helper)(struct gdbarch *,
 			       struct regcache *,
 			       struct displaced_step_closure *);
 
-/* Define helpers for ARM.  */
+/* Define helpers for ARM and Thumb-2.  */
 static int
 arm_copy_unmodified_helper (union instruction_instance insn, const char *iname,
 			    struct displaced_step_closure *dsc)
@@ -6419,6 +6630,15 @@ arm_copy_unmodified_helper (union instruction_instance insn, const char *iname,
   return arm_copy_unmodified (insn._32_bit, iname, dsc);
 }
 
+/* Thumb-2 adapter for the generic copy helpers: forward the two halfwords
+   of INSN to thumb_copy_unmodified_32bit.  */
+static int
+thumb2_copy_unmodified_helper (union instruction_instance insn,
+			       const char *iname,
+			       struct displaced_step_closure *dsc)
+{
+  return thumb_copy_unmodified_32bit (insn._16_bit[0], insn._16_bit[1], iname,
+				      dsc);
+}
+
 static int arm_copy_undef (uint32_t insn, struct displaced_step_closure *dsc);
 
 static int
@@ -6428,6 +6648,15 @@ arm_copy_undef_helper (union instruction_instance ii,
   return arm_copy_undef (ii._32_bit, dsc);
 }
 
+static int thumb_32bit_copy_undef (uint16_t insn1, uint16_t insn2,
+				   struct displaced_step_closure *dsc);
+/* Thumb-2 adapter: forward an undefined instruction's halfwords to
+   thumb_32bit_copy_undef.  */
+static int
+thumb2_copy_undef_helper (union instruction_instance ii,
+			  struct displaced_step_closure *dsc)
+{
+  return thumb_32bit_copy_undef (ii._16_bit[0], ii._16_bit[1], dsc);
+}
+
 static int
 arm_copy_copro_load_store_helper (struct gdbarch *gdbarch,
 				  union instruction_instance insn,
@@ -6452,6 +6681,30 @@ arm_copy_copro_load_store_helper (struct gdbarch *gdbarch,
 }
 
 static int
+thumb2_copy_copro_load_store_helper (struct gdbarch *gdbarch,
+				     union instruction_instance insn,
+				     struct regcache *regs,
+				     struct displaced_step_closure *dsc)
+{
+  unsigned int rn = bits (insn._16_bit[0], 0, 3);
+
+  /* Record the Thumb-2 form, not the ARM form, when no rewrite is
+     needed.  */
+  if (rn != ARM_PC_REGNUM)
+    return thumb_copy_unmodified_32bit (insn._16_bit[0], insn._16_bit[1],
+					"copro load/store", dsc);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
+			"load/store insn %.4x %.4x\n",
+			insn._16_bit[0], insn._16_bit[1]);
+
+  dsc->u.ldst.writeback = bit (insn._16_bit[0], 9);
+  dsc->u.ldst.rn = rn;
+
+  /* Clear the Rn field (low nibble of the first halfword) to r0.  */
+  RECORD_THUMB2_MODE_INSN (0, insn._16_bit[0] & 0xfff0, insn._16_bit[1]);
+  dsc->numinsns = 2;
+
+  return copy_copro_load_store (gdbarch, rn, regs, dsc);
+}
+
+static int
 arm_copy_ldm_with_pc_helper(union instruction_instance insn,
 			    struct displaced_step_closure *dsc,
 			    struct regcache *regs)
@@ -6508,6 +6761,61 @@ arm_copy_ldm_with_pc_helper(union instruction_instance insn,
   return 0;
 }
 
+static int
+thumb2_copy_ldm_with_pc_helper (union instruction_instance insn,
+				struct displaced_step_closure *dsc,
+				struct regcache *regs)
+{
+  /* LDM of a list of registers which includes PC.  Implement by
+     rewriting the list of registers to be transferred into a
+     contiguous chunk r0...rX before doing the transfer, then shuffling
+     registers into the correct places in the cleanup routine.  */
+  unsigned int regmask = dsc->u.block.regmask;
+  /* NOTE(review): NEW_RN, TO and BIT are never used below — leftovers
+     from the ARM helper?  Confirm before landing.  */
+  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
+  unsigned int to = 0, from = 0, i, new_rn;
+
+  /* Save the current values of r0..r(N-1) so the cleanup routine can
+     restore them.  NOTE(review): FROM is always 0 here — confirm the
+     second argument of displaced_read_reg is the 'from' address and that
+     0 is intended.  */
+  for (i = 0; i < num_in_list; i++)
+    dsc->tmp[i] = displaced_read_reg (regs, from, i);
+
+  /* Writeback makes things complicated.  We need to avoid clobbering
+     the base register with one of the registers in our modified
+     register list, but just using a different register can't work in
+     all cases, e.g.:
+
+     ldm r14!, {r0-r13,pc}
+
+     which would need to be rewritten as:
+
+     ldm rN!, {r0-r14}
+
+     but that can't work, because there's no free register for N.
+
+     Solve this by turning off the writeback bit, and emulating
+     writeback manually in the cleanup routine.  */
+
+  /* NOTE(review): clearing bit 21 of the 32-bit view assumes the union's
+     halfword layout matches the ARM word encoding — confirm for the
+     Thumb halfword order on both endiannesses.  */
+  if (dsc->u.block.writeback )
+    insn._32_bit &= ~(1 << 21);
+
+  new_regmask = (1 << num_in_list) - 1;
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
+				      "{..., pc}: original reg list %.4x, modified "
+				      "list %.4x\n"), dsc->u.block.rn,
+			dsc->u.block.writeback ? "!" : "",
+			(int) dsc->u.block.regmask,
+			new_regmask);
+
+  /* In Thumb encoding, bit 13 should be always zero.  NOTE(review): this
+     is a Thumb-2 helper, so presumably displaced_in_arm_mode is always
+     false here — confirm whether the ARM branch is reachable.  */
+  if (displaced_in_arm_mode (regs))
+    new_regmask &= 0xffff;
+  else
+    new_regmask &= 0xdfff;
+
+  RECORD_THUMB2_MODE_INSN(0, insn._16_bit[0], new_regmask);
+  return 0;
+}
+
 static void cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
 			 struct displaced_step_closure *dsc);
 static int
@@ -6538,7 +6846,15 @@ arm_copy_svc_helper (struct gdbarch *gdbarch, union instruction_instance insn,
 
   return 0;
 }
-
+static int
+thumb2_copy_svc_helper (struct gdbarch *gdbarch,
+			union instruction_instance insn,
+			CORE_ADDR to, struct regcache *regs,
+			struct displaced_step_closure *dsc)
+{
+  /* Not implemented.  NOTE(review): unlike arm_copy_svc_helper this
+     records no instruction and installs no cleanup, so an SVC is
+     currently dropped silently — confirm this is intended for a WIP
+     patch before it lands.  */
+  return 0;
+}
 
 /* Helper definition is done.  */
 
@@ -6637,19 +6953,33 @@ arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
 			  arm_copy_ldm_with_pc_helper, dsc);
 
 }
+
+/* Copy Thumb-2 LDM/STM/PUSH/POP.  These encodings match their ARM
+   counterparts, so the shared copy_block_xfer does the work.  */
+static int
+thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
+			struct regcache *regs,
+			struct displaced_step_closure *dsc)
+{
+  union instruction_instance ii;
+
+  ii._16_bit[0] = insn1;
+  ii._16_bit[1] = insn2;
+
+  return copy_block_xfer (gdbarch, ii, regs, thumb2_copy_unmodified_helper,
+			  thumb2_copy_ldm_with_pc_helper, dsc);
+}
+
 /* Cleanup/copy SVC (SWI) instructions.  These two functions are overridden
    for Linux, where some SVC instructions must be treated specially.  */
 
 static void
 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
-            struct displaced_step_closure *dsc)
+	     struct displaced_step_closure *dsc)
 {
   CORE_ADDR from = dsc->insn_addr;
   CORE_ADDR resume_addr = from + 4;
 
   if (debug_displaced)
     fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
-                       "%.8lx\n", (unsigned long) resume_addr);
+			"%.8lx\n", (unsigned long) resume_addr);
 
   displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
 }
@@ -6669,6 +6999,22 @@ arm_copy_undef (uint32_t insn, struct displaced_step_closure *dsc)
   return 0;
 }
 
+static int
+thumb_32bit_copy_undef (uint16_t insn1, uint16_t insn2,
+			struct displaced_step_closure *dsc)
+{
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
+			"%.4x %.4x\n", (unsigned short) insn1,
+			(unsigned short) insn2);
+
+  RECORD_THUMB2_MODE_INSN (0, insn1, insn2);
+  dsc->numinsns = 2;
+
+  return 0;
+}
+
 /* Copy unpredictable instructions.  */
 
 static int
@@ -7129,6 +7475,28 @@ arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
 }
 
 static int
+thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
+			 uint16_t insn2, struct regcache *regs,
+			 struct displaced_step_closure *dsc)
+{
+  unsigned int ops[4];
+  union instruction_instance ii;
+
+  /* Extract the dispatch fields shared with the ARM encoding; the shared
+     decode_svc_copro consumes them.  */
+  ops[0] = bits (insn1, 4, 9);
+  ops[1] = bit (insn2, 4);
+  ops[2] = bits (insn2, 8, 11);
+  ops[3] = bits (insn1, 4, 8);
+
+  ii._16_bit[0] = insn1;
+  ii._16_bit[1] = insn2;
+
+  return decode_svc_copro (gdbarch, ii, thumb2_copy_unmodified_helper,
+			   thumb2_copy_copro_load_store_helper,
+			   thumb2_copy_undef_helper, thumb2_copy_svc_helper,
+			   regs, dsc, ops);
+}
+
+static int
 copy_pc_relative (struct regcache *regs, struct displaced_step_closure *dsc,
 		  int rd, unsigned int imm, int is_32bit)
 {
@@ -7179,6 +7547,26 @@ thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, unsigned short insn,
 }
 
 static int
+thumb_decode_pc_relative_32bit (struct gdbarch *gdbarch, unsigned short insn1,
+				unsigned short insn2, struct regcache *regs,
+				struct displaced_step_closure *dsc)
+{
+  unsigned int rd = bits (insn2, 8, 11);
+  /* The immediate has the same encoding in both ADR and ADDS, so simply
+     extract the raw immediate encoding rather than computing its value.
+     When generating the ADDS instruction, the immediate can then be ORed
+     directly into the ADDS encoding.  */
+  unsigned int imm = (insn2 & 0x70ff) | (bit (insn1, 10) << 26);
+
+  if (debug_displaced)
+    fprintf_unfiltered (gdb_stdlog,
+			"displaced: copying thumb adr r%d, #%d insn %.4x%.4x\n",
+			rd, imm, insn1, insn2);
+
+  return copy_pc_relative (regs, dsc, rd, imm, 1);
+}
+
+static int
 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
 			      struct regcache *regs,
 			      struct displaced_step_closure *dsc)
@@ -7437,12 +7825,201 @@ thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch,
 		    _("thumb_process_displaced_insn: Instruction decode error"));
 }
 
+/* Decode and copy 32-bit Thumb load byte/halfword/word and memory-hint
+   (PLD/PLI) instructions, dispatching on bits 5-6 of the first
+   halfword.  */
+static int
+decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
+				 unsigned short insn1, unsigned short insn2,
+				 struct regcache *regs,
+				 struct displaced_step_closure *dsc)
+{
+  int rd = bits (insn2, 12, 15);
+  int user_mode = (bits (insn2, 8, 11) == 0xe);
+  int writeback = 0;
+
+  switch (bits (insn1, 5, 6))
+    {
+    case 0: /* Load byte and memory hints.  */
+      if (rd == 0xf) /* PLD/PLI */
+	{
+	  if (bits (insn2, 6, 11))
+	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
+	  else
+	    return thumb2_copy_preload_reg (gdbarch, insn1, insn2, regs, dsc);
+	}
+      else
+	{
+	  int op1 = bits (insn1, 7, 8);
+
+	  if ((op1 == 0 || op1 == 2) && bit (insn2, 11))
+	    writeback = bit (insn2, 8);
+
+	  return thumb2_copy_ldr_str_ldrb_strb (gdbarch, insn1, insn2, regs,
+						dsc, 1, 1, user_mode,
+						writeback);
+	}
+    case 1: /* Load halfword and memory hints.  */
+      if (rd == 0xf) /* PLD{W} and unallocated memory hint.  */
+	{
+	  if (bits (insn2, 6, 11))
+	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
+	  else
+	    return thumb2_copy_preload_reg (gdbarch, insn1, insn2, regs, dsc);
+	}
+      else
+	{
+	  int op1 = bits (insn1, 7, 8);
+
+	  if ((op1 == 0 || op1 == 2) && bit (insn2, 11))
+	    writeback = bit (insn2, 8);
+
+	  /* NOTE(review): halfword loads are passed with BYTE == 0, i.e.
+	     treated like word loads — confirm copy_ldr_str_ldrb_strb
+	     handles halfword access sizes correctly.  */
+	  return thumb2_copy_ldr_str_ldrb_strb (gdbarch, insn1, insn2, regs,
+						dsc, 1, 0, user_mode,
+						writeback);
+	}
+    case 2: /* Load word.  */
+      {
+	int op1 = bits (insn1, 7, 8);
+
+	if ((op1 == 0 || op1 == 2) && bit (insn2, 11))
+	  writeback = bit (insn2, 8);
+
+	return thumb2_copy_ldr_str_ldrb_strb (gdbarch, insn1, insn2, regs,
+					      dsc, 1, 0, user_mode, writeback);
+      }
+    default:
+      return thumb_32bit_copy_undef (insn1, insn2, dsc);
+    }
+  return 0;
+}
+
 static void
 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
 				    uint16_t insn2, struct regcache *regs,
 				    struct displaced_step_closure *dsc)
 {
-  error (_("Displaced stepping is only supported in ARM mode and Thumb 16bit instructions"));
+  int err = 0;
+  unsigned short op;
+
+  op = bit (insn2, 15);
+
+  switch (bits (insn1, 11, 12))
+    {
+    case 1:
+      {
+	switch (bits (insn1, 9, 10))
+	  {
+	  case 0: /* load/store multiple */
+	    switch (bits (insn1, 7, 8))
+	      {
+	      case 0: case 3: /* SRS, RFE */
+		err = thumb_copy_unmodified_32bit (insn1, insn2, "srs/rfe",
+						   dsc);
+		break;
+	      case 1: case 2: /* LDM/STM/PUSH/POP */
+		/* These Thumb 32-bit insns have the same encodings as ARM
+		   counterparts.  */
+		err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
+	      }
+	    break;
+	  case 1: /* Data processing (register) */
+	    err = thumb2_copy_alu_reg (gdbarch, insn1, insn2, regs, dsc);
+	    break;
+	  default: /* Coprocessor instructions */
+	    /* Thumb 32bit coprocessor instructions have the same encoding
+	       as ARM's.  */
+	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
+	    break;
+	  }
+      break;
+      }
+    case 2:
+      if (op) /* Branch and misc control.  */
+	{
+	  if (bit (insn2, 14)) /* BLX/BL */
+	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
+	  else if (!bits (insn2, 12, 14) && bits (insn1, 8, 10) != 0x7)
+	    /* Conditional Branch */
+	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
+	  else
+	    err = thumb_copy_unmodified_32bit (insn1, insn2, "misc ctrl",
+					       dsc);
+	}
+      else
+	{
+	  if (bit (insn1, 9)) /* Data processing (plain binary imm) */
+	    {
+	      int op = bits (insn1, 4, 8);
+	      int rn = bits (insn1, 0, 4);
+	      if ((op == 0 || op == 0xa) && rn == 0xf)
+		err = thumb_decode_pc_relative_32bit (gdbarch, insn1, insn2,
+						      regs, dsc);
+	      else
+		err = thumb_copy_unmodified_32bit (insn1, insn2, "dp/pb", dsc);
+	    }
+	  else /* Data processing (modified immediate) */
+	    err = thumb_copy_unmodified_32bit (insn1, insn2, "dp/mi", dsc);
+	}
+      break;
+    case 3:
+      switch (bits (insn1, 9, 10))
+	{
+	case 0:
+	  if (bit (insn1, 4))
+	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
+						   regs, dsc);
+	  else
+	    {
+	      if (bit (insn1, 8)) /* NEON Load/Store */
+		err = thumb_copy_unmodified_32bit (insn1, insn2,
+						   "neon elt/struct load/store",
+						   dsc);
+	      else /* Store single data item */
+		{
+		  int user_mode = (bits (insn2, 8, 11) == 0xe);
+		  int byte = (bits (insn1, 5, 7) == 0
+			      || bits (insn1, 5, 7) == 4);
+		  int writeback = 0;
+
+		  if (bits (insn1, 5, 7) < 3 && bit (insn2, 11))
+		    writeback = bit (insn2, 8);
+
+		  err = thumb2_copy_ldr_str_ldrb_strb (gdbarch, insn1, insn2,
+						      regs, dsc, 0, byte,
+						      user_mode, writeback);
+		}
+	    }
+	  break;
+	case 1:
+	  switch (bits (insn1, 7, 8))
+	    {
+	    case 0: case 1: /* Data-processing (shift register) */
+	      err = thumb2_copy_alu_shifted_reg (gdbarch, insn1, insn2, regs,
+						dsc);
+	      break;
+	    case 2: /* Multiply and absolute difference */
+	      err = thumb_copy_unmodified_32bit (insn1, insn2, "mul/mua/diff",
+						 dsc);
+	      break;
+	    case 3: /* Long multiply and divide */
+	      err = thumb_copy_unmodified_32bit (insn1, insn2, "lmul/lmua",
+						 dsc);
+	      break;
+	    }
+	  break;
+	default: /* Coprocessor instructions */
+	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
+	  break;
+	}
+      break;
+    default:
+      err = 1;
+    }
+
+  if (err)
+    internal_error (__FILE__, __LINE__,
+		    _("thumb_process_displaced_insn: Instruction decode error"));
 }
 
 static void
-- 
1.7.0.4


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]