This is the mail archive of the
gdb-patches@sourceware.org
mailing list for the GDB project.
[try 2nd 2/8] Rename copy_* functions to arm_copy_*
- From: Yao Qi <yao at codesourcery dot com>
- To: gdb-patches at sourceware dot org
- Date: Thu, 24 Mar 2011 21:48:53 +0800
- Subject: [try 2nd 2/8] Rename copy_* functions to arm_copy_*
- References: <4D15F9B8.5070705@codesourcery.com> <4D8B4947.1000000@codesourcery.com>
The copy functions for ARM and Thumb instructions should be different.
So some copy_* functions are renamed to arm_copy_* functions. In each
copy function, there are some ARM/Thumb-independent parts, such as
installing the cleanup helper, storing registers, etc. These parts are
moved to install_* functions.
--
Yao
2011-03-24 Yao Qi <yao@codesourcery.com>
* gdb/arm-tdep.c (copy_unmodified): Rename to ...
(arm_copy_unmodified): ... this. New.
(copy_preload): Move common part to ...
(install_preload): ... this. New.
(arm_copy_preload): New.
(copy_preload_reg): Move common part to ...
(install_preload_reg): ... this. New.
(arm_copy_preload_reg): New.
(copy_b_bl_blx): Move common part to ...
(install_b_bl_blx): ... this. New.
(arm_copy_b_bl_blx): New.
(copy_bx_blx_reg): Move common part to ...
(install_bx_blx_reg): ... this. New.
(arm_copy_bx_blx_reg): New.
(copy_alu_reg): Move common part to ...
(install_alu_reg): ... this. New.
(arm_copy_alu_reg): New.
(copy_alu_shifted_reg): Move common part to ...
(install_alu_shifted_reg): ... this. New.
(copy_ldr_str_ldrb_strb): Move common part to ...
(install_ldr_str_ldrb_strb): ... this. New.
(arm_copy_ldr_str_ldrb_strb): New.
(copy_svc): Delete.
(arm_copy_svc): Renamed from copy_svc.
(copy_copro_load_store, copy_alu_imm): Update callers.
(copy_extra_ld_st, copy_block_xfer): Likewise.
(decode_misc_memhint_neon, decode_unconditional): Likewise.
(decode_miscellaneous, decode_dp_misc): Likewise.
(decode_ld_st_word_ubyte, decode_media): Likewise.
(decode_b_bl_ldmstm, decode_ext_reg_ld_st): Likewise.
(decode_svc_copro): Likewise.
* gdb/arm-tdep.h (struct displaced_step_closure): Add two structures
`alu_reg' and `alu_shifted_reg'.
---
gdb/arm-tdep.c | 495 +++++++++++++++++++++++++++++++------------------------
gdb/arm-tdep.h | 14 ++
2 files changed, 293 insertions(+), 216 deletions(-)
diff --git a/gdb/arm-tdep.c b/gdb/arm-tdep.c
index 2ebafad..af81b1e 100644
--- a/gdb/arm-tdep.c
+++ b/gdb/arm-tdep.c
@@ -5319,7 +5319,7 @@ insn_references_pc (uint32_t insn, uint32_t bitmask)
matter what address they are executed at: in those cases, use this. */
static int
-copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
+arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
const char *iname, struct displaced_step_closure *dsc)
{
if (debug_displaced)
@@ -5343,20 +5343,11 @@ cleanup_preload (struct gdbarch *gdbarch,
displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
}
-static int
-copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
- struct displaced_step_closure *dsc)
+static void
+install_preload (struct gdbarch *gdbarch, struct regcache *regs,
+ struct displaced_step_closure *dsc, unsigned int rn)
{
- unsigned int rn = bits (insn, 16, 19);
ULONGEST rn_val;
-
- if (!insn_references_pc (insn, 0x000f0000ul))
- return copy_unmodified (gdbarch, insn, "preload", dsc);
-
- if (debug_displaced)
- fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
- (unsigned long) insn);
-
/* Preload instructions:
{pli/pld} [rn, #+/-imm]
@@ -5366,34 +5357,40 @@ copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
rn_val = displaced_read_reg (regs, dsc, rn);
displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
-
dsc->u.preload.immed = 1;
- dsc->modinsn[0] = insn & 0xfff0ffff;
-
dsc->cleanup = &cleanup_preload;
-
- return 0;
}
-/* Preload instructions with register offset. */
-
static int
-copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
- struct regcache *regs,
+arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
struct displaced_step_closure *dsc)
{
unsigned int rn = bits (insn, 16, 19);
- unsigned int rm = bits (insn, 0, 3);
- ULONGEST rn_val, rm_val;
- if (!insn_references_pc (insn, 0x000f000ful))
- return copy_unmodified (gdbarch, insn, "preload reg", dsc);
+ if (!insn_references_pc (insn, 0x000f0000ul))
+ return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
if (debug_displaced)
fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
(unsigned long) insn);
+ dsc->modinsn[0] = insn & 0xfff0ffff;
+
+ install_preload (gdbarch, regs, dsc, rn);
+
+ return 0;
+}
+
+/* Preload instructions with register offset. */
+
+static void
+install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
+ struct displaced_step_closure *dsc, unsigned int rn,
+ unsigned int rm)
+{
+ ULONGEST rn_val, rm_val;
+
/* Preload register-offset instructions:
{pli/pld} [rn, rm {, shift}]
@@ -5406,13 +5403,30 @@ copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
rm_val = displaced_read_reg (regs, dsc, rm);
displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
-
dsc->u.preload.immed = 0;
- dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
-
dsc->cleanup = &cleanup_preload;
+}
+
+static int
+arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
+ struct regcache *regs,
+ struct displaced_step_closure *dsc)
+{
+ unsigned int rn = bits (insn, 16, 19);
+ unsigned int rm = bits (insn, 0, 3);
+
+ if (!insn_references_pc (insn, 0x000f000ful))
+ return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
+
+ if (debug_displaced)
+ fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
+ (unsigned long) insn);
+
+ dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
+
+ install_preload_reg (gdbarch, regs, dsc, rn, rm);
return 0;
}
@@ -5440,7 +5454,7 @@ copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
ULONGEST rn_val;
if (!insn_references_pc (insn, 0x000f0000ul))
- return copy_unmodified (gdbarch, insn, "copro load/store", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
if (debug_displaced)
fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
@@ -5503,28 +5517,39 @@ cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
/* Copy B/BL/BLX instructions with immediate destinations. */
static int
-copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
- struct regcache *regs, struct displaced_step_closure *dsc)
+install_b_bl_blx (struct gdbarch *gdbarch, unsigned int cond, int exchange,
+ int link, long offset, struct regcache *regs,
+ struct displaced_step_closure *dsc)
+{
+ /* Implement "BL<cond> <label>" as:
+
+ Preparation: cond <- instruction condition
+ Insn: mov r0, r0 (nop)
+ Cleanup: if (condition true) { r14 <- pc; pc <- label }.
+
+ B<cond> similar, but don't set r14 in cleanup. */
+
+ dsc->u.branch.cond = cond;
+ dsc->u.branch.link = link;
+ dsc->u.branch.exchange = exchange;
+
+ dsc->cleanup = &cleanup_branch;
+
+ return 0;
+}
+static int
+arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
+ struct regcache *regs, struct displaced_step_closure *dsc)
{
unsigned int cond = bits (insn, 28, 31);
int exchange = (cond == 0xf);
int link = exchange || bit (insn, 24);
- CORE_ADDR from = dsc->insn_addr;
long offset;
if (debug_displaced)
fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
"%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
(unsigned long) insn);
-
- /* Implement "BL<cond> <label>" as:
-
- Preparation: cond <- instruction condition
- Insn: mov r0, r0 (nop)
- Cleanup: if (condition true) { r14 <- pc; pc <- label }.
-
- B<cond> similar, but don't set r14 in cleanup. */
-
if (exchange)
/* For BLX, set bit 0 of the destination. The cleanup_branch function will
then arrange the switch into Thumb mode. */
@@ -5535,35 +5560,18 @@ copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
if (bit (offset, 25))
offset = offset | ~0x3ffffff;
- dsc->u.branch.cond = cond;
- dsc->u.branch.link = link;
- dsc->u.branch.exchange = exchange;
- dsc->u.branch.dest = from + 8 + offset;
-
+ dsc->u.branch.dest = dsc->insn_addr + 8 + offset;
dsc->modinsn[0] = ARM_NOP;
- dsc->cleanup = &cleanup_branch;
-
- return 0;
+ return install_b_bl_blx (gdbarch, cond, exchange, link, offset, regs, dsc);
}
/* Copy BX/BLX with register-specified destinations. */
static int
-copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
- struct regcache *regs, struct displaced_step_closure *dsc)
+install_bx_blx_reg (struct gdbarch *gdbarch, unsigned int rm,
+ struct regcache *regs, struct displaced_step_closure *dsc)
{
- unsigned int cond = bits (insn, 28, 31);
- /* BX: x12xxx1x
- BLX: x12xxx3x. */
- int link = bit (insn, 5);
- unsigned int rm = bits (insn, 0, 3);
-
- if (debug_displaced)
- fprintf_unfiltered (gdb_stdlog, "displaced: copying %s register insn "
- "%.8lx\n", (link) ? "blx" : "bx",
- (unsigned long) insn);
-
/* Implement {BX,BLX}<cond> <reg>" as:
Preparation: cond <- instruction condition
@@ -5573,18 +5581,34 @@ copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
Don't set r14 in cleanup for BX. */
dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
-
- dsc->u.branch.cond = cond;
- dsc->u.branch.link = link;
dsc->u.branch.exchange = 1;
- dsc->modinsn[0] = ARM_NOP;
-
dsc->cleanup = &cleanup_branch;
return 0;
}
+static int
+arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
+ struct regcache *regs, struct displaced_step_closure *dsc)
+{
+ unsigned int cond = bits (insn, 28, 31);
+ /* BX: x12xxx1x
+ BLX: x12xxx3x. */
+ int link = bit (insn, 5);
+ unsigned int rm = bits (insn, 0, 3);
+
+ if (debug_displaced)
+ fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
+ (unsigned long) insn);
+
+ dsc->u.branch.link = link;
+ dsc->u.branch.cond = cond;
+ dsc->modinsn[0] = ARM_NOP;
+
+ return install_bx_blx_reg (gdbarch, rm, regs, dsc);
+}
+
/* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
static void
@@ -5608,7 +5632,7 @@ copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
ULONGEST rd_val, rn_val;
if (!insn_references_pc (insn, 0x000ff000ul))
- return copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
if (debug_displaced)
fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
@@ -5663,23 +5687,11 @@ cleanup_alu_reg (struct gdbarch *gdbarch,
}
static int
-copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
- struct displaced_step_closure *dsc)
+install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
+ struct displaced_step_closure *dsc)
{
- unsigned int rn = bits (insn, 16, 19);
- unsigned int rm = bits (insn, 0, 3);
- unsigned int rd = bits (insn, 12, 15);
- unsigned int op = bits (insn, 21, 24);
- int is_mov = (op == 0xd);
ULONGEST rd_val, rn_val, rm_val;
- if (!insn_references_pc (insn, 0x000ff00ful))
- return copy_unmodified (gdbarch, insn, "ALU reg", dsc);
-
- if (debug_displaced)
- fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
- is_mov ? "move" : "ALU", (unsigned long) insn);
-
/* Instruction is of form:
<op><cond> rd, [rn,] rm [, <shift>]
@@ -5695,24 +5707,45 @@ copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
- rd_val = displaced_read_reg (regs, dsc, rd);
- rn_val = displaced_read_reg (regs, dsc, rn);
- rm_val = displaced_read_reg (regs, dsc, rm);
+ rd_val = displaced_read_reg (regs, dsc, dsc->rd);
+ rn_val = displaced_read_reg (regs, dsc, dsc->u.alu_reg.rn);
+ rm_val = displaced_read_reg (regs, dsc, dsc->u.alu_reg.rm);
+
displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
- dsc->rd = rd;
-
- if (is_mov)
- dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
- else
- dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
dsc->cleanup = &cleanup_alu_reg;
return 0;
}
+static int
+arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
+ struct displaced_step_closure *dsc)
+{
+ unsigned int op = bits (insn, 21, 24);
+ int is_mov = (op == 0xd);
+
+ dsc->u.alu_reg.rn = bits (insn, 16, 19); /* Rn */
+ dsc->u.alu_reg.rm = bits (insn, 0, 3); /* Rm */
+ dsc->rd = bits (insn, 12, 15); /* Rd */
+
+ if (!insn_references_pc (insn, 0x000ff00ful))
+ return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
+
+ if (debug_displaced)
+ fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
+ is_mov ? "move" : "ALU", (unsigned long) insn);
+
+ if (is_mov)
+ dsc->modinsn[0] = ((insn & 0xfff00ff0) | 0x2);
+ else
+ dsc->modinsn[0] = ((insn & 0xfff00ff0) | 0x10002);
+
+ return install_alu_reg (gdbarch, regs, dsc);
+}
+
/* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
static void
@@ -5729,27 +5762,13 @@ cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
}
-static int
-copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
- struct regcache *regs,
- struct displaced_step_closure *dsc)
+static void
+install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
+ struct displaced_step_closure *dsc)
{
- unsigned int rn = bits (insn, 16, 19);
- unsigned int rm = bits (insn, 0, 3);
- unsigned int rd = bits (insn, 12, 15);
- unsigned int rs = bits (insn, 8, 11);
- unsigned int op = bits (insn, 21, 24);
- int is_mov = (op == 0xd), i;
+ int i;
ULONGEST rd_val, rn_val, rm_val, rs_val;
- if (!insn_references_pc (insn, 0x000fff0ful))
- return copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
-
- if (debug_displaced)
- fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
- "%.8lx\n", is_mov ? "move" : "ALU",
- (unsigned long) insn);
-
/* Instruction is of form:
<op><cond> rd, [rn,] rm, <shift> rs
@@ -5767,22 +5786,45 @@ copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
for (i = 0; i < 4; i++)
dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
- rd_val = displaced_read_reg (regs, dsc, rd);
- rn_val = displaced_read_reg (regs, dsc, rn);
- rm_val = displaced_read_reg (regs, dsc, rm);
- rs_val = displaced_read_reg (regs, dsc, rs);
+ rd_val = displaced_read_reg (regs, dsc, dsc->rd);
+ rn_val = displaced_read_reg (regs, dsc, dsc->u.alu_shifted_reg.rn);
+ rm_val = displaced_read_reg (regs, dsc, dsc->u.alu_shifted_reg.rm);
+ rs_val = displaced_read_reg (regs, dsc, dsc->u.alu_shifted_reg.rs);
displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
- dsc->rd = rd;
+
+ dsc->cleanup = &cleanup_alu_shifted_reg;
+}
+
+static int
+copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
+ struct regcache *regs,
+ struct displaced_step_closure *dsc)
+{
+ unsigned int op = bits (insn, 21, 24);
+ int is_mov = (op == 0xd);
+
+ if (!insn_references_pc (insn, 0x000fff0ful))
+ return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
+
+ if (debug_displaced)
+ fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
+ "%.8lx\n", is_mov ? "move" : "ALU",
+ (unsigned long) insn);
+
+ dsc->u.alu_shifted_reg.rn = bits (insn, 16, 19);
+ dsc->u.alu_shifted_reg.rm = bits (insn, 0, 3);
+ dsc->u.alu_shifted_reg.rs = bits (insn, 8, 11);
+ dsc->rd = bits (insn, 12, 15);
if (is_mov)
dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
else
dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
- dsc->cleanup = &cleanup_alu_shifted_reg;
+ install_alu_shifted_reg (gdbarch, regs, dsc);
return 0;
}
@@ -5857,7 +5899,7 @@ copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
if (!insn_references_pc (insn, 0x000ff00ful))
- return copy_unmodified (gdbarch, insn, "extra load/store", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
if (debug_displaced)
fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
@@ -5916,26 +5958,13 @@ copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
/* Copy byte/word loads and stores. */
static int
-copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
- struct regcache *regs,
- struct displaced_step_closure *dsc, int load, int byte,
- int usermode)
+install_ldr_str_ldrb_strb (struct gdbarch *gdbarch, struct regcache *regs,
+ struct displaced_step_closure *dsc, int load,
+ int byte, int usermode, int writeback, int rm,
+ int immed)
{
- int immed = !bit (insn, 25);
- unsigned int rt = bits (insn, 12, 15);
- unsigned int rn = bits (insn, 16, 19);
- unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
ULONGEST rt_val, rn_val, rm_val = 0;
- if (!insn_references_pc (insn, 0x000ff00ful))
- return copy_unmodified (gdbarch, insn, "load/store", dsc);
-
- if (debug_displaced)
- fprintf_unfiltered (gdb_stdlog, "displaced: copying %s%s insn %.8lx\n",
- load ? (byte ? "ldrb" : "ldr")
- : (byte ? "strb" : "str"), usermode ? "t" : "",
- (unsigned long) insn);
-
dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
if (!immed)
@@ -5943,8 +5972,8 @@ copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
if (!load)
dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
- rt_val = displaced_read_reg (regs, dsc, rt);
- rn_val = displaced_read_reg (regs, dsc, rn);
+ rt_val = displaced_read_reg (regs, dsc, dsc->rd);
+ rn_val = displaced_read_reg (regs, dsc, dsc->u.ldst.rn);
if (!immed)
rm_val = displaced_read_reg (regs, dsc, rm);
@@ -5953,11 +5982,10 @@ copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
if (!immed)
displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
- dsc->rd = rt;
dsc->u.ldst.xfersize = byte ? 1 : 4;
- dsc->u.ldst.rn = rn;
+
dsc->u.ldst.immed = immed;
- dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
+ dsc->u.ldst.writeback = writeback;
/* To write PC we can do:
@@ -5980,6 +6008,40 @@ copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
of this can be found in Section "Saving from r15" in
http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
+ dsc->cleanup = load ? &cleanup_load : &cleanup_store;
+
+ return 0;
+}
+
+static int
+arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
+ struct regcache *regs,
+ struct displaced_step_closure *dsc,
+ int load, int byte, int usermode)
+{
+ int immed = !bit (insn, 25);
+ unsigned int rt = bits (insn, 12, 15);
+ unsigned int rn = bits (insn, 16, 19);
+ unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
+
+ if (!insn_references_pc (insn, 0x000ff00ful))
+ return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
+
+ if (debug_displaced)
+ fprintf_unfiltered (gdb_stdlog,
+ "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
+ load ? (byte ? "ldrb" : "ldr")
+ : (byte ? "strb" : "str"), usermode ? "t" : "",
+ rt, rn,
+ (unsigned long) insn);
+
+ dsc->rd = rt;
+ dsc->u.ldst.rn = rn;
+
+ install_ldr_str_ldrb_strb (gdbarch, regs, dsc, load, byte, usermode,
+ (bit (insn, 24) == 0 || bit (insn, 21) != 0),
+ rm, immed);
+
if (load || rt != ARM_PC_REGNUM)
{
dsc->u.ldst.restore_r4 = 0;
@@ -6244,13 +6306,13 @@ copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
/* Block transfers which don't mention PC can be run directly
out-of-line. */
if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
- return copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
if (rn == ARM_PC_REGNUM)
{
warning (_("displaced: Unpredictable LDM or STM with "
"base register r15"));
- return copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
}
if (debug_displaced)
@@ -6271,7 +6333,7 @@ copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
if (load)
{
- if ((insn & 0xffff) == 0xffff)
+ if (dsc->u.block.regmask == 0xffff)
{
/* LDM with a fully-populated register list. This case is
particularly tricky. Implement for now by fully emulating the
@@ -6288,7 +6350,7 @@ copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
rewriting the list of registers to be transferred into a
contiguous chunk r0...rX before doing the transfer, then shuffling
registers into the correct places in the cleanup routine. */
- unsigned int regmask = insn & 0xffff;
+ unsigned int regmask = dsc->u.block.regmask;
unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
unsigned int to = 0, from = 0, i, new_rn;
@@ -6320,7 +6382,7 @@ copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
"{..., pc}: original reg list %.4x, modified "
"list %.4x\n"), rn, writeback ? "!" : "",
- (int) insn & 0xffff, new_regmask);
+ (int) dsc->u.block.regmask, new_regmask);
dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
@@ -6360,8 +6422,8 @@ cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
}
static int
-copy_svc (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
- struct regcache *regs, struct displaced_step_closure *dsc)
+arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
+ struct regcache *regs, struct displaced_step_closure *dsc)
{
/* Allow OS-specific code to override SVC handling. */
if (dsc->u.svc.copy_svc_os)
@@ -6428,33 +6490,34 @@ decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
unsigned int rn = bits (insn, 16, 19);
if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
- return copy_unmodified (gdbarch, insn, "cps", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
- return copy_unmodified (gdbarch, insn, "setend", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
else if ((op1 & 0x60) == 0x20)
- return copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
else if ((op1 & 0x71) == 0x40)
- return copy_unmodified (gdbarch, insn, "neon elt/struct load/store", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
+ dsc);
else if ((op1 & 0x77) == 0x41)
- return copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
else if ((op1 & 0x77) == 0x45)
- return copy_preload (gdbarch, insn, regs, dsc); /* pli. */
+ return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
else if ((op1 & 0x77) == 0x51)
{
if (rn != 0xf)
- return copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
+ return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
else
return copy_unpred (gdbarch, insn, dsc);
}
else if ((op1 & 0x77) == 0x55)
- return copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
+ return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
else if (op1 == 0x57)
switch (op2)
{
- case 0x1: return copy_unmodified (gdbarch, insn, "clrex", dsc);
- case 0x4: return copy_unmodified (gdbarch, insn, "dsb", dsc);
- case 0x5: return copy_unmodified (gdbarch, insn, "dmb", dsc);
- case 0x6: return copy_unmodified (gdbarch, insn, "isb", dsc);
+ case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
+ case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
+ case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
+ case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
default: return copy_unpred (gdbarch, insn, dsc);
}
else if ((op1 & 0x63) == 0x43)
@@ -6463,12 +6526,12 @@ decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
switch (op1 & ~0x80)
{
case 0x61:
- return copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
case 0x65:
- return copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
+ return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
case 0x71: case 0x75:
/* pld/pldw reg. */
- return copy_preload_reg (gdbarch, insn, regs, dsc);
+ return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
case 0x63: case 0x67: case 0x73: case 0x77:
return copy_unpred (gdbarch, insn, dsc);
default:
@@ -6489,13 +6552,13 @@ decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
{
case 0x0: case 0x2:
- return copy_unmodified (gdbarch, insn, "srs", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
case 0x1: case 0x3:
- return copy_unmodified (gdbarch, insn, "rfe", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
case 0x4: case 0x5: case 0x6: case 0x7:
- return copy_b_bl_blx (gdbarch, insn, regs, dsc);
+ return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
case 0x8:
switch ((insn & 0xe00000) >> 21)
@@ -6505,7 +6568,7 @@ decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
return copy_copro_load_store (gdbarch, insn, regs, dsc);
case 0x2:
- return copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
default:
return copy_undef (gdbarch, insn, dsc);
@@ -6522,7 +6585,7 @@ decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
: copy_copro_load_store (gdbarch, insn, regs, dsc);
case 0x2:
- return copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
case 0x4: case 0x5: case 0x6: case 0x7:
/* ldc/ldc2 lit (undefined for rn != pc). */
@@ -6535,7 +6598,7 @@ decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
}
case 0xa:
- return copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
case 0xb:
if (bits (insn, 16, 19) == 0xf)
@@ -6546,15 +6609,15 @@ decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
case 0xc:
if (bit (insn, 4))
- return copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
else
- return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
case 0xd:
if (bit (insn, 4))
- return copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
else
- return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
default:
return copy_undef (gdbarch, insn, dsc);
@@ -6575,39 +6638,39 @@ decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
switch (op2)
{
case 0x0:
- return copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
case 0x1:
if (op == 0x1) /* bx. */
- return copy_bx_blx_reg (gdbarch, insn, regs, dsc);
+ return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
else if (op == 0x3)
- return copy_unmodified (gdbarch, insn, "clz", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
else
return copy_undef (gdbarch, insn, dsc);
case 0x2:
if (op == 0x1)
/* Not really supported. */
- return copy_unmodified (gdbarch, insn, "bxj", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
else
return copy_undef (gdbarch, insn, dsc);
case 0x3:
if (op == 0x1)
- return copy_bx_blx_reg (gdbarch, insn,
+ return arm_copy_bx_blx_reg (gdbarch, insn,
regs, dsc); /* blx register. */
else
return copy_undef (gdbarch, insn, dsc);
case 0x5:
- return copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
case 0x7:
if (op == 0x1)
- return copy_unmodified (gdbarch, insn, "bkpt", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
else if (op == 0x3)
/* Not really supported. */
- return copy_unmodified (gdbarch, insn, "smc", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
default:
return copy_undef (gdbarch, insn, dsc);
@@ -6622,13 +6685,13 @@ decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
switch (bits (insn, 20, 24))
{
case 0x10:
- return copy_unmodified (gdbarch, insn, "movw", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
case 0x14:
- return copy_unmodified (gdbarch, insn, "movt", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
case 0x12: case 0x16:
- return copy_unmodified (gdbarch, insn, "msr imm", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
default:
return copy_alu_imm (gdbarch, insn, regs, dsc);
@@ -6638,17 +6701,17 @@ decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
- return copy_alu_reg (gdbarch, insn, regs, dsc);
+ return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
return copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
return decode_miscellaneous (gdbarch, insn, regs, dsc);
else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
- return copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
- return copy_unmodified (gdbarch, insn, "mul/mla", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
- return copy_unmodified (gdbarch, insn, "synch", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
else if (op2 == 0xb || (op2 & 0xd) == 0xd)
/* 2nd arg means "unpriveleged". */
return copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
@@ -6670,28 +6733,28 @@ decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
|| (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
- return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 0);
+ return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 0);
else if ((!a && (op1 & 0x17) == 0x02)
|| (a && (op1 & 0x17) == 0x02 && !b))
- return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 1);
+ return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 1);
else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
|| (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
- return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 0);
+ return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 0);
else if ((!a && (op1 & 0x17) == 0x03)
|| (a && (op1 & 0x17) == 0x03 && !b))
- return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 1);
+ return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 1);
else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
|| (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
- return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
+ return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
else if ((!a && (op1 & 0x17) == 0x06)
|| (a && (op1 & 0x17) == 0x06 && !b))
- return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
+ return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
|| (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
- return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
+ return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
else if ((!a && (op1 & 0x17) == 0x07)
|| (a && (op1 & 0x17) == 0x07 && !b))
- return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
+ return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
/* Should be unreachable. */
return 1;
@@ -6704,30 +6767,30 @@ decode_media (struct gdbarch *gdbarch, uint32_t insn,
switch (bits (insn, 20, 24))
{
case 0x00: case 0x01: case 0x02: case 0x03:
- return copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
case 0x04: case 0x05: case 0x06: case 0x07:
- return copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
case 0x08: case 0x09: case 0x0a: case 0x0b:
case 0x0c: case 0x0d: case 0x0e: case 0x0f:
- return copy_unmodified (gdbarch, insn,
+ return arm_copy_unmodified (gdbarch, insn,
"decode/pack/unpack/saturate/reverse", dsc);
case 0x18:
if (bits (insn, 5, 7) == 0) /* op2. */
{
if (bits (insn, 12, 15) == 0xf)
- return copy_unmodified (gdbarch, insn, "usad8", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
else
- return copy_unmodified (gdbarch, insn, "usada8", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
}
else
return copy_undef (gdbarch, insn, dsc);
case 0x1a: case 0x1b:
if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
- return copy_unmodified (gdbarch, insn, "sbfx", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
else
return copy_undef (gdbarch, insn, dsc);
@@ -6735,16 +6798,16 @@ decode_media (struct gdbarch *gdbarch, uint32_t insn,
if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
{
if (bits (insn, 0, 3) == 0xf)
- return copy_unmodified (gdbarch, insn, "bfc", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
else
- return copy_unmodified (gdbarch, insn, "bfi", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
}
else
return copy_undef (gdbarch, insn, dsc);
case 0x1e: case 0x1f:
if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
- return copy_unmodified (gdbarch, insn, "ubfx", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
else
return copy_undef (gdbarch, insn, dsc);
}
@@ -6758,7 +6821,7 @@ decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
struct regcache *regs, struct displaced_step_closure *dsc)
{
if (bit (insn, 25))
- return copy_b_bl_blx (gdbarch, insn, regs, dsc);
+ return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
else
return copy_block_xfer (gdbarch, insn, regs, dsc);
}
@@ -6773,15 +6836,15 @@ decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
switch (opcode)
{
case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
- return copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
case 0x08: case 0x0a: case 0x0c: case 0x0e:
case 0x12: case 0x16:
- return copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
case 0x09: case 0x0b: case 0x0d: case 0x0f:
case 0x13: case 0x17:
- return copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
@@ -6816,26 +6879,26 @@ decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
else if ((op1 & 0x3e) == 0x00)
return copy_undef (gdbarch, insn, dsc);
else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
- return copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
- return copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
- return copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
else if ((op1 & 0x30) == 0x20 && !op)
{
if ((coproc & 0xe) == 0xa)
- return copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
else
- return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
}
else if ((op1 & 0x30) == 0x20 && op)
- return copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
- return copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
- return copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
+ return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
else if ((op1 & 0x30) == 0x30)
- return copy_svc (gdbarch, insn, to, regs, dsc);
+ return arm_copy_svc (gdbarch, insn, to, regs, dsc);
else
return copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
}
diff --git a/gdb/arm-tdep.h b/gdb/arm-tdep.h
index ebd5e6e..3b1fce9 100644
--- a/gdb/arm-tdep.h
+++ b/gdb/arm-tdep.h
@@ -250,6 +250,20 @@ struct displaced_step_closure
struct
{
+ unsigned int rn;
+ unsigned int rm;
+ unsigned int rd;
+ } alu_reg;
+
+ struct
+ {
+ unsigned int rn;
+ unsigned int rm;
+ unsigned int rs;
+ } alu_shifted_reg;
+
+ struct
+ {
unsigned int immed : 1;
} preload;
--
1.7.0.4