You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 
 
 
 

739 lines
20 KiB

  1. /***************************************************************************
  2. * Copyright (C) 2006 by Dominic Rath *
  3. * Dominic.Rath@gmx.de *
  4. * *
  5. * Copyright (C) 2008 by Hongtao Zheng *
  6. * hontor@126.com *
  7. * *
  8. * This program is free software; you can redistribute it and/or modify *
  9. * it under the terms of the GNU General Public License as published by *
  10. * the Free Software Foundation; either version 2 of the License, or *
  11. * (at your option) any later version. *
  12. * *
  13. * This program is distributed in the hope that it will be useful, *
  14. * but WITHOUT ANY WARRANTY; without even the implied warranty of *
  15. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
  16. * GNU General Public License for more details. *
  17. * *
  18. * You should have received a copy of the GNU General Public License *
  19. * along with this program; if not, write to the *
  20. * Free Software Foundation, Inc., *
  21. * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
  22. ***************************************************************************/
  23. #ifdef HAVE_CONFIG_H
  24. #include "config.h"
  25. #endif
  26. #include "target.h"
  27. #include "armv4_5.h"
  28. #include "arm_disassembler.h"
  29. #include "arm_simulator.h"
  30. #include "log.h"
  31. #include "binarybuffer.h"
  32. #include <string.h>
  33. u32 arm_shift(u8 shift, u32 Rm, u32 shift_amount, u8 *carry)
  34. {
  35. u32 return_value = 0;
  36. shift_amount &= 0xff;
  37. if (shift == 0x0) /* LSL */
  38. {
  39. if ((shift_amount > 0) && (shift_amount <= 32))
  40. {
  41. return_value = Rm << shift_amount;
  42. *carry = Rm >> (32 - shift_amount);
  43. }
  44. else if (shift_amount > 32)
  45. {
  46. return_value = 0x0;
  47. *carry = 0x0;
  48. }
  49. else /* (shift_amount == 0) */
  50. {
  51. return_value = Rm;
  52. }
  53. }
  54. else if (shift == 0x1) /* LSR */
  55. {
  56. if ((shift_amount > 0) && (shift_amount <= 32))
  57. {
  58. return_value = Rm >> shift_amount;
  59. *carry = (Rm >> (shift_amount - 1)) & 1;
  60. }
  61. else if (shift_amount > 32)
  62. {
  63. return_value = 0x0;
  64. *carry = 0x0;
  65. }
  66. else /* (shift_amount == 0) */
  67. {
  68. return_value = Rm;
  69. }
  70. }
  71. else if (shift == 0x2) /* ASR */
  72. {
  73. if ((shift_amount > 0) && (shift_amount <= 32))
  74. {
  75. /* right shifts of unsigned values are guaranteed to be logical (shift in zeroes)
  76. * simulate an arithmetic shift (shift in signed-bit) by adding the signed-bit manually */
  77. return_value = Rm >> shift_amount;
  78. if (Rm & 0x80000000)
  79. return_value |= 0xffffffff << (32 - shift_amount);
  80. }
  81. else if (shift_amount > 32)
  82. {
  83. if (Rm & 0x80000000)
  84. {
  85. return_value = 0xffffffff;
  86. *carry = 0x1;
  87. }
  88. else
  89. {
  90. return_value = 0x0;
  91. *carry = 0x0;
  92. }
  93. }
  94. else /* (shift_amount == 0) */
  95. {
  96. return_value = Rm;
  97. }
  98. }
  99. else if (shift == 0x3) /* ROR */
  100. {
  101. if (shift_amount == 0)
  102. {
  103. return_value = Rm;
  104. }
  105. else
  106. {
  107. shift_amount = shift_amount % 32;
  108. return_value = (Rm >> shift_amount) | (Rm << (32 - shift_amount));
  109. *carry = (return_value >> 31) & 0x1;
  110. }
  111. }
  112. else if (shift == 0x4) /* RRX */
  113. {
  114. return_value = Rm >> 1;
  115. if (*carry)
  116. Rm |= 0x80000000;
  117. *carry = Rm & 0x1;
  118. }
  119. return return_value;
  120. }
  121. u32 arm_shifter_operand(armv4_5_common_t *armv4_5, int variant, union arm_shifter_operand shifter_operand, u8 *shifter_carry_out)
  122. {
  123. u32 return_value;
  124. int instruction_size;
  125. if (armv4_5->core_state == ARMV4_5_STATE_ARM)
  126. instruction_size = 4;
  127. else
  128. instruction_size = 2;
  129. *shifter_carry_out = buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 29, 1);
  130. if (variant == 0) /* 32-bit immediate */
  131. {
  132. return_value = shifter_operand.immediate.immediate;
  133. }
  134. else if (variant == 1) /* immediate shift */
  135. {
  136. u32 Rm = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, shifter_operand.immediate_shift.Rm).value, 0, 32);
  137. /* adjust RM in case the PC is being read */
  138. if (shifter_operand.immediate_shift.Rm == 15)
  139. Rm += 2 * instruction_size;
  140. return_value = arm_shift(shifter_operand.immediate_shift.shift, Rm, shifter_operand.immediate_shift.shift_imm, shifter_carry_out);
  141. }
  142. else if (variant == 2) /* register shift */
  143. {
  144. u32 Rm = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, shifter_operand.register_shift.Rm).value, 0, 32);
  145. u32 Rs = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, shifter_operand.register_shift.Rs).value, 0, 32);
  146. /* adjust RM in case the PC is being read */
  147. if (shifter_operand.register_shift.Rm == 15)
  148. Rm += 2 * instruction_size;
  149. return_value = arm_shift(shifter_operand.immediate_shift.shift, Rm, Rs, shifter_carry_out);
  150. }
  151. else
  152. {
  153. LOG_ERROR("BUG: shifter_operand.variant not 0, 1 or 2");
  154. return_value = 0xffffffff;
  155. }
  156. return return_value;
  157. }
  158. int pass_condition(u32 cpsr, u32 opcode)
  159. {
  160. switch ((opcode & 0xf0000000) >> 28)
  161. {
  162. case 0x0: /* EQ */
  163. if (cpsr & 0x40000000)
  164. return 1;
  165. else
  166. return 0;
  167. case 0x1: /* NE */
  168. if (!(cpsr & 0x40000000))
  169. return 1;
  170. else
  171. return 0;
  172. case 0x2: /* CS */
  173. if (cpsr & 0x20000000)
  174. return 1;
  175. else
  176. return 0;
  177. case 0x3: /* CC */
  178. if (!(cpsr & 0x20000000))
  179. return 1;
  180. else
  181. return 0;
  182. case 0x4: /* MI */
  183. if (cpsr & 0x80000000)
  184. return 1;
  185. else
  186. return 0;
  187. case 0x5: /* PL */
  188. if (!(cpsr & 0x80000000))
  189. return 1;
  190. else
  191. return 0;
  192. case 0x6: /* VS */
  193. if (cpsr & 0x10000000)
  194. return 1;
  195. else
  196. return 0;
  197. case 0x7: /* VC */
  198. if (!(cpsr & 0x10000000))
  199. return 1;
  200. else
  201. return 0;
  202. case 0x8: /* HI */
  203. if ((cpsr & 0x20000000) && !(cpsr & 0x40000000))
  204. return 1;
  205. else
  206. return 0;
  207. case 0x9: /* LS */
  208. if (!(cpsr & 0x20000000) || (cpsr & 0x40000000))
  209. return 1;
  210. else
  211. return 0;
  212. case 0xa: /* GE */
  213. if (((cpsr & 0x80000000) && (cpsr & 0x10000000))
  214. || (!(cpsr & 0x80000000) && !(cpsr & 0x10000000)))
  215. return 1;
  216. else
  217. return 0;
  218. case 0xb: /* LT */
  219. if (((cpsr & 0x80000000) && !(cpsr & 0x10000000))
  220. || (!(cpsr & 0x80000000) && (cpsr & 0x10000000)))
  221. return 1;
  222. else
  223. return 0;
  224. case 0xc: /* GT */
  225. if (!(cpsr & 0x40000000) &&
  226. (((cpsr & 0x80000000) && (cpsr & 0x10000000))
  227. || (!(cpsr & 0x80000000) && !(cpsr & 0x10000000))))
  228. return 1;
  229. else
  230. return 0;
  231. case 0xd: /* LE */
  232. if ((cpsr & 0x40000000) &&
  233. (((cpsr & 0x80000000) && !(cpsr & 0x10000000))
  234. || (!(cpsr & 0x80000000) && (cpsr & 0x10000000))))
  235. return 1;
  236. else
  237. return 0;
  238. case 0xe:
  239. case 0xf:
  240. return 1;
  241. }
  242. LOG_ERROR("BUG: should never get here");
  243. return 0;
  244. }
  245. int thumb_pass_branch_condition(u32 cpsr, u16 opcode)
  246. {
  247. return pass_condition(cpsr, (opcode & 0x0f00) << 20);
  248. }
  249. /* simulate a single step (if possible)
  250. * if the dry_run_pc argument is provided, no state is changed,
  251. * but the new pc is stored in the variable pointed at by the argument
  252. */
  253. int arm_simulate_step(target_t *target, u32 *dry_run_pc)
  254. {
  255. armv4_5_common_t *armv4_5 = target->arch_info;
  256. u32 current_pc = buf_get_u32(armv4_5->core_cache->reg_list[15].value, 0, 32);
  257. arm_instruction_t instruction;
  258. int instruction_size;
  259. int retval = ERROR_OK;
  260. if (armv4_5->core_state == ARMV4_5_STATE_ARM)
  261. {
  262. u32 opcode;
  263. /* get current instruction, and identify it */
  264. if((retval = target_read_u32(target, current_pc, &opcode)) != ERROR_OK)
  265. {
  266. return retval;
  267. }
  268. if((retval = arm_evaluate_opcode(opcode, current_pc, &instruction)) != ERROR_OK)
  269. {
  270. return retval;
  271. }
  272. instruction_size = 4;
  273. /* check condition code (for all instructions) */
  274. if (!pass_condition(buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 0, 32), opcode))
  275. {
  276. if (dry_run_pc)
  277. {
  278. *dry_run_pc = current_pc + instruction_size;
  279. }
  280. else
  281. {
  282. buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, current_pc + instruction_size);
  283. }
  284. return ERROR_OK;
  285. }
  286. }
  287. else
  288. {
  289. u16 opcode;
  290. if((retval = target_read_u16(target, current_pc, &opcode)) != ERROR_OK)
  291. {
  292. return retval;
  293. }
  294. if((retval = thumb_evaluate_opcode(opcode, current_pc, &instruction)) != ERROR_OK)
  295. {
  296. return retval;
  297. }
  298. instruction_size = 2;
  299. /* check condition code (only for branch instructions) */
  300. if ((!thumb_pass_branch_condition(buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 0, 32), opcode)) &&
  301. (instruction.type == ARM_B))
  302. {
  303. if (dry_run_pc)
  304. {
  305. *dry_run_pc = current_pc + instruction_size;
  306. }
  307. else
  308. {
  309. buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, current_pc + instruction_size);
  310. }
  311. return ERROR_OK;
  312. }
  313. }
  314. /* examine instruction type */
  315. /* branch instructions */
  316. if ((instruction.type >= ARM_B) && (instruction.type <= ARM_BLX))
  317. {
  318. u32 target;
  319. if (instruction.info.b_bl_bx_blx.reg_operand == -1)
  320. {
  321. target = instruction.info.b_bl_bx_blx.target_address;
  322. }
  323. else
  324. {
  325. target = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.b_bl_bx_blx.reg_operand).value, 0, 32);
  326. if(instruction.info.b_bl_bx_blx.reg_operand == 15)
  327. {
  328. target += 2 * instruction_size;
  329. }
  330. }
  331. if (dry_run_pc)
  332. {
  333. *dry_run_pc = target;
  334. return ERROR_OK;
  335. }
  336. else
  337. {
  338. if (instruction.type == ARM_B)
  339. {
  340. buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target);
  341. }
  342. else if (instruction.type == ARM_BL)
  343. {
  344. u32 old_pc = buf_get_u32(armv4_5->core_cache->reg_list[15].value, 0, 32);
  345. buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, 14).value, 0, 32, old_pc + 4);
  346. buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target);
  347. }
  348. else if (instruction.type == ARM_BX)
  349. {
  350. if (target & 0x1)
  351. {
  352. armv4_5->core_state = ARMV4_5_STATE_THUMB;
  353. }
  354. else
  355. {
  356. armv4_5->core_state = ARMV4_5_STATE_ARM;
  357. }
  358. buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target & 0xfffffffe);
  359. }
  360. else if (instruction.type == ARM_BLX)
  361. {
  362. u32 old_pc = buf_get_u32(armv4_5->core_cache->reg_list[15].value, 0, 32);
  363. buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, 14).value, 0, 32, old_pc + 4);
  364. if (target & 0x1)
  365. {
  366. armv4_5->core_state = ARMV4_5_STATE_THUMB;
  367. }
  368. else
  369. {
  370. armv4_5->core_state = ARMV4_5_STATE_ARM;
  371. }
  372. buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, target & 0xfffffffe);
  373. }
  374. return ERROR_OK;
  375. }
  376. }
  377. /* data processing instructions, except compare instructions (CMP, CMN, TST, TEQ) */
  378. else if (((instruction.type >= ARM_AND) && (instruction.type <= ARM_RSC))
  379. || ((instruction.type >= ARM_ORR) && (instruction.type <= ARM_MVN)))
  380. {
  381. u32 Rd, Rn, shifter_operand;
  382. u8 C = buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 29, 1);
  383. u8 carry_out;
  384. Rd = 0x0;
  385. Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.data_proc.Rn).value, 0, 32);
  386. shifter_operand = arm_shifter_operand(armv4_5, instruction.info.data_proc.variant, instruction.info.data_proc.shifter_operand, &carry_out);
  387. /* adjust Rn in case the PC is being read */
  388. if (instruction.info.data_proc.Rn == 15)
  389. Rn += 2 * instruction_size;
  390. if (instruction.type == ARM_AND)
  391. Rd = Rn & shifter_operand;
  392. else if (instruction.type == ARM_EOR)
  393. Rd = Rn ^ shifter_operand;
  394. else if (instruction.type == ARM_SUB)
  395. Rd = Rn - shifter_operand;
  396. else if (instruction.type == ARM_RSB)
  397. Rd = shifter_operand - Rn;
  398. else if (instruction.type == ARM_ADD)
  399. Rd = Rn + shifter_operand;
  400. else if (instruction.type == ARM_ADC)
  401. Rd = Rn + shifter_operand + (C & 1);
  402. else if (instruction.type == ARM_SBC)
  403. Rd = Rn - shifter_operand - (C & 1) ? 0 : 1;
  404. else if (instruction.type == ARM_RSC)
  405. Rd = shifter_operand - Rn - (C & 1) ? 0 : 1;
  406. else if (instruction.type == ARM_ORR)
  407. Rd = Rn | shifter_operand;
  408. else if (instruction.type == ARM_BIC)
  409. Rd = Rn & ~(shifter_operand);
  410. else if (instruction.type == ARM_MOV)
  411. Rd = shifter_operand;
  412. else if (instruction.type == ARM_MVN)
  413. Rd = ~shifter_operand;
  414. if (dry_run_pc)
  415. {
  416. if (instruction.info.data_proc.Rd == 15)
  417. {
  418. *dry_run_pc = Rd;
  419. return ERROR_OK;
  420. }
  421. else
  422. {
  423. *dry_run_pc = current_pc + instruction_size;
  424. }
  425. return ERROR_OK;
  426. }
  427. else
  428. {
  429. buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.data_proc.Rd).value, 0, 32, Rd);
  430. LOG_WARNING("no updating of flags yet");
  431. if (instruction.info.data_proc.Rd == 15)
  432. return ERROR_OK;
  433. }
  434. }
  435. /* compare instructions (CMP, CMN, TST, TEQ) */
  436. else if ((instruction.type >= ARM_TST) && (instruction.type <= ARM_CMN))
  437. {
  438. if (dry_run_pc)
  439. {
  440. *dry_run_pc = current_pc + instruction_size;
  441. return ERROR_OK;
  442. }
  443. else
  444. {
  445. LOG_WARNING("no updating of flags yet");
  446. }
  447. }
  448. /* load register instructions */
  449. else if ((instruction.type >= ARM_LDR) && (instruction.type <= ARM_LDRSH))
  450. {
  451. u32 load_address = 0, modified_address = 0, load_value;
  452. u32 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.Rn).value, 0, 32);
  453. /* adjust Rn in case the PC is being read */
  454. if (instruction.info.load_store.Rn == 15)
  455. Rn += 2 * instruction_size;
  456. if (instruction.info.load_store.offset_mode == 0)
  457. {
  458. if (instruction.info.load_store.U)
  459. modified_address = Rn + instruction.info.load_store.offset.offset;
  460. else
  461. modified_address = Rn - instruction.info.load_store.offset.offset;
  462. }
  463. else if (instruction.info.load_store.offset_mode == 1)
  464. {
  465. u32 offset;
  466. u32 Rm = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.offset.reg.Rm).value, 0, 32);
  467. u8 shift = instruction.info.load_store.offset.reg.shift;
  468. u8 shift_imm = instruction.info.load_store.offset.reg.shift_imm;
  469. u8 carry = buf_get_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 29, 1);
  470. offset = arm_shift(shift, Rm, shift_imm, &carry);
  471. if (instruction.info.load_store.U)
  472. modified_address = Rn + offset;
  473. else
  474. modified_address = Rn - offset;
  475. }
  476. else
  477. {
  478. LOG_ERROR("BUG: offset_mode neither 0 (offset) nor 1 (scaled register)");
  479. }
  480. if (instruction.info.load_store.index_mode == 0)
  481. {
  482. /* offset mode
  483. * we load from the modified address, but don't change the base address register */
  484. load_address = modified_address;
  485. modified_address = Rn;
  486. }
  487. else if (instruction.info.load_store.index_mode == 1)
  488. {
  489. /* pre-indexed mode
  490. * we load from the modified address, and write it back to the base address register */
  491. load_address = modified_address;
  492. }
  493. else if (instruction.info.load_store.index_mode == 2)
  494. {
  495. /* post-indexed mode
  496. * we load from the unmodified address, and write the modified address back */
  497. load_address = Rn;
  498. }
  499. if((!dry_run_pc) || (instruction.info.load_store.Rd == 15))
  500. {
  501. if((retval = target_read_u32(target, load_address, &load_value)) != ERROR_OK)
  502. {
  503. return retval;
  504. }
  505. }
  506. if (dry_run_pc)
  507. {
  508. if (instruction.info.load_store.Rd == 15)
  509. {
  510. *dry_run_pc = load_value;
  511. return ERROR_OK;
  512. }
  513. else
  514. {
  515. *dry_run_pc = current_pc + instruction_size;
  516. }
  517. return ERROR_OK;
  518. }
  519. else
  520. {
  521. if ((instruction.info.load_store.index_mode == 1) ||
  522. (instruction.info.load_store.index_mode == 2))
  523. {
  524. buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.Rn).value, 0, 32, modified_address);
  525. }
  526. buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store.Rd).value, 0, 32, load_value);
  527. if (instruction.info.load_store.Rd == 15)
  528. return ERROR_OK;
  529. }
  530. }
  531. /* load multiple instruction */
  532. else if (instruction.type == ARM_LDM)
  533. {
  534. int i;
  535. u32 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32);
  536. u32 load_values[16];
  537. int bits_set = 0;
  538. for (i = 0; i < 16; i++)
  539. {
  540. if (instruction.info.load_store_multiple.register_list & (1 << i))
  541. bits_set++;
  542. }
  543. switch (instruction.info.load_store_multiple.addressing_mode)
  544. {
  545. case 0: /* Increment after */
  546. Rn = Rn;
  547. break;
  548. case 1: /* Increment before */
  549. Rn = Rn + 4;
  550. break;
  551. case 2: /* Decrement after */
  552. Rn = Rn - (bits_set * 4) + 4;
  553. break;
  554. case 3: /* Decrement before */
  555. Rn = Rn - (bits_set * 4);
  556. break;
  557. }
  558. for (i = 0; i < 16; i++)
  559. {
  560. if (instruction.info.load_store_multiple.register_list & (1 << i))
  561. {
  562. if((!dry_run_pc) || (i == 15))
  563. {
  564. target_read_u32(target, Rn, &load_values[i]);
  565. }
  566. Rn += 4;
  567. }
  568. }
  569. if (dry_run_pc)
  570. {
  571. if (instruction.info.load_store_multiple.register_list & 0x8000)
  572. {
  573. *dry_run_pc = load_values[15];
  574. return ERROR_OK;
  575. }
  576. }
  577. else
  578. {
  579. enum armv4_5_mode mode = armv4_5->core_mode;
  580. int update_cpsr = 0;
  581. if (instruction.info.load_store_multiple.S)
  582. {
  583. if (instruction.info.load_store_multiple.register_list & 0x8000)
  584. update_cpsr = 1;
  585. else
  586. mode = ARMV4_5_MODE_USR;
  587. }
  588. for (i = 0; i < 16; i++)
  589. {
  590. if (instruction.info.load_store_multiple.register_list & (1 << i))
  591. {
  592. buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, mode, i).value, 0, 32, load_values[i]);
  593. }
  594. }
  595. if (update_cpsr)
  596. {
  597. u32 spsr = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, 16).value, 0, 32);
  598. buf_set_u32(armv4_5->core_cache->reg_list[ARMV4_5_CPSR].value, 0, 32, spsr);
  599. }
  600. /* base register writeback */
  601. if (instruction.info.load_store_multiple.W)
  602. buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32, Rn);
  603. if (instruction.info.load_store_multiple.register_list & 0x8000)
  604. return ERROR_OK;
  605. }
  606. }
  607. /* store multiple instruction */
  608. else if (instruction.type == ARM_STM)
  609. {
  610. int i;
  611. if (dry_run_pc)
  612. {
  613. /* STM wont affect PC (advance by instruction size */
  614. }
  615. else
  616. {
  617. u32 Rn = buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32);
  618. int bits_set = 0;
  619. enum armv4_5_mode mode = armv4_5->core_mode;
  620. for (i = 0; i < 16; i++)
  621. {
  622. if (instruction.info.load_store_multiple.register_list & (1 << i))
  623. bits_set++;
  624. }
  625. if (instruction.info.load_store_multiple.S)
  626. {
  627. mode = ARMV4_5_MODE_USR;
  628. }
  629. switch (instruction.info.load_store_multiple.addressing_mode)
  630. {
  631. case 0: /* Increment after */
  632. Rn = Rn;
  633. break;
  634. case 1: /* Increment before */
  635. Rn = Rn + 4;
  636. break;
  637. case 2: /* Decrement after */
  638. Rn = Rn - (bits_set * 4) + 4;
  639. break;
  640. case 3: /* Decrement before */
  641. Rn = Rn - (bits_set * 4);
  642. break;
  643. }
  644. for (i = 0; i < 16; i++)
  645. {
  646. if (instruction.info.load_store_multiple.register_list & (1 << i))
  647. {
  648. target_write_u32(target, Rn, buf_get_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, i).value, 0, 32));
  649. Rn += 4;
  650. }
  651. }
  652. /* base register writeback */
  653. if (instruction.info.load_store_multiple.W)
  654. buf_set_u32(ARMV4_5_CORE_REG_MODE(armv4_5->core_cache, armv4_5->core_mode, instruction.info.load_store_multiple.Rn).value, 0, 32, Rn);
  655. }
  656. }
  657. else if (!dry_run_pc)
  658. {
  659. /* the instruction wasn't handled, but we're supposed to simulate it
  660. */
  661. return ERROR_ARM_SIMULATOR_NOT_IMPLEMENTED;
  662. }
  663. if (dry_run_pc)
  664. {
  665. *dry_run_pc = current_pc + instruction_size;
  666. return ERROR_OK;
  667. }
  668. else
  669. {
  670. buf_set_u32(armv4_5->core_cache->reg_list[15].value, 0, 32, current_pc + instruction_size);
  671. return ERROR_OK;
  672. }
  673. }