@@ -251,6 +251,50 @@ static inline void asm_thumb_bx_reg(asm_thumb_t *as, uint r_src) {
     asm_thumb_format_5(as, ASM_THUMB_FORMAT_5_BX, 0, r_src);
 }
 
+// FORMAT 7: load/store with register offset
+// FORMAT 8: load/store sign-extended byte/halfword
+
+#define ASM_THUMB_FORMAT_7_LDR (0x5800)
+#define ASM_THUMB_FORMAT_7_STR (0x5000)
+#define ASM_THUMB_FORMAT_7_WORD_TRANSFER (0x0000)
+#define ASM_THUMB_FORMAT_7_BYTE_TRANSFER (0x0400)
+#define ASM_THUMB_FORMAT_8_LDRH (0x5A00)
+#define ASM_THUMB_FORMAT_8_STRH (0x5200)
+
+#define ASM_THUMB_FORMAT_7_8_ENCODE(op, rlo_dest, rlo_base, rlo_index) \
+    ((op) | ((rlo_index) << 6) | ((rlo_base) << 3) | ((rlo_dest)))
+
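+// Encoding note (informational, not part of the original patch): in both
+// formats the index register occupies bits 8:6, the base register bits 5:3 and
+// the transferred register bits 2:0; bit 11 selects load vs store and bit 10
+// the byte (format 7) / sign-extend (format 8) variant, which is what the
+// opcode #defines above encode.
+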
+static inline void asm_thumb_format_7_8(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_base, uint rlo_index) {
+    assert(rlo_dest < ASM_THUMB_REG_R8);
+    assert(rlo_base < ASM_THUMB_REG_R8);
+    assert(rlo_index < ASM_THUMB_REG_R8);
+    asm_thumb_op16(as, ASM_THUMB_FORMAT_7_8_ENCODE(op, rlo_dest, rlo_base, rlo_index));
+}
+
+static inline void asm_thumb_ldrb_rlo_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_base, uint rlo_index) {
+    asm_thumb_format_7_8(as, ASM_THUMB_FORMAT_7_LDR | ASM_THUMB_FORMAT_7_BYTE_TRANSFER, rlo_dest, rlo_base, rlo_index);
+}
+
+static inline void asm_thumb_ldrh_rlo_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_base, uint rlo_index) {
+    asm_thumb_format_7_8(as, ASM_THUMB_FORMAT_8_LDRH, rlo_dest, rlo_base, rlo_index);
+}
+
+static inline void asm_thumb_ldr_rlo_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_base, uint rlo_index) {
+    asm_thumb_format_7_8(as, ASM_THUMB_FORMAT_7_LDR | ASM_THUMB_FORMAT_7_WORD_TRANSFER, rlo_dest, rlo_base, rlo_index);
+}
+
+static inline void asm_thumb_strb_rlo_rlo_rlo(asm_thumb_t *as, uint rlo_src, uint rlo_base, uint rlo_index) {
+    asm_thumb_format_7_8(as, ASM_THUMB_FORMAT_7_STR | ASM_THUMB_FORMAT_7_BYTE_TRANSFER, rlo_src, rlo_base, rlo_index);
+}
+
+static inline void asm_thumb_strh_rlo_rlo_rlo(asm_thumb_t *as, uint rlo_src, uint rlo_base, uint rlo_index) {
+    asm_thumb_format_7_8(as, ASM_THUMB_FORMAT_8_STRH, rlo_src, rlo_base, rlo_index);
+}
+
+static inline void asm_thumb_str_rlo_rlo_rlo(asm_thumb_t *as, uint rlo_src, uint rlo_base, uint rlo_index) {
+    asm_thumb_format_7_8(as, ASM_THUMB_FORMAT_7_STR | ASM_THUMB_FORMAT_7_WORD_TRANSFER, rlo_src, rlo_base, rlo_index);
+}
+
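+// Usage sketch (illustrative, not part of the original patch):
+//   asm_thumb_ldr_rlo_rlo_rlo(as, ASM_THUMB_REG_R0, ASM_THUMB_REG_R1, ASM_THUMB_REG_R2);
+// emits "ldr r0, [r1, r2]", i.e. 0x5800 | (2 << 6) | (1 << 3) | 0 = 0x5888; the
+// store variants take the register holding the value as their first argument.
+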
 // FORMAT 9: load/store with immediate offset
 // For word transfers the offset must be aligned, and >>2
 
@@ -341,6 +385,11 @@ void asm_thumb_mov_reg_pcrel(asm_thumb_t *as, uint rlo_dest, uint label);
 void asm_thumb_ldr_reg_reg_i12_optimised(asm_thumb_t *as, uint reg_dest, uint reg_base, uint word_offset); // convenience
 void asm_thumb_ldrh_reg_reg_i12_optimised(asm_thumb_t *as, uint reg_dest, uint reg_base, uint uint16_offset); // convenience
 
+void asm_thumb_ldrh_reg_reg_reg(asm_thumb_t *as, uint reg_dest, uint reg_base, uint reg_index);
+void asm_thumb_ldr_reg_reg_reg(asm_thumb_t *as, uint reg_dest, uint reg_base, uint reg_index);
+void asm_thumb_strh_reg_reg_reg(asm_thumb_t *as, uint reg_val, uint reg_base, uint reg_index);
+void asm_thumb_str_reg_reg_reg(asm_thumb_t *as, uint reg_val, uint reg_base, uint reg_index);
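+// Note (assumption, not stated in this hunk): these are out-of-line convenience
+// wrappers, presumably defined in asm_thumb.c, taking any register number
+// rather than only the lo registers handled by the inline rlo_rlo_rlo helpers.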
+
 void asm_thumb_b_label(asm_thumb_t *as, uint label); // convenience: picks narrow or wide branch
 void asm_thumb_bcc_label(asm_thumb_t *as, int cc, uint label); // convenience: picks narrow or wide branch
 void asm_thumb_bl_ind(asm_thumb_t *as, uint fun_id, uint reg_temp); // convenience