author | Jim Grosbach <grosbach@apple.com> | 2010-12-03 22:31:40 +0000 |
committer | Jim Grosbach <grosbach@apple.com> | 2010-12-03 22:31:40 +0000 |
commit | d91f4e40e6312304c60c83c3dd93f769a39a9772 (patch) | |
tree | 003a4711455541613b8e9792f3615a2017771219 /test/MC/ARM/neont2-shift-encoding.s | |
parent | 5812b10adb850337a56d9480c3383abfe1c12f25 (diff) | |
Encode 32-bit wide Thumb (and Thumb2) instructions with the high-order
halfword emitted to the stream first. rdar://8728174
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@120848 91177308-0d34-0410-b5e6-96231b3b80d8
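For readers tracing the byte order: a 32-bit wide Thumb2 instruction is streamed as two little-endian halfwords, with the high-order halfword written first. The sketch below is illustrative only, not the LLVM emitter itself; the helper name emitThumb2Word is made up, and the input word 0xff4004a1 is an assumption inferred from the updated expectation for vshl.u8 d16, d17, d16 in the diff below.

#include <cstdint>
#include <cstdio>

// Illustrative helper (not the actual LLVM emitter): write a 32-bit wide
// Thumb2 instruction as two little-endian halfwords, high-order halfword
// first, which is the ordering this commit switches to.
static void emitThumb2Word(uint32_t Insn, uint8_t Out[4]) {
  uint16_t Hi = Insn >> 16;     // high-order halfword goes out first
  uint16_t Lo = Insn & 0xffff;  // low-order halfword follows
  Out[0] = Hi & 0xff;           // each halfword is itself little-endian
  Out[1] = Hi >> 8;
  Out[2] = Lo & 0xff;
  Out[3] = Lo >> 8;
}

int main() {
  uint8_t Bytes[4];
  // 0xff4004a1 is the 32-bit word implied by the updated test expectation
  // for "vshl.u8 d16, d17, d16" (assumption drawn from the diff below).
  emitThumb2Word(0xff4004a1, Bytes);
  std::printf("%02x %02x %02x %02x\n",
              Bytes[0], Bytes[1], Bytes[2], Bytes[3]);  // 40 ff a1 04
  return 0;
}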
Diffstat (limited to 'test/MC/ARM/neont2-shift-encoding.s')
-rw-r--r-- | test/MC/ARM/neont2-shift-encoding.s | 158 |
1 file changed, 79 insertions, 79 deletions
diff --git a/test/MC/ARM/neont2-shift-encoding.s b/test/MC/ARM/neont2-shift-encoding.s
index c96ce08..46db951 100644
--- a/test/MC/ARM/neont2-shift-encoding.s
+++ b/test/MC/ARM/neont2-shift-encoding.s
@@ -2,161 +2,161 @@ .code 16
-@ CHECK: vshl.u8 d16, d17, d16 @ encoding: [0xa1,0x04,0x40,0xff]
+@ CHECK: vshl.u8 d16, d17, d16 @ encoding: [0x40,0xff,0xa1,0x04]
 vshl.u8 d16, d17, d16
-@ CHECK: vshl.u16 d16, d17, d16 @ encoding: [0xa1,0x04,0x50,0xff]
+@ CHECK: vshl.u16 d16, d17, d16 @ encoding: [0x50,0xff,0xa1,0x04]
 vshl.u16 d16, d17, d16
-@ CHECK: vshl.u32 d16, d17, d16 @ encoding: [0xa1,0x04,0x60,0xff]
+@ CHECK: vshl.u32 d16, d17, d16 @ encoding: [0x60,0xff,0xa1,0x04]
 vshl.u32 d16, d17, d16
-@ CHECK: vshl.u64 d16, d17, d16 @ encoding: [0xa1,0x04,0x70,0xff]
+@ CHECK: vshl.u64 d16, d17, d16 @ encoding: [0x70,0xff,0xa1,0x04]
 vshl.u64 d16, d17, d16
-@ CHECK: vshl.i8 d16, d16, #7 @ encoding: [0x30,0x05,0xcf,0xef]
+@ CHECK: vshl.i8 d16, d16, #7 @ encoding: [0xcf,0xef,0x30,0x05]
 vshl.i8 d16, d16, #7
-@ CHECK: vshl.i16 d16, d16, #15 @ encoding: [0x30,0x05,0xdf,0xef]
+@ CHECK: vshl.i16 d16, d16, #15 @ encoding: [0xdf,0xef,0x30,0x05]
 vshl.i16 d16, d16, #15
-@ CHECK: vshl.i32 d16, d16, #31 @ encoding: [0x30,0x05,0xff,0xef]
+@ CHECK: vshl.i32 d16, d16, #31 @ encoding: [0xff,0xef,0x30,0x05]
 vshl.i32 d16, d16, #31
-@ CHECK: vshl.i64 d16, d16, #63 @ encoding: [0xb0,0x05,0xff,0xef]
+@ CHECK: vshl.i64 d16, d16, #63 @ encoding: [0xff,0xef,0xb0,0x05]
 vshl.i64 d16, d16, #63
-@ CHECK: vshl.u8 q8, q9, q8 @ encoding: [0xe2,0x04,0x40,0xff]
+@ CHECK: vshl.u8 q8, q9, q8 @ encoding: [0x40,0xff,0xe2,0x04]
 vshl.u8 q8, q9, q8
-@ CHECK: vshl.u16 q8, q9, q8 @ encoding: [0xe2,0x04,0x50,0xff]
+@ CHECK: vshl.u16 q8, q9, q8 @ encoding: [0x50,0xff,0xe2,0x04]
 vshl.u16 q8, q9, q8
-@ CHECK: vshl.u32 q8, q9, q8 @ encoding: [0xe2,0x04,0x60,0xff]
+@ CHECK: vshl.u32 q8, q9, q8 @ encoding: [0x60,0xff,0xe2,0x04]
 vshl.u32 q8, q9, q8
-@ CHECK: vshl.u64 q8, q9, q8 @ encoding: [0xe2,0x04,0x70,0xff]
+@ CHECK: vshl.u64 q8, q9, q8 @ encoding: [0x70,0xff,0xe2,0x04]
 vshl.u64 q8, q9, q8
-@ CHECK: vshl.i8 q8, q8, #7 @ encoding: [0x70,0x05,0xcf,0xef]
+@ CHECK: vshl.i8 q8, q8, #7 @ encoding: [0xcf,0xef,0x70,0x05]
 vshl.i8 q8, q8, #7
-@ CHECK: vshl.i16 q8, q8, #15 @ encoding: [0x70,0x05,0xdf,0xef]
+@ CHECK: vshl.i16 q8, q8, #15 @ encoding: [0xdf,0xef,0x70,0x05]
 vshl.i16 q8, q8, #15
-@ CHECK: vshl.i32 q8, q8, #31 @ encoding: [0x70,0x05,0xff,0xef]
+@ CHECK: vshl.i32 q8, q8, #31 @ encoding: [0xff,0xef,0x70,0x05]
 vshl.i32 q8, q8, #31
-@ CHECK: vshl.i64 q8, q8, #63 @ encoding: [0xf0,0x05,0xff,0xef]
+@ CHECK: vshl.i64 q8, q8, #63 @ encoding: [0xff,0xef,0xf0,0x05]
 vshl.i64 q8, q8, #63
-@ CHECK: vshr.u8 d16, d16, #8 @ encoding: [0x30,0x00,0xc8,0xff]
+@ CHECK: vshr.u8 d16, d16, #8 @ encoding: [0xc8,0xff,0x30,0x00]
 vshr.u8 d16, d16, #8
-@ CHECK: vshr.u16 d16, d16, #16 @ encoding: [0x30,0x00,0xd0,0xff]
+@ CHECK: vshr.u16 d16, d16, #16 @ encoding: [0xd0,0xff,0x30,0x00]
 vshr.u16 d16, d16, #16
-@ CHECK: vshr.u32 d16, d16, #32 @ encoding: [0x30,0x00,0xe0,0xff]
+@ CHECK: vshr.u32 d16, d16, #32 @ encoding: [0xe0,0xff,0x30,0x00]
 vshr.u32 d16, d16, #32
-@ CHECK: vshr.u64 d16, d16, #64 @ encoding: [0xb0,0x00,0xc0,0xff]
+@ CHECK: vshr.u64 d16, d16, #64 @ encoding: [0xc0,0xff,0xb0,0x00]
 vshr.u64 d16, d16, #64
-@ CHECK: vshr.u8 q8, q8, #8 @ encoding: [0x70,0x00,0xc8,0xff]
+@ CHECK: vshr.u8 q8, q8, #8 @ encoding: [0xc8,0xff,0x70,0x00]
 vshr.u8 q8, q8, #8
-@ CHECK: vshr.u16 q8, q8, #16 @ encoding: [0x70,0x00,0xd0,0xff]
+@ CHECK: vshr.u16 q8, q8, #16 @ encoding: [0xd0,0xff,0x70,0x00]
 vshr.u16 q8, q8, #16
-@ CHECK: vshr.u32 q8, q8, #32 @ encoding: [0x70,0x00,0xe0,0xff]
+@ CHECK: vshr.u32 q8, q8, #32 @ encoding: [0xe0,0xff,0x70,0x00]
 vshr.u32 q8, q8, #32
-@ CHECK: vshr.u64 q8, q8, #64 @ encoding: [0xf0,0x00,0xc0,0xff]
+@ CHECK: vshr.u64 q8, q8, #64 @ encoding: [0xc0,0xff,0xf0,0x00]
 vshr.u64 q8, q8, #64
-@ CHECK: vshr.s8 d16, d16, #8 @ encoding: [0x30,0x00,0xc8,0xef]
+@ CHECK: vshr.s8 d16, d16, #8 @ encoding: [0xc8,0xef,0x30,0x00]
 vshr.s8 d16, d16, #8
-@ CHECK: vshr.s16 d16, d16, #16 @ encoding: [0x30,0x00,0xd0,0xef]
+@ CHECK: vshr.s16 d16, d16, #16 @ encoding: [0xd0,0xef,0x30,0x00]
 vshr.s16 d16, d16, #16
-@ CHECK: vshr.s32 d16, d16, #32 @ encoding: [0x30,0x00,0xe0,0xef]
+@ CHECK: vshr.s32 d16, d16, #32 @ encoding: [0xe0,0xef,0x30,0x00]
 vshr.s32 d16, d16, #32
-@ CHECK: vshr.s64 d16, d16, #64 @ encoding: [0xb0,0x00,0xc0,0xef]
+@ CHECK: vshr.s64 d16, d16, #64 @ encoding: [0xc0,0xef,0xb0,0x00]
 vshr.s64 d16, d16, #64
-@ CHECK: vshr.s8 q8, q8, #8 @ encoding: [0x70,0x00,0xc8,0xef]
+@ CHECK: vshr.s8 q8, q8, #8 @ encoding: [0xc8,0xef,0x70,0x00]
 vshr.s8 q8, q8, #8
-@ CHECK: vshr.s16 q8, q8, #16 @ encoding: [0x70,0x00,0xd0,0xef]
+@ CHECK: vshr.s16 q8, q8, #16 @ encoding: [0xd0,0xef,0x70,0x00]
 vshr.s16 q8, q8, #16
-@ CHECK: vshr.s32 q8, q8, #32 @ encoding: [0x70,0x00,0xe0,0xef]
+@ CHECK: vshr.s32 q8, q8, #32 @ encoding: [0xe0,0xef,0x70,0x00]
 vshr.s32 q8, q8, #32
-@ CHECK: vshr.s64 q8, q8, #64 @ encoding: [0xf0,0x00,0xc0,0xef]
+@ CHECK: vshr.s64 q8, q8, #64 @ encoding: [0xc0,0xef,0xf0,0x00]
 vshr.s64 q8, q8, #64
-@ CHECK: vshll.s8 q8, d16, #7 @ encoding: [0x30,0x0a,0xcf,0xef]
+@ CHECK: vshll.s8 q8, d16, #7 @ encoding: [0xcf,0xef,0x30,0x0a]
 vshll.s8 q8, d16, #7
-@ CHECK: vshll.s16 q8, d16, #15 @ encoding: [0x30,0x0a,0xdf,0xef]
+@ CHECK: vshll.s16 q8, d16, #15 @ encoding: [0xdf,0xef,0x30,0x0a]
 vshll.s16 q8, d16, #15
-@ CHECK: vshll.s32 q8, d16, #31 @ encoding: [0x30,0x0a,0xff,0xef]
+@ CHECK: vshll.s32 q8, d16, #31 @ encoding: [0xff,0xef,0x30,0x0a]
 vshll.s32 q8, d16, #31
-@ CHECK: vshll.u8 q8, d16, #7 @ encoding: [0x30,0x0a,0xcf,0xff]
+@ CHECK: vshll.u8 q8, d16, #7 @ encoding: [0xcf,0xff,0x30,0x0a]
 vshll.u8 q8, d16, #7
-@ CHECK: vshll.u16 q8, d16, #15 @ encoding: [0x30,0x0a,0xdf,0xff]
+@ CHECK: vshll.u16 q8, d16, #15 @ encoding: [0xdf,0xff,0x30,0x0a]
 vshll.u16 q8, d16, #15
-@ CHECK: vshll.u32 q8, d16, #31 @ encoding: [0x30,0x0a,0xff,0xff]
+@ CHECK: vshll.u32 q8, d16, #31 @ encoding: [0xff,0xff,0x30,0x0a]
 vshll.u32 q8, d16, #31
-@ CHECK: vshll.i8 q8, d16, #8 @ encoding: [0x20,0x03,0xf2,0xff]
+@ CHECK: vshll.i8 q8, d16, #8 @ encoding: [0xf2,0xff,0x20,0x03]
 vshll.i8 q8, d16, #8
-@ CHECK: vshll.i16 q8, d16, #16 @ encoding: [0x20,0x03,0xf6,0xff]
+@ CHECK: vshll.i16 q8, d16, #16 @ encoding: [0xf6,0xff,0x20,0x03]
 vshll.i16 q8, d16, #16
-@ CHECK: vshll.i32 q8, d16, #32 @ encoding: [0x20,0x03,0xfa,0xff]
+@ CHECK: vshll.i32 q8, d16, #32 @ encoding: [0xfa,0xff,0x20,0x03]
 vshll.i32 q8, d16, #32
-@ CHECK: vshrn.i16 d16, q8, #8 @ encoding: [0x30,0x08,0xc8,0xef]
+@ CHECK: vshrn.i16 d16, q8, #8 @ encoding: [0xc8,0xef,0x30,0x08]
 vshrn.i16 d16, q8, #8
-@ CHECK: vshrn.i32 d16, q8, #16 @ encoding: [0x30,0x08,0xd0,0xef]
+@ CHECK: vshrn.i32 d16, q8, #16 @ encoding: [0xd0,0xef,0x30,0x08]
 vshrn.i32 d16, q8, #16
-@ CHECK: vshrn.i64 d16, q8, #32 @ encoding: [0x30,0x08,0xe0,0xef]
+@ CHECK: vshrn.i64 d16, q8, #32 @ encoding: [0xe0,0xef,0x30,0x08]
 vshrn.i64 d16, q8, #32
-@ CHECK: vrshl.s8 d16, d17, d16 @ encoding: [0xa1,0x05,0x40,0xef]
+@ CHECK: vrshl.s8 d16, d17, d16 @ encoding: [0x40,0xef,0xa1,0x05]
 vrshl.s8 d16, d17, d16
-@ CHECK: vrshl.s16 d16, d17, d16 @ encoding: [0xa1,0x05,0x50,0xef]
+@ CHECK: vrshl.s16 d16, d17, d16 @ encoding: [0x50,0xef,0xa1,0x05]
 vrshl.s16 d16, d17, d16
-@ CHECK: vrshl.s32 d16, d17, d16 @ encoding: [0xa1,0x05,0x60,0xef]
+@ CHECK: vrshl.s32 d16, d17, d16 @ encoding: [0x60,0xef,0xa1,0x05]
 vrshl.s32 d16, d17, d16
-@ CHECK: vrshl.s64 d16, d17, d16 @ encoding: [0xa1,0x05,0x70,0xef]
+@ CHECK: vrshl.s64 d16, d17, d16 @ encoding: [0x70,0xef,0xa1,0x05]
 vrshl.s64 d16, d17, d16
-@ CHECK: vrshl.u8 d16, d17, d16 @ encoding: [0xa1,0x05,0x40,0xff]
+@ CHECK: vrshl.u8 d16, d17, d16 @ encoding: [0x40,0xff,0xa1,0x05]
 vrshl.u8 d16, d17, d16
-@ CHECK: vrshl.u16 d16, d17, d16 @ encoding: [0xa1,0x05,0x50,0xff]
+@ CHECK: vrshl.u16 d16, d17, d16 @ encoding: [0x50,0xff,0xa1,0x05]
 vrshl.u16 d16, d17, d16
-@ CHECK: vrshl.u32 d16, d17, d16 @ encoding: [0xa1,0x05,0x60,0xff]
+@ CHECK: vrshl.u32 d16, d17, d16 @ encoding: [0x60,0xff,0xa1,0x05]
 vrshl.u32 d16, d17, d16
-@ CHECK: vrshl.u64 d16, d17, d16 @ encoding: [0xa1,0x05,0x70,0xff]
+@ CHECK: vrshl.u64 d16, d17, d16 @ encoding: [0x70,0xff,0xa1,0x05]
 vrshl.u64 d16, d17, d16
-@ CHECK: vrshl.s8 q8, q9, q8 @ encoding: [0xe2,0x05,0x40,0xef]
+@ CHECK: vrshl.s8 q8, q9, q8 @ encoding: [0x40,0xef,0xe2,0x05]
 vrshl.s8 q8, q9, q8
-@ CHECK: vrshl.s16 q8, q9, q8 @ encoding: [0xe2,0x05,0x50,0xef]
+@ CHECK: vrshl.s16 q8, q9, q8 @ encoding: [0x50,0xef,0xe2,0x05]
 vrshl.s16 q8, q9, q8
-@ CHECK: vrshl.s32 q8, q9, q8 @ encoding: [0xe2,0x05,0x60,0xef]
+@ CHECK: vrshl.s32 q8, q9, q8 @ encoding: [0x60,0xef,0xe2,0x05]
 vrshl.s32 q8, q9, q8
-@ CHECK: vrshl.s64 q8, q9, q8 @ encoding: [0xe2,0x05,0x70,0xef]
+@ CHECK: vrshl.s64 q8, q9, q8 @ encoding: [0x70,0xef,0xe2,0x05]
 vrshl.s64 q8, q9, q8
-@ CHECK: vrshl.u8 q8, q9, q8 @ encoding: [0xe2,0x05,0x40,0xff]
+@ CHECK: vrshl.u8 q8, q9, q8 @ encoding: [0x40,0xff,0xe2,0x05]
 vrshl.u8 q8, q9, q8
-@ CHECK: vrshl.u16 q8, q9, q8 @ encoding: [0xe2,0x05,0x50,0xff]
+@ CHECK: vrshl.u16 q8, q9, q8 @ encoding: [0x50,0xff,0xe2,0x05]
 vrshl.u16 q8, q9, q8
-@ CHECK: vrshl.u32 q8, q9, q8 @ encoding: [0xe2,0x05,0x60,0xff]
+@ CHECK: vrshl.u32 q8, q9, q8 @ encoding: [0x60,0xff,0xe2,0x05]
 vrshl.u32 q8, q9, q8
-@ CHECK: vrshl.u64 q8, q9, q8 @ encoding: [0xe2,0x05,0x70,0xff]
+@ CHECK: vrshl.u64 q8, q9, q8 @ encoding: [0x70,0xff,0xe2,0x05]
 vrshl.u64 q8, q9, q8
-@ CHECK: vrshr.s8 d16, d16, #8 @ encoding: [0x30,0x02,0xc8,0xef]
+@ CHECK: vrshr.s8 d16, d16, #8 @ encoding: [0xc8,0xef,0x30,0x02]
 vrshr.s8 d16, d16, #8
-@ CHECK: vrshr.s16 d16, d16, #16 @ encoding: [0x30,0x02,0xd0,0xef]
+@ CHECK: vrshr.s16 d16, d16, #16 @ encoding: [0xd0,0xef,0x30,0x02]
 vrshr.s16 d16, d16, #16
-@ CHECK: vrshr.s32 d16, d16, #32 @ encoding: [0x30,0x02,0xe0,0xef]
+@ CHECK: vrshr.s32 d16, d16, #32 @ encoding: [0xe0,0xef,0x30,0x02]
 vrshr.s32 d16, d16, #32
-@ CHECK: vrshr.s64 d16, d16, #64 @ encoding: [0xb0,0x02,0xc0,0xef]
+@ CHECK: vrshr.s64 d16, d16, #64 @ encoding: [0xc0,0xef,0xb0,0x02]
 vrshr.s64 d16, d16, #64
-@ CHECK: vrshr.u8 d16, d16, #8 @ encoding: [0x30,0x02,0xc8,0xff]
+@ CHECK: vrshr.u8 d16, d16, #8 @ encoding: [0xc8,0xff,0x30,0x02]
 vrshr.u8 d16, d16, #8
-@ CHECK: vrshr.u16 d16, d16, #16 @ encoding: [0x30,0x02,0xd0,0xff]
+@ CHECK: vrshr.u16 d16, d16, #16 @ encoding: [0xd0,0xff,0x30,0x02]
 vrshr.u16 d16, d16, #16
-@ CHECK: vrshr.u32 d16, d16, #32 @ encoding: [0x30,0x02,0xe0,0xff]
+@ CHECK: vrshr.u32 d16, d16, #32 @ encoding: [0xe0,0xff,0x30,0x02]
 vrshr.u32 d16, d16, #32
-@ CHECK: vrshr.u64 d16, d16, #64 @ encoding: [0xb0,0x02,0xc0,0xff]
+@ CHECK: vrshr.u64 d16, d16, #64 @ encoding: [0xc0,0xff,0xb0,0x02]
 vrshr.u64 d16, d16, #64
-@ CHECK: vrshr.s8 q8, q8, #8 @ encoding: [0x70,0x02,0xc8,0xef]
+@ CHECK: vrshr.s8 q8, q8, #8 @ encoding: [0xc8,0xef,0x70,0x02]
 vrshr.s8 q8, q8, #8
-@ CHECK: vrshr.s16 q8, q8, #16 @ encoding: [0x70,0x02,0xd0,0xef]
+@ CHECK: vrshr.s16 q8, q8, #16 @ encoding: [0xd0,0xef,0x70,0x02]
 vrshr.s16 q8, q8, #16
-@ CHECK: vrshr.s32 q8, q8, #32 @ encoding: [0x70,0x02,0xe0,0xef]
+@ CHECK: vrshr.s32 q8, q8, #32 @ encoding: [0xe0,0xef,0x70,0x02]
 vrshr.s32 q8, q8, #32
-@ CHECK: vrshr.s64 q8, q8, #64 @ encoding: [0xf0,0x02,0xc0,0xef]
+@ CHECK: vrshr.s64 q8, q8, #64 @ encoding: [0xc0,0xef,0xf0,0x02]
 vrshr.s64 q8, q8, #64
-@ CHECK: vrshr.u8 q8, q8, #8 @ encoding: [0x70,0x02,0xc8,0xff]
+@ CHECK: vrshr.u8 q8, q8, #8 @ encoding: [0xc8,0xff,0x70,0x02]
 vrshr.u8 q8, q8, #8
-@ CHECK: vrshr.u16 q8, q8, #16 @ encoding: [0x70,0x02,0xd0,0xff]
+@ CHECK: vrshr.u16 q8, q8, #16 @ encoding: [0xd0,0xff,0x70,0x02]
 vrshr.u16 q8, q8, #16
-@ CHECK: vrshr.u32 q8, q8, #32 @ encoding: [0x70,0x02,0xe0,0xff]
+@ CHECK: vrshr.u32 q8, q8, #32 @ encoding: [0xe0,0xff,0x70,0x02]
 vrshr.u32 q8, q8, #32
-@ CHECK: vrshr.u64 q8, q8, #64 @ encoding: [0xf0,0x02,0xc0,0xff]
+@ CHECK: vrshr.u64 q8, q8, #64 @ encoding: [0xc0,0xff,0xf0,0x02]
 vrshr.u64 q8, q8, #64
-@ CHECK: vrshrn.i16 d16, q8, #8 @ encoding: [0x70,0x08,0xc8,0xef]
+@ CHECK: vrshrn.i16 d16, q8, #8 @ encoding: [0xc8,0xef,0x70,0x08]
 vrshrn.i16 d16, q8, #8
-@ CHECK: vrshrn.i32 d16, q8, #16 @ encoding: [0x70,0x08,0xd0,0xef]
+@ CHECK: vrshrn.i32 d16, q8, #16 @ encoding: [0xd0,0xef,0x70,0x08]
 vrshrn.i32 d16, q8, #16
-@ CHECK: vrshrn.i64 d16, q8, #32 @ encoding: [0x70,0x08,0xe0,0xef]
+@ CHECK: vrshrn.i64 d16, q8, #32 @ encoding: [0xe0,0xef,0x70,0x08]
 vrshrn.i64 d16, q8, #32
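Every updated CHECK line above is the old expectation with its two byte pairs exchanged, since only the order of the emitted halfwords changed. A minimal sanity-check sketch, assuming nothing beyond the byte lists shown in the diff (swapHalfwords is a hypothetical helper; the values come from the vshr.u64 d16, d16, #64 case):

#include <array>
#include <cassert>
#include <cstdint>

// Hypothetical check that the test update is a pure halfword swap:
// move the last two bytes (high-order halfword) to the front.
static std::array<uint8_t, 4> swapHalfwords(const std::array<uint8_t, 4> &Old) {
  return {Old[2], Old[3], Old[0], Old[1]};
}

int main() {
  // Old expectation for "vshr.u64 d16, d16, #64" was [0xb0,0x00,0xc0,0xff];
  // the new expectation in the diff is [0xc0,0xff,0xb0,0x00].
  const std::array<uint8_t, 4> NewBytes = {0xc0, 0xff, 0xb0, 0x00};
  assert(swapHalfwords({0xb0, 0x00, 0xc0, 0xff}) == NewBytes);
  return 0;
}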