| // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| // for details. All rights reserved. Use of this source code is governed by a |
| // BSD-style license that can be found in the LICENSE file. |
| |
| #include "vm/globals.h" |
| #if defined(TARGET_ARCH_ARM64) |
| |
| #include "vm/compiler/assembler/assembler.h" |
| #include "vm/cpu.h" |
| #include "vm/os.h" |
| #include "vm/unit_test.h" |
| #include "vm/virtual_memory.h" |
| |
| namespace dart { |
| namespace compiler { |
| #define __ assembler-> |
| |
| #if defined(PRODUCT) |
| #define EXPECT_DISASSEMBLY(expected) |
| #else |
| #define EXPECT_DISASSEMBLY(expected) \ |
| EXPECT_STREQ(expected, test->RelativeDisassembly()) |
| #endif |
| |
// Smoke test: materialize 42 with two adds and return it in R0.
ASSEMBLER_TEST_GENERATE(Simple, assembler) {
  __ add(R0, ZR, Operand(ZR));  // R0 <- 0 + 0.
  __ add(R0, R0, Operand(42));  // R0 <- R0 + 42.
  __ ret();
}

ASSEMBLER_TEST_RUN(Simple, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  // Disassembly is only checked in non-PRODUCT builds (see EXPECT_DISASSEMBLY).
  EXPECT_DISASSEMBLY(
      "add r0, zr, zr\n"
      "add r0, r0, #0x2a\n"
      "ret\n");
}
| |
// Move wide immediate tests.
// movz: loads a 16-bit immediate into one half-word of the destination and
// zeroes the rest. The trailing argument selects the half-word index
// (effective shift = index * 16).
ASSEMBLER_TEST_GENERATE(Movz0, assembler) {
  __ movz(R0, Immediate(42), 0);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movz0, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(Movz1, assembler) {
  __ movz(R0, Immediate(42), 0);  // Overwritten by next instruction.
  __ movz(R0, Immediate(42), 1);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movz1, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42LL << 16, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movz r0, #0x2a lsl 16\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(Movz2, assembler) {
  __ movz(R0, Immediate(42), 2);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movz2, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42LL << 32, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a lsl 32\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(Movz3, assembler) {
  __ movz(R0, Immediate(42), 3);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movz3, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42LL << 48, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a lsl 48\n"
      "ret\n");
}
| |
// movn: like movz but writes the bitwise NOT of the (shifted) immediate,
// so the expected results are the complements of the Movz tests.
ASSEMBLER_TEST_GENERATE(Movn0, assembler) {
  __ movn(R0, Immediate(42), 0);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movn0, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(~42LL, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movn r0, #0x2a\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(Movn1, assembler) {
  __ movn(R0, Immediate(42), 1);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movn1, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(~(42LL << 16), EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movn r0, #0x2a lsl 16\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(Movn2, assembler) {
  __ movn(R0, Immediate(42), 2);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movn2, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(~(42LL << 32), EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movn r0, #0x2a lsl 32\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(Movn3, assembler) {
  __ movn(R0, Immediate(42), 3);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movn3, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(~(42LL << 48), EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movn r0, #0x2a lsl 48\n"
      "ret\n");
}
| |
// movk: inserts a 16-bit immediate into one half-word while keeping the
// other bits of the destination unchanged; each test seeds the register
// with movz first to show the untouched bits survive.
ASSEMBLER_TEST_GENERATE(Movk0, assembler) {
  __ movz(R0, Immediate(1), 3);
  __ movk(R0, Immediate(42), 0);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movk0, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42LL | (1LL << 48),
            EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x1 lsl 48\n"
      "movk r0, #0x2a\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(Movk1, assembler) {
  __ movz(R0, Immediate(1), 0);
  __ movk(R0, Immediate(42), 1);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movk1, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ((42LL << 16) | 1,
            EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x1\n"
      "movk r0, #0x2a lsl 16\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(Movk2, assembler) {
  __ movz(R0, Immediate(1), 0);
  __ movk(R0, Immediate(42), 2);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movk2, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ((42LL << 32) | 1,
            EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x1\n"
      "movk r0, #0x2a lsl 32\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(Movk3, assembler) {
  __ movz(R0, Immediate(1), 0);
  __ movk(R0, Immediate(42), 3);
  __ ret();
}

ASSEMBLER_TEST_RUN(Movk3, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ((42LL << 48) | 1,
            EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x1\n"
      "movk r0, #0x2a lsl 48\n"
      "ret\n");
}
| |
// movz with an immediate whose top bit is set (0x8000): must stay
// zero-extended, not sign-extended.
ASSEMBLER_TEST_GENERATE(MovzBig, assembler) {
  __ movz(R0, Immediate(0x8000), 0);
  __ ret();
}

ASSEMBLER_TEST_RUN(MovzBig, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(0x8000, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x8000\n"
      "ret\n");
}
| |
// add tests.
// Register-register add: 20 + 22 == 42.
ASSEMBLER_TEST_GENERATE(AddReg, assembler) {
  __ movz(R0, Immediate(20), 0);
  __ movz(R1, Immediate(22), 0);
  __ add(R0, R0, Operand(R1));
  __ ret();
}

ASSEMBLER_TEST_RUN(AddReg, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x14\n"
      "movz r1, #0x16\n"
      "add r0, r0, r1\n"
      "ret\n");
}
| |
// Add with a shifted-register operand: the second source register is
// shifted (LSL/LSR/ASR) before the addition.
ASSEMBLER_TEST_GENERATE(AddLSLReg, assembler) {
  __ movz(R0, Immediate(20), 0);
  __ movz(R1, Immediate(11), 0);
  __ add(R0, R0, Operand(R1, LSL, 1));  // 20 + (11 << 1) == 42.
  __ ret();
}

ASSEMBLER_TEST_RUN(AddLSLReg, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x14\n"
      "movz r1, #0xb\n"
      "add r0, r0, r1 lsl #1\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(AddLSRReg, assembler) {
  __ movz(R0, Immediate(20), 0);
  __ movz(R1, Immediate(44), 0);
  __ add(R0, R0, Operand(R1, LSR, 1));  // 20 + (44 >> 1) == 42.
  __ ret();
}

ASSEMBLER_TEST_RUN(AddLSRReg, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x14\n"
      "movz r1, #0x2c\n"
      "add r0, r0, r1 lsr #1\n"
      "ret\n");
}

ASSEMBLER_TEST_GENERATE(AddASRReg, assembler) {
  __ movz(R0, Immediate(20), 0);
  __ movz(R1, Immediate(44), 0);
  __ add(R0, R0, Operand(R1, ASR, 1));  // Positive value: same as LSR here.
  __ ret();
}

ASSEMBLER_TEST_RUN(AddASRReg, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x14\n"
      "movz r1, #0x2c\n"
      "add r0, r0, r1 asr #1\n"
      "ret\n");
}
| |
// ASR on a negative value must sign-extend: 43 + (-8 >> 3) == 42.
ASSEMBLER_TEST_GENERATE(AddASRNegReg, assembler) {
  __ movz(R0, Immediate(43), 0);
  __ movn(R1, Immediate(0), 0);         // R1 <- -1
  __ add(R1, ZR, Operand(R1, LSL, 3));  // R1 <- -8
  __ add(R0, R0, Operand(R1, ASR, 3));  // R0 <- 43 + (-8 >> 3)
  __ ret();
}

ASSEMBLER_TEST_RUN(AddASRNegReg, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2b\n"
      "movn r1, #0x0\n"
      "add r1, zr, r1 lsl #3\n"
      "add r0, r0, r1 asr #3\n"
      "ret\n");
}
| |
// TODO(zra): test other sign extension modes.
// Add with an extended-register operand: SXTW sign-extends the low 32 bits
// of R1 (-1 as int32) before the addition, so 43 + (-1) == 42.
ASSEMBLER_TEST_GENERATE(AddExtReg, assembler) {
  __ movz(R0, Immediate(43), 0);
  __ movz(R1, Immediate(0xffff), 0);
  __ movk(R1, Immediate(0xffff), 1);    // R1 <- -1 (32-bit)
  __ add(R0, R0, Operand(R1, SXTW, 0));  // R0 <- R0 + (sign extended R1)
  __ ret();
}

ASSEMBLER_TEST_RUN(AddExtReg, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2b\n"
      "movz r1, #0xffff\n"
      "movk r1, #0xffff lsl 16\n"
      "add r0, r0, r1 sxtw\n"
      "ret\n");
}
| |
// adds/adcs/adc: verify the carry flag is both produced (adds, adcs) and
// consumed (adcs, adc). Final result: 0 + 0 + carry(1) == 1.
ASSEMBLER_TEST_GENERATE(AddCarryInOut, assembler) {
  __ LoadImmediate(R2, -1);
  __ LoadImmediate(R1, 1);
  __ LoadImmediate(R0, 0);
  __ adds(IP0, R2, Operand(R1));  // c_out = 1.
  __ adcs(IP0, R2, R0);           // c_in = 1, c_out = 1.
  __ adc(R0, R0, R0);             // c_in = 1.
  __ ret();
}

ASSEMBLER_TEST_RUN(AddCarryInOut, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movn r2, #0x0\n"
      "movz r1, #0x1\n"
      "movz r0, #0x0\n"
      "adds tmp, r2, r1\n"
      "adcs tmp, r2, r0\n"
      "adc r0, r0, r0\n"
      "ret\n");
}
| |
// subs/sbcs/sbc: verify borrow propagation. On ARM64 carry is the inverse
// of borrow, so 0 - 0 with borrow gives -1.
ASSEMBLER_TEST_GENERATE(SubCarryInOut, assembler) {
  __ LoadImmediate(R1, 1);
  __ LoadImmediate(R0, 0);
  __ subs(IP0, R0, Operand(R1));  // c_out = 1.
  __ sbcs(IP0, R0, R0);           // c_in = 1, c_out = 1.
  __ sbc(R0, R0, R0);             // c_in = 1.
  __ ret();
}

ASSEMBLER_TEST_RUN(SubCarryInOut, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(-1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0x1\n"
      "movz r0, #0x0\n"
      "subs tmp, r0, r1\n"
      "sbcs tmp, r0, r0\n"
      "sbc r0, r0, r0\n"
      "ret\n");
}
| |
// Signed-overflow flag (V): adding carry into INT64_MAX overflows, so the
// csinc keeps R0 at 0 (the VS condition holds).
ASSEMBLER_TEST_GENERATE(Overflow, assembler) {
  __ LoadImmediate(R0, 0);
  __ LoadImmediate(R1, 1);
  __ LoadImmediate(R2, 0xFFFFFFFFFFFFFFFF);
  __ LoadImmediate(R3, 0x7FFFFFFFFFFFFFFF);
  __ adds(IP0, R2, Operand(R1));  // c_out = 1.
  __ adcs(IP0, R3, R0);           // c_in = 1, c_out = 1, v = 1.
  __ csinc(R0, R0, R0, VS);       // R0 = v ? R0 : R0 + 1.
  __ ret();
}

ASSEMBLER_TEST_RUN(Overflow, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movz r1, #0x1\n"
      "movn r2, #0x0\n"
      "mov r3, 0x7fffffffffffffff\n"
      "adds tmp, r2, r1\n"
      "adcs tmp, r3, r0\n"
      "csinc r0, r0, r0, vs\n"
      "ret\n");
}
| |
// 32-bit variants (addsw/adcsw/adcw): carry produced/consumed on word-sized
// arithmetic; same structure as AddCarryInOut.
ASSEMBLER_TEST_GENERATE(WordAddCarryInOut, assembler) {
  __ LoadImmediate(R2, -1);
  __ LoadImmediate(R1, 1);
  __ LoadImmediate(R0, 0);
  __ addsw(IP0, R2, Operand(R1));  // c_out = 1.
  __ adcsw(IP0, R2, R0);           // c_in = 1, c_out = 1.
  __ adcw(R0, R0, R0);             // c_in = 1.
  __ ret();
}

ASSEMBLER_TEST_RUN(WordAddCarryInOut, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movn r2, #0x0\n"
      "movz r1, #0x1\n"
      "movz r0, #0x0\n"
      "addws tmp, r2, r1\n"
      "adcws tmp, r2, r0\n"
      "adcw r0, r0, r0\n"
      "ret\n");
}
| |
// 32-bit subtract-with-borrow: the word result (-1 as uint32) is
// zero-extended into the 64-bit return register, hence 0xFFFFFFFF.
ASSEMBLER_TEST_GENERATE(WordSubCarryInOut, assembler) {
  __ LoadImmediate(R1, 1);
  __ LoadImmediate(R0, 0);
  __ subsw(IP0, R0, Operand(R1));  // c_out = 1.
  __ sbcsw(IP0, R0, R0);           // c_in = 1, c_out = 1.
  __ sbcw(R0, R0, R0);             // c_in = 1.
  __ ret();
}

ASSEMBLER_TEST_RUN(WordSubCarryInOut, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(0x0FFFFFFFF, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0x1\n"
      "movz r0, #0x0\n"
      "subws tmp, r0, r1\n"
      "sbcws tmp, r0, r0\n"
      "sbcw r0, r0, r0\n"
      "ret\n");
}
| |
// 32-bit signed-overflow flag: adding carry into INT32_MAX sets V, so the
// csinc keeps R0 at 0.
ASSEMBLER_TEST_GENERATE(WordOverflow, assembler) {
  __ LoadImmediate(R0, 0);
  __ LoadImmediate(R1, 1);
  __ LoadImmediate(R2, 0xFFFFFFFF);
  __ LoadImmediate(R3, 0x7FFFFFFF);
  __ addsw(IP0, R2, Operand(R1));  // c_out = 1.
  __ adcsw(IP0, R3, R0);           // c_in = 1, c_out = 1, v = 1.
  __ csinc(R0, R0, R0, VS);        // R0 = v ? R0 : R0 + 1.
  __ ret();
}

ASSEMBLER_TEST_RUN(WordOverflow, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movz r1, #0x1\n"
      "mov r2, 0xffffffff\n"
      "mov r3, 0x7fffffff\n"
      "addws tmp, r2, r1\n"
      "adcws tmp, r3, r0\n"
      "csinc r0, r0, r0, vs\n"
      "ret\n");
}
| |
// Loads and Stores.
// str with pre-index writeback and ldr with post-index writeback: SP is
// decremented before the store and incremented after the load, so it ends
// where it started.
ASSEMBLER_TEST_GENERATE(SimpleLoadStore, assembler) {
  __ SetupDartSP();

  __ sub(CSP, CSP,
         Operand(2 * target::kWordSize));  // Must not access beyond CSP.

  __ movz(R0, Immediate(43), 0);
  __ movz(R1, Immediate(42), 0);
  __ str(R1, Address(SP, -1 * target::kWordSize, Address::PreIndex));
  __ ldr(R0, Address(SP, 1 * target::kWordSize, Address::PostIndex));
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(SimpleLoadStore, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "sub csp, csp, #0x10\n"
      "movz r0, #0x2b\n"
      "movz r1, #0x2a\n"
      "str r1, [sp, #-8]!\n"
      "ldr r0, [sp], #8 !\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// Load/store through a register carrying a heap-style tag bit: R2 = SP + 1,
// accessed with a -1 offset so the effective address is still SP.
ASSEMBLER_TEST_GENERATE(SimpleLoadStoreHeapTag, assembler) {
  __ SetupDartSP();
  __ movz(R0, Immediate(43), 0);
  __ movz(R1, Immediate(42), 0);
  __ add(R2, SP, Operand(1));
  __ str(R1, Address(R2, -1));
  __ ldr(R0, Address(R2, -1));
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(SimpleLoadStoreHeapTag, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "movz r0, #0x2b\n"
      "movz r1, #0x2a\n"
      "add r2, sp, #0x1\n"
      "str r1, [r2, #-1]\n"
      "ldr r0, [r2, #-1]\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// Exercise the extremes of the signed 9-bit pre/post-index immediate:
// -256 (largest negative) on the store, +248 (largest positive word-aligned)
// on the load, then a final add to rebalance SP.
ASSEMBLER_TEST_GENERATE(LoadStoreLargeIndex, assembler) {
  __ SetupDartSP();

  __ sub(CSP, CSP,
         Operand(32 * target::kWordSize));  // Must not access beyond CSP.

  __ movz(R0, Immediate(43), 0);
  __ movz(R1, Immediate(42), 0);
  // Largest negative offset that can fit in the signed 9-bit immediate field.
  __ str(R1, Address(SP, -32 * target::kWordSize, Address::PreIndex));
  // Largest positive kWordSize aligned offset that we can fit.
  __ ldr(R0, Address(SP, 31 * target::kWordSize, Address::PostIndex));
  // Correction.
  __ add(SP, SP, Operand(target::kWordSize));  // Restore SP.
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(LoadStoreLargeIndex, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "sub csp, csp, #0x100\n"
      "movz r0, #0x2b\n"
      "movz r1, #0x2a\n"
      "str r1, [sp, #-256]!\n"
      "ldr r0, [sp], #248 !\n"
      "add sp, sp, #0x8\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// Large scaled unsigned offset (#4096) on a plain-offset store; SP is
// lowered first so the access stays within the reserved area.
ASSEMBLER_TEST_GENERATE(LoadStoreLargeOffset, assembler) {
  __ SetupDartSP();
  __ movz(R0, Immediate(43), 0);
  __ movz(R1, Immediate(42), 0);
  __ sub(SP, SP, Operand(512 * target::kWordSize));
  __ andi(CSP, SP, Immediate(~15));  // Must not access beyond CSP.
  __ str(R1, Address(SP, 512 * target::kWordSize, Address::Offset));
  __ add(SP, SP, Operand(512 * target::kWordSize));
  __ ldr(R0, Address(SP));
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(LoadStoreLargeOffset, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "movz r0, #0x2b\n"
      "movz r1, #0x2a\n"
      "sub sp, sp, #0x1000\n"
      "and csp, sp, 0xfffffffffffffff0\n"
      "str r1, [sp, #4096]\n"
      "add sp, sp, #0x1000\n"
      "ldr r0, [sp]\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// Register-offset addressing with SXTW extension: the 32-bit value -8 in R2
// is sign-extended and added to SP, addressing SP - kWordSize.
ASSEMBLER_TEST_GENERATE(LoadStoreExtReg, assembler) {
  __ SetupDartSP();
  __ movz(R0, Immediate(43), 0);
  __ movz(R1, Immediate(42), 0);
  __ movz(R2, Immediate(0xfff8), 0);
  __ movk(R2, Immediate(0xffff), 1);  // R2 <- -8 (int32_t).
  // This should sign extend R2, and add to SP to get address,
  // i.e. SP - kWordSize.
  __ str(R1, Address(SP, R2, SXTW));
  __ sub(SP, SP, Operand(target::kWordSize));
  __ andi(CSP, SP, Immediate(~15));  // Must not access beyond CSP.
  __ ldr(R0, Address(SP));
  __ add(SP, SP, Operand(target::kWordSize));
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(LoadStoreExtReg, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "movz r0, #0x2b\n"
      "movz r1, #0x2a\n"
      "movz r2, #0xfff8\n"
      "movk r2, #0xffff lsl 16\n"
      "str r1, [sp, r2 sxtw]\n"
      "sub sp, sp, #0x8\n"
      "and csp, sp, 0xfffffffffffffff0\n"
      "ldr r0, [sp]\n"
      "add sp, sp, #0x8\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// Register-offset addressing with scaling: the index register (R2 == 10) is
// scaled by the access size, addressing SP + 10 * kWordSize.
ASSEMBLER_TEST_GENERATE(LoadStoreScaledReg, assembler) {
  __ SetupDartSP();
  __ movz(R0, Immediate(43), 0);
  __ movz(R1, Immediate(42), 0);
  __ movz(R2, Immediate(10), 0);
  __ sub(SP, SP, Operand(10 * target::kWordSize));
  __ andi(CSP, SP, Immediate(~15));  // Must not access beyond CSP.
  // Store R1 into SP + R2 * kWordSize.
  __ str(R1, Address(SP, R2, UXTX, Address::Scaled));
  __ ldr(R0, Address(SP, R2, UXTX, Address::Scaled));
  __ add(SP, SP, Operand(10 * target::kWordSize));
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(LoadStoreScaledReg, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "movz r0, #0x2b\n"
      "movz r1, #0x2a\n"
      "movz r2, #0xa\n"
      "sub sp, sp, #0x50\n"
      "and csp, sp, 0xfffffffffffffff0\n"
      "str r1, [sp, r2 uxtx scaled]\n"
      "ldr r0, [sp, r2 uxtx scaled]\n"
      "add sp, sp, #0x50\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// kFourBytes loads are sign-extending (ldrsw): storing 0xffffffff as a word
// and loading it back must yield -1 in the 64-bit register.
ASSEMBLER_TEST_GENERATE(LoadSigned32Bit, assembler) {
  __ SetupDartSP();

  __ sub(CSP, CSP,
         Operand(2 * target::kWordSize));  // Must not access beyond CSP.

  __ LoadImmediate(R1, 0xffffffff);
  __ str(R1, Address(SP, -4, Address::PreIndex, kFourBytes), kFourBytes);
  __ ldr(R0, Address(SP), kFourBytes);
  __ ldr(R1, Address(SP, 4, Address::PostIndex, kFourBytes), kFourBytes);
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(LoadSigned32Bit, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(-1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x10\n"
      "mov r1, 0xffffffff\n"
      "strw r1, [sp, #-4]!\n"
      "ldrsw r0, [sp]\n"
      "ldrsw r1, [sp], #4 !\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// stp/ldp with pair pre/post-index writeback; returns 43 - 42 == 1 to show
// the pair order round-trips correctly.
ASSEMBLER_TEST_GENERATE(SimpleLoadStorePair, assembler) {
  __ SetupDartSP();

  __ sub(CSP, CSP,
         Operand(2 * target::kWordSize));  // Must not access beyond CSP.

  __ LoadImmediate(R2, 43);
  __ LoadImmediate(R3, 42);
  __ stp(R2, R3, Address(SP, -2 * target::kWordSize, Address::PairPreIndex));
  __ ldp(R0, R1, Address(SP, 2 * target::kWordSize, Address::PairPostIndex));
  __ sub(R0, R0, Operand(R1));
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(SimpleLoadStorePair, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "sub csp, csp, #0x10\n"
      "movz r2, #0x2b\n"
      "movz r3, #0x2a\n"
      "stp r2, r3, [sp, #-16]!\n"
      "ldp r0, r1, [sp], #16 !\n"
      "sub r0, r0, r1\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// stp/ldp with a plain pair offset (no writeback).
ASSEMBLER_TEST_GENERATE(LoadStorePairOffset, assembler) {
  __ SetupDartSP();
  __ LoadImmediate(R2, 43);
  __ LoadImmediate(R3, 42);
  __ sub(SP, SP, Operand(4 * target::kWordSize));
  __ andi(CSP, SP, Immediate(~15));  // Must not access beyond CSP.
  __ stp(R2, R3, Address::Pair(SP, 2 * target::kWordSize));
  __ ldp(R0, R1, Address::Pair(SP, 2 * target::kWordSize));
  __ add(SP, SP, Operand(4 * target::kWordSize));
  __ sub(R0, R0, Operand(R1));
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(LoadStorePairOffset, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "movz r2, #0x2b\n"
      "movz r3, #0x2a\n"
      "sub sp, sp, #0x20\n"
      "and csp, sp, 0xfffffffffffffff0\n"
      "stp r2, r3, [sp, #16]\n"
      "ldp r0, r1, [sp, #16]\n"
      "add sp, sp, #0x20\n"
      "sub r0, r0, r1\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
| ASSEMBLER_TEST_GENERATE(PushRegisterPair, assembler) { |
| __ SetupDartSP(); |
| __ LoadImmediate(R2, 12); |
| __ LoadImmediate(R3, 21); |
| __ PushRegisterPair(R2, R3); |
| __ Pop(R0); |
| __ Pop(R1); |
| __ RestoreCSP(); |
| __ ret(); |
| } |
| |
| ASSEMBLER_TEST_RUN(PushRegisterPair, test) { |
| EXPECT(test != NULL); |
| typedef int (*PushRegisterPair)() DART_UNUSED; |
| EXPECT_EQ(12, EXECUTE_TEST_CODE_INT64(PushRegisterPair, test->entry())); |
| EXPECT_DISASSEMBLY( |
| "mov sp, csp\n" |
| "sub csp, csp, #0x1000\n" |
| "movz r2, #0xc\n" |
| "movz r3, #0x15\n" |
| "stp r2, r3, [sp, #-16]!\n" |
| "ldr r0, [sp], #8 !\n" |
| "ldr r1, [sp], #8 !\n" |
| "mov csp, sp\n" |
| "ret\n"); |
| } |
| |
| ASSEMBLER_TEST_GENERATE(PushRegisterPairReversed, assembler) { |
| __ SetupDartSP(); |
| __ LoadImmediate(R3, 12); |
| __ LoadImmediate(R2, 21); |
| __ PushRegisterPair(R3, R2); |
| __ Pop(R0); |
| __ Pop(R1); |
| __ RestoreCSP(); |
| __ ret(); |
| } |
| |
| ASSEMBLER_TEST_RUN(PushRegisterPairReversed, test) { |
| EXPECT(test != NULL); |
| typedef int (*PushRegisterPairReversed)() DART_UNUSED; |
| EXPECT_EQ(12, |
| EXECUTE_TEST_CODE_INT64(PushRegisterPairReversed, test->entry())); |
| EXPECT_DISASSEMBLY( |
| "mov sp, csp\n" |
| "sub csp, csp, #0x1000\n" |
| "movz r3, #0xc\n" |
| "movz r2, #0x15\n" |
| "stp r3, r2, [sp, #-16]!\n" |
| "ldr r0, [sp], #8 !\n" |
| "ldr r1, [sp], #8 !\n" |
| "mov csp, sp\n" |
| "ret\n"); |
| } |
| |
| ASSEMBLER_TEST_GENERATE(PopRegisterPair, assembler) { |
| __ SetupDartSP(); |
| __ LoadImmediate(R2, 12); |
| __ LoadImmediate(R3, 21); |
| __ Push(R3); |
| __ Push(R2); |
| __ PopRegisterPair(R0, R1); |
| __ RestoreCSP(); |
| __ ret(); |
| } |
| |
| ASSEMBLER_TEST_RUN(PopRegisterPair, test) { |
| EXPECT(test != NULL); |
| typedef int (*PopRegisterPair)() DART_UNUSED; |
| EXPECT_EQ(12, EXECUTE_TEST_CODE_INT64(PopRegisterPair, test->entry())); |
| EXPECT_DISASSEMBLY( |
| "mov sp, csp\n" |
| "sub csp, csp, #0x1000\n" |
| "movz r2, #0xc\n" |
| "movz r3, #0x15\n" |
| "str r3, [sp, #-8]!\n" |
| "str r2, [sp, #-8]!\n" |
| "ldp r0, r1, [sp], #16 !\n" |
| "mov csp, sp\n" |
| "ret\n"); |
| } |
| |
| ASSEMBLER_TEST_GENERATE(PopRegisterPairReversed, assembler) { |
| __ SetupDartSP(); |
| __ LoadImmediate(R3, 12); |
| __ LoadImmediate(R2, 21); |
| __ Push(R3); |
| __ Push(R2); |
| __ PopRegisterPair(R1, R0); |
| __ RestoreCSP(); |
| __ ret(); |
| } |
| |
| ASSEMBLER_TEST_RUN(PopRegisterPairReversed, test) { |
| EXPECT(test != NULL); |
| typedef int (*PopRegisterPairReversed)() DART_UNUSED; |
| EXPECT_EQ(12, |
| EXECUTE_TEST_CODE_INT64(PopRegisterPairReversed, test->entry())); |
| EXPECT_DISASSEMBLY( |
| "mov sp, csp\n" |
| "sub csp, csp, #0x1000\n" |
| "movz r3, #0xc\n" |
| "movz r2, #0x15\n" |
| "str r3, [sp, #-8]!\n" |
| "str r2, [sp, #-8]!\n" |
| "ldp r1, r0, [sp], #16 !\n" |
| "mov csp, sp\n" |
| "ret\n"); |
| } |
| |
// Exclusive load/store (ldxr/stxr) in the classic retry loop: the stxr
// succeeds (writes 0 to TMP) and 42 replaces 40 on the stack.
ASSEMBLER_TEST_GENERATE(Semaphore, assembler) {
  __ SetupDartSP();
  __ movz(R0, Immediate(40), 0);
  __ movz(R1, Immediate(42), 0);
  __ Push(R0);
  Label retry;
  __ Bind(&retry);
  __ ldxr(R0, SP);
  __ stxr(TMP, R1, SP);  // TMP == 0 on success.
  __ cmp(TMP, Operand(0));
  __ b(&retry, NE);  // NE if context switch occurred between ldrex and strex.
  __ Pop(R0);        // 42
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(Semaphore, test) {
  EXPECT(test != NULL);
  typedef intptr_t (*Semaphore)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Semaphore, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "movz r0, #0x28\n"
      "movz r1, #0x2a\n"
      "str r0, [sp, #-8]!\n"
      "ldxr r0, sp\n"
      "stxr tmp, r1, sp\n"
      "cmp tmp, #0x0\n"
      "bne -12\n"
      "ldr r0, [sp], #8 !\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// clrex between ldxr and stxr clears the exclusive monitor, so the stxr
// fails (TMP == 1) and the stored value is not written: 40 + 1 == 41.
ASSEMBLER_TEST_GENERATE(FailedSemaphore, assembler) {
  __ SetupDartSP();
  __ movz(R0, Immediate(40), 0);
  __ movz(R1, Immediate(42), 0);
  __ Push(R0);
  __ ldxr(R0, SP);
  __ clrex();            // Simulate a context switch.
  __ stxr(TMP, R1, SP);  // TMP == 1 on failure.
  __ Pop(R0);            // 40
  __ add(R0, R0, Operand(TMP));
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(FailedSemaphore, test) {
  EXPECT(test != NULL);
  typedef intptr_t (*FailedSemaphore)() DART_UNUSED;
  EXPECT_EQ(41, EXECUTE_TEST_CODE_INT64(FailedSemaphore, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "movz r0, #0x28\n"
      "movz r1, #0x2a\n"
      "str r0, [sp, #-8]!\n"
      "ldxr r0, sp\n"
      "clrex\n"
      "stxr tmp, r1, sp\n"
      "ldr r0, [sp], #8 !\n"
      "add r0, r0, tmp\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// 32-bit exclusive load/store (ldxrw/stxrw): only the low word of the
// stacked value is swapped; the high word must be untouched.
ASSEMBLER_TEST_GENERATE(Semaphore32, assembler) {
  __ SetupDartSP();
  __ movz(R0, Immediate(40), 0);
  __ add(R0, R0, Operand(R0, LSL, 32));  // R0 <- 40 | (40 << 32).
  __ Push(R0);

  __ movz(R0, Immediate(40), 0);
  __ movz(R1, Immediate(42), 0);

  Label retry;
  __ Bind(&retry);
  __ ldxr(R0, SP, kFourBytes);
  // 32 bit operation should ignore the high word of R0 that was pushed on the
  // stack.
  __ stxr(TMP, R1, SP, kFourBytes);  // TMP == 0 on success.
  __ cmp(TMP, Operand(0));
  __ b(&retry, NE);  // NE if context switch occurred between ldrex and strex.
  __ Pop(R0);        // 42 in the low word, 40 unchanged in the high word.
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(Semaphore32, test) {
  EXPECT(test != NULL);
  typedef intptr_t (*Semaphore32)() DART_UNUSED;
  // Lower word has been atomically switched from 40 to 42, whereas upper word
  // is unchanged at 40.
  EXPECT_EQ(42 + (DART_INT64_C(40) << 32),
            EXECUTE_TEST_CODE_INT64(Semaphore32, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "movz r0, #0x28\n"
      "add r0, r0, r0 lsl #32\n"
      "str r0, [sp, #-8]!\n"
      "movz r0, #0x28\n"
      "movz r1, #0x2a\n"
      "ldxrw r0, sp\n"
      "stxrw tmp, r1, sp\n"
      "cmp tmp, #0x0\n"
      "bne -12\n"
      "ldr r0, [sp], #8 !\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// 32-bit exclusive store after clrex fails: low word keeps 40 plus the
// failure code (1); high word stays 40.
ASSEMBLER_TEST_GENERATE(FailedSemaphore32, assembler) {
  __ SetupDartSP();
  __ movz(R0, Immediate(40), 0);
  __ add(R0, R0, Operand(R0, LSL, 32));  // R0 <- 40 | (40 << 32).
  __ Push(R0);

  __ movz(R0, Immediate(40), 0);
  __ movz(R1, Immediate(42), 0);

  __ ldxr(R0, SP, kFourBytes);
  __ clrex();                        // Simulate a context switch.
  __ stxr(TMP, R1, SP, kFourBytes);  // TMP == 1 on failure.
  __ Pop(R0);                        // 40
  __ add(R0, R0, Operand(TMP));
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(FailedSemaphore32, test) {
  EXPECT(test != NULL);
  typedef intptr_t (*FailedSemaphore32)() DART_UNUSED;
  // Lower word has had the failure code (1) added to it. Upper word is
  // unchanged at 40.
  EXPECT_EQ(41 + (DART_INT64_C(40) << 32),
            EXECUTE_TEST_CODE_INT64(FailedSemaphore32, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "movz r0, #0x28\n"
      "add r0, r0, r0 lsl #32\n"
      "str r0, [sp, #-8]!\n"
      "movz r0, #0x28\n"
      "movz r1, #0x2a\n"
      "ldxrw r0, sp\n"
      "clrex\n"
      "stxrw tmp, r1, sp\n"
      "ldr r0, [sp], #8 !\n"
      "add r0, r0, tmp\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
// ldar/stlr (load-acquire / store-release) in 64- and 32-bit widths.
// Returns 0x42 if all four sub-checks pass, 0x84 if any fails.
ASSEMBLER_TEST_GENERATE(LoadAcquireStoreRelease, assembler) {
  // We cannot really test that ldar/stlr have the barrier behavior, but at
  // least we can test that the load/store behavior is correct.
  Label failed, done;

  __ SetupDartSP();
  __ EnterFrame(0);

  // Test 64-bit ldar.
  __ PushImmediate(0x1122334455667788);
  __ ldar(R1, SP, kEightBytes);
  __ CompareImmediate(R1, 0x1122334455667788);
  __ BranchIf(NOT_EQUAL, &failed);
  __ Drop(1);

  // Test 32-bit ldar - must zero extend.
  __ PushImmediate(0x1122334455667788);
  __ ldar(R1, SP, kFourBytes);
  __ CompareImmediate(R1, 0x55667788);
  __ BranchIf(NOT_EQUAL, &failed);
  __ Drop(1);

  // Test 64-bit stlr.
  __ PushImmediate(0);
  __ LoadImmediate(R1, 0x1122334455667788);
  __ stlr(R1, SP, kEightBytes);
  __ Pop(R1);
  __ CompareImmediate(R1, 0x1122334455667788);
  __ BranchIf(NOT_EQUAL, &failed);

  // Test 32-bit stlr: only the low word is stored over the pushed zero.
  __ PushImmediate(0);
  __ LoadImmediate(R1, 0x1122334455667788);
  __ stlr(R1, SP, kFourBytes);
  __ Pop(R1);
  __ CompareImmediate(R1, 0x55667788);
  __ BranchIf(NOT_EQUAL, &failed);

  __ LoadImmediate(R0, 0x42);
  __ b(&done);

  __ Bind(&failed);
  __ LoadImmediate(R0, 0x84);

  __ Bind(&done);
  __ LeaveFrame();
  __ RestoreCSP();
  __ ret();
}

ASSEMBLER_TEST_RUN(LoadAcquireStoreRelease, test) {
  typedef intptr_t (*LoadAcquireStoreRelease)() DART_UNUSED;
  EXPECT_EQ(0x42,
            EXECUTE_TEST_CODE_INT64(LoadAcquireStoreRelease, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov sp, csp\n"
      "sub csp, csp, #0x1000\n"
      "stp fp, lr, [sp, #-16]!\n"
      "mov fp, sp\n"
      "movz tmp, #0x7788\n"
      "movk tmp, #0x5566 lsl 16\n"
      "movk tmp, #0x3344 lsl 32\n"
      "movk tmp, #0x1122 lsl 48\n"
      "str tmp, [sp, #-8]!\n"
      "ldar r1, sp\n"
      "movz tmp2, #0x7788\n"
      "movk tmp2, #0x5566 lsl 16\n"
      "movk tmp2, #0x3344 lsl 32\n"
      "movk tmp2, #0x1122 lsl 48\n"
      "cmp r1, tmp2\n"
      "bne +164\n"
      "add sp, sp, #0x8\n"
      "movz tmp, #0x7788\n"
      "movk tmp, #0x5566 lsl 16\n"
      "movk tmp, #0x3344 lsl 32\n"
      "movk tmp, #0x1122 lsl 48\n"
      "str tmp, [sp, #-8]!\n"
      "ldarw r1, sp\n"
      "movz tmp2, #0x7788\n"
      "movk tmp2, #0x5566 lsl 16\n"
      "cmp r1, tmp2\n"
      "bne +120\n"
      "add sp, sp, #0x8\n"
      "movz tmp, #0x0\n"
      "str tmp, [sp, #-8]!\n"
      "movz r1, #0x7788\n"
      "movk r1, #0x5566 lsl 16\n"
      "movk r1, #0x3344 lsl 32\n"
      "movk r1, #0x1122 lsl 48\n"
      "stlr r1, sp\n"
      "ldr r1, [sp], #8 !\n"
      "movz tmp2, #0x7788\n"
      "movk tmp2, #0x5566 lsl 16\n"
      "movk tmp2, #0x3344 lsl 32\n"
      "movk tmp2, #0x1122 lsl 48\n"
      "cmp r1, tmp2\n"
      "bne +60\n"
      "movz tmp, #0x0\n"
      "str tmp, [sp, #-8]!\n"
      "movz r1, #0x7788\n"
      "movk r1, #0x5566 lsl 16\n"
      "movk r1, #0x3344 lsl 32\n"
      "movk r1, #0x1122 lsl 48\n"
      "stlrw r1, sp\n"
      "ldr r1, [sp], #8 !\n"
      "movz tmp2, #0x7788\n"
      "movk tmp2, #0x5566 lsl 16\n"
      "cmp r1, tmp2\n"
      "bne +12\n"
      "movz r0, #0x42\n"
      "b +8\n"
      "movz r0, #0x84\n"
      "mov sp, fp\n"
      "ldp fp, lr, [sp], #16 !\n"
      "mov csp, sp\n"
      "ret\n");
}
| |
| // Logical register operations. |
// Logical register operations.
// Register-register and: 43 & 42 == 42.
ASSEMBLER_TEST_GENERATE(AndRegs, assembler) {
  __ movz(R1, Immediate(43), 0);
  __ movz(R2, Immediate(42), 0);
  __ and_(R0, R1, Operand(R2));
  __ ret();
}
| |
// Executes AndRegs (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(AndRegs, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0x2b\n"
      "movz r2, #0x2a\n"
      "and r0, r1, r2\n"
      "ret\n");
}
| |
| constexpr uint64_t kU64MinusOne = 0xffffffffffffffffull; |
| constexpr uint64_t kU64MinInt32 = 0xffffffff80000000ull; |
| constexpr uint64_t kU64MaxInt32 = 0x000000007fffffffull; |
| constexpr uint64_t kU64MinInt64 = 0x8000000000000000ull; |
| constexpr uint64_t kU64MaxInt64 = 0x7fffffffffffffffull; |
| |
| #define FOR_EACH_ASR_64_TEST_CONFIG(M) \ |
| M(0ull, 0, 0ull) \ |
| M(1ull, 0, 1ull) \ |
| M(kU64MaxInt32, 0, kU64MaxInt32) \ |
| M(kU64MaxInt64, 0, kU64MaxInt64) \ |
| M(kU64MinInt32, 0, kU64MinInt32) \ |
| M(kU64MinInt64, 0, kU64MinInt64) \ |
| M(0ull, 1, 0ull) \ |
| M(1ull, 1, 0ull) \ |
| M(4ull, 1, 2ull) \ |
| M(0xffffull, 1, 0x7fffull) \ |
| M(0xffffffffull, 1, 0x7fffffffull) \ |
| M(kU64MaxInt32, 1, 0x3fffffffull) \ |
| M(kU64MaxInt64, 1, 0x3fffffffffffffffull) \ |
| M(kU64MinInt32, 1, 0xffffffffc0000000ull) \ |
| M(kU64MinInt64, 1, 0xc000000000000000ull) \ |
| M(kU64MinusOne, 1, kU64MinusOne) \ |
| M(1ull, 2, 0ull) \ |
| M(4ull, 2, 1ull) \ |
| M(0xffffull, 2, 0x3fffull) \ |
| M(0xffffffffull, 2, 0x3fffffffull) \ |
| M(kU64MaxInt32, 2, 0x1fffffffull) \ |
| M(kU64MaxInt64, 2, 0x1fffffffffffffffull) \ |
| M(kU64MinInt32, 2, 0xffffffffe0000000ull) \ |
| M(kU64MinInt64, 2, 0xe000000000000000ull) \ |
| M(kU64MinusOne, 2, kU64MinusOne) \ |
| M(0ull, 31, 0ull) \ |
| M(1ull, 31, 0ull) \ |
| M(4ull, 31, 0ull) \ |
| M(0xffffull, 31, 0ull) \ |
| M(0xffffffffull, 31, 1ull) \ |
| M(kU64MaxInt32, 31, 0ull) \ |
| M(kU64MaxInt64, 31, 0xffffffffull) \ |
| M(kU64MinInt32, 31, kU64MinusOne) \ |
| M(kU64MinInt64, 31, 0xffffffff00000000ull) \ |
| M(kU64MinusOne, 31, kU64MinusOne) \ |
| M(0ull, 32, 0ull) \ |
| M(1ull, 32, 0ull) \ |
| M(4ull, 32, 0ull) \ |
| M(0xffffull, 32, 0ull) \ |
| M(0xffffffffull, 32, 0ull) \ |
| M(kU64MaxInt64, 32, 0x7fffffffull) \ |
| M(kU64MinInt32, 32, kU64MinusOne) \ |
| M(kU64MinInt64, 32, 0xffffffff80000000ull) \ |
| M(kU64MinusOne, 32, kU64MinusOne) \ |
| M(0ull, 62, 0ull) \ |
| M(1ull, 62, 0ull) \ |
| M(4ull, 62, 0ull) \ |
| M(0xffffull, 62, 0ull) \ |
| M(0xffffffffull, 62, 0ull) \ |
| M(kU64MaxInt64, 62, 1ull) \ |
| M(kU64MinInt32, 62, kU64MinusOne) \ |
| M(kU64MinInt64, 62, 0xfffffffffffffffeull) \ |
| M(kU64MinusOne, 62, kU64MinusOne) \ |
| M(0ull, 63, 0ull) \ |
| M(1ull, 63, 0ull) \ |
| M(4ull, 63, 0ull) \ |
| M(0xffffull, 63, 0ull) \ |
| M(0xffffffffull, 63, 0ull) \ |
| M(kU64MaxInt64, 63, 0ull) \ |
| M(kU64MinInt32, 63, kU64MinusOne) \ |
| M(kU64MinInt64, 63, kU64MinusOne) \ |
| M(kU64MinusOne, 63, kU64MinusOne) |
| |
| #define FOR_EACH_LSR_64_TEST_CONFIG(M) \ |
| M(0ull, 0, 0ull) \ |
| M(1ull, 0, 1ull) \ |
| M(kU64MaxInt32, 0, kU64MaxInt32) \ |
| M(kU64MaxInt64, 0, kU64MaxInt64) \ |
| M(kU64MinInt32, 0, kU64MinInt32) \ |
| M(kU64MinInt64, 0, kU64MinInt64) \ |
| M(0ull, 1, 0ull) \ |
| M(1ull, 1, 0ull) \ |
| M(4ull, 1, 2ull) \ |
| M(0xffffull, 1, 0x7fffull) \ |
| M(0xffffffffull, 1, 0x7fffffffull) \ |
| M(kU64MaxInt32, 1, 0x3fffffffull) \ |
| M(kU64MaxInt64, 1, 0x3fffffffffffffffull) \ |
| M(kU64MinInt32, 1, 0x7fffffffc0000000ull) \ |
| M(kU64MinInt64, 1, 0x4000000000000000ull) \ |
| M(kU64MinusOne, 1, 0x7fffffffffffffffull) \ |
| M(1ull, 2, 0ull) \ |
| M(4ull, 2, 1ull) \ |
| M(0xffffull, 2, 0x3fffull) \ |
| M(0xffffffffull, 2, 0x3fffffffull) \ |
| M(kU64MaxInt32, 2, 0x1fffffffull) \ |
| M(kU64MaxInt64, 2, 0x1fffffffffffffffull) \ |
| M(kU64MinInt32, 2, 0x3fffffffe0000000ull) \ |
| M(kU64MinInt64, 2, 0x2000000000000000ull) \ |
| M(kU64MinusOne, 2, 0x3fffffffffffffffull) \ |
| M(0ull, 31, 0ull) \ |
| M(1ull, 31, 0ull) \ |
| M(4ull, 31, 0ull) \ |
| M(0xffffull, 31, 0ull) \ |
| M(0xffffffffull, 31, 1ull) \ |
| M(kU64MaxInt32, 31, 0ull) \ |
| M(kU64MaxInt64, 31, 0xffffffffull) \ |
| M(kU64MinInt32, 31, 0x1ffffffffull) \ |
| M(kU64MinInt64, 31, 0x100000000ull) \ |
| M(kU64MinusOne, 31, 0x1ffffffffull) \ |
| M(0ull, 32, 0ull) \ |
| M(1ull, 32, 0ull) \ |
| M(4ull, 32, 0ull) \ |
| M(0xffffull, 32, 0ull) \ |
| M(0xffffffffull, 32, 0ull) \ |
| M(kU64MaxInt64, 32, 0x7fffffffull) \ |
| M(kU64MinInt32, 32, 0xffffffffull) \ |
| M(kU64MinInt64, 32, 0x80000000ull) \ |
| M(kU64MinusOne, 32, 0xffffffffull) \ |
| M(0ull, 62, 0ull) \ |
| M(1ull, 62, 0ull) \ |
| M(4ull, 62, 0ull) \ |
| M(0xffffull, 62, 0ull) \ |
| M(0xffffffffull, 62, 0ull) \ |
| M(kU64MaxInt64, 62, 1ull) \ |
| M(kU64MinInt32, 62, 3ull) \ |
| M(kU64MinInt64, 62, 2ull) \ |
| M(kU64MinusOne, 62, 3ull) \ |
| M(0ull, 63, 0ull) \ |
| M(1ull, 63, 0ull) \ |
| M(4ull, 63, 0ull) \ |
| M(0xffffull, 63, 0ull) \ |
| M(0xffffffffull, 63, 0ull) \ |
| M(kU64MaxInt64, 63, 0ull) \ |
| M(kU64MinInt32, 63, 1ull) \ |
| M(kU64MinInt64, 63, 1ull) \ |
| M(kU64MinusOne, 63, 1ull) |
| |
| #define FOR_EACH_LSL_64_TEST_CONFIG(M) \ |
| M(0ull, 0, 0ull) \ |
| M(1ull, 0, 1ull) \ |
| M(kU64MaxInt32, 0, kU64MaxInt32) \ |
| M(kU64MaxInt64, 0, kU64MaxInt64) \ |
| M(kU64MinInt32, 0, kU64MinInt32) \ |
| M(kU64MinInt64, 0, kU64MinInt64) \ |
| M(0ull, 1, 0ull) \ |
| M(1ull, 1, 2ull) \ |
| M(4ull, 1, 8ull) \ |
| M(0xffffull, 1, 0x1fffeull) \ |
| M(0xffffffffull, 1, 0x1fffffffeull) \ |
| M(kU64MaxInt32, 1, 0xfffffffeull) \ |
| M(kU64MaxInt64, 1, 0xfffffffffffffffeull) \ |
| M(kU64MinInt32, 1, 0xffffffff00000000ull) \ |
| M(kU64MinInt64, 1, 0ull) \ |
| M(kU64MinusOne, 1, 0xfffffffffffffffeull) \ |
| M(1ull, 2, 4ull) \ |
| M(4ull, 2, 16ull) \ |
| M(0xffffull, 2, 0x3fffcull) \ |
| M(0xffffffffull, 2, 0x3fffffffcull) \ |
| M(kU64MaxInt32, 2, 0x1fffffffcull) \ |
| M(kU64MaxInt64, 2, 0xfffffffffffffffcull) \ |
| M(kU64MinInt32, 2, 0xfffffffe00000000ull) \ |
| M(kU64MinInt64, 2, 0ull) \ |
| M(kU64MinusOne, 2, 0xfffffffffffffffcull) \ |
| M(0ull, 31, 0ull) \ |
| M(1ull, 31, 0x0000000080000000ull) \ |
| M(4ull, 31, 0x0000000200000000ull) \ |
| M(0xffffull, 31, 0x00007fff80000000ull) \ |
| M(0xffffffffull, 31, 0x7fffffff80000000ull) \ |
| M(kU64MaxInt32, 31, 0x3fffffff80000000ull) \ |
| M(kU64MaxInt64, 31, 0xffffffff80000000ull) \ |
| M(kU64MinInt32, 31, 0xc000000000000000ull) \ |
| M(kU64MinInt64, 31, 0ull) \ |
| M(kU64MinusOne, 31, 0xffffffff80000000ull) \ |
| M(0ull, 32, 0ull) \ |
| M(1ull, 32, 0x0000000100000000ull) \ |
| M(4ull, 32, 0x0000000400000000ull) \ |
| M(0xffffull, 32, 0x0000ffff00000000ull) \ |
| M(0xffffffffull, 32, 0xffffffff00000000ull) \ |
| M(kU64MaxInt64, 32, 0xffffffff00000000ull) \ |
| M(kU64MinInt32, 32, 0x8000000000000000ull) \ |
| M(kU64MinInt64, 32, 0ull) \ |
| M(kU64MinusOne, 32, 0xffffffff00000000ull) \ |
| M(0ull, 62, 0ull) \ |
| M(1ull, 62, 0x4000000000000000ull) \ |
| M(4ull, 62, 0ull) \ |
| M(0xffffull, 62, 0xc000000000000000ull) \ |
| M(0xffffffffull, 62, 0xc000000000000000ull) \ |
| M(kU64MaxInt64, 62, 0xc000000000000000ull) \ |
| M(kU64MinInt32, 62, 0ull) \ |
| M(kU64MinInt64, 62, 0ull) \ |
| M(kU64MinusOne, 62, 0xc000000000000000ull) \ |
| M(0ull, 63, 0ull) \ |
| M(1ull, 63, 0x8000000000000000ull) \ |
| M(4ull, 63, 0ull) \ |
| M(0xffffull, 63, 0x8000000000000000ull) \ |
| M(0xffffffffull, 63, 0x8000000000000000ull) \ |
| M(kU64MaxInt64, 63, 0x8000000000000000ull) \ |
| M(kU64MinInt32, 63, 0ull) \ |
| M(kU64MinInt64, 63, 0ull) \ |
| M(kU64MinusOne, 63, 0x8000000000000000ull) |
| |
| #define SHIFT_64_IMMEDIATE_TEST(macro_op, val, shift, expected) \ |
| ASSEMBLER_TEST_GENERATE(macro_op##_##val##_##shift, assembler) { \ |
| __ LoadImmediate(R1, bit_cast<int64_t>(val)); \ |
| __ macro_op(R0, R1, (shift)); \ |
| __ ret(); \ |
| } \ |
| \ |
| ASSEMBLER_TEST_RUN(macro_op##_##val##_##shift, test) { \ |
| typedef int64_t (*Int64Return)() DART_UNUSED; \ |
| EXPECT_EQ((expected), bit_cast<uint64_t>(EXECUTE_TEST_CODE_INT64( \ |
| Int64Return, test->entry()))); \ |
| } |
| |
| #define ASR_64_IMMEDIATE_TEST(val, shift, expected) \ |
| SHIFT_64_IMMEDIATE_TEST(AsrImmediate, val, shift, expected) |
| |
| #define LSR_64_IMMEDIATE_TEST(val, shift, expected) \ |
| SHIFT_64_IMMEDIATE_TEST(LsrImmediate, val, shift, expected) |
| |
| #define LSL_64_IMMEDIATE_TEST(val, shift, expected) \ |
| SHIFT_64_IMMEDIATE_TEST(LslImmediate, val, shift, expected) |
| |
| FOR_EACH_ASR_64_TEST_CONFIG(ASR_64_IMMEDIATE_TEST) |
| FOR_EACH_LSR_64_TEST_CONFIG(LSR_64_IMMEDIATE_TEST) |
| FOR_EACH_LSL_64_TEST_CONFIG(LSL_64_IMMEDIATE_TEST) |
| |
| #undef LSL_64_IMMEDIATE_TEST |
| #undef LSR_64_IMMEDIATE_TEST |
| #undef ASR_64_IMMEDIATE_TEST |
| #undef SHIFT_64_IMMEDIATE_TEST |
| #undef FOR_EACH_LSL_64_TESTS_LIST |
| #undef FOR_EACH_LSR_64_TESTS_LIST |
| #undef FOR_EACH_ASR_64_TESTS_LIST |
| |
| constexpr uint32_t kU32MinusOne = 0xffffffffu; |
| constexpr uint32_t kU32MinInt32 = 0x80000000u; |
| constexpr uint32_t kU32MaxInt32 = 0x7fffffffu; |
| |
| #define FOR_EACH_ASR_32_TEST_CONFIG(M) \ |
| M(0u, 0, 0u) \ |
| M(1u, 0, 1u) \ |
| M(kU32MaxInt32, 0, kU32MaxInt32) \ |
| M(kU32MinInt32, 0, kU32MinInt32) \ |
| M(0u, 1, 0u) \ |
| M(1u, 1, 0u) \ |
| M(4u, 1, 2u) \ |
| M(0xffffu, 1, 0x7fffu) \ |
| M(0xffffffffu, 1, 0xffffffffu) \ |
| M(kU32MaxInt32, 1, 0x3fffffffu) \ |
| M(kU32MinInt32, 1, 0xc0000000u) \ |
| M(kU32MinusOne, 1, 0xffffffffu) \ |
| M(1u, 2, 0u) \ |
| M(4u, 2, 1u) \ |
| M(0xffffu, 2, 0x3fffu) \ |
| M(0xffffffffu, 2, 0xffffffffu) \ |
| M(kU32MaxInt32, 2, 0x1fffffffu) \ |
| M(kU32MinInt32, 2, 0xe0000000u) \ |
| M(kU32MinusOne, 2, kU32MinusOne) \ |
| M(0u, 31, 0u) \ |
| M(1u, 31, 0u) \ |
| M(4u, 31, 0u) \ |
| M(0xffffu, 31, 0u) \ |
| M(0xffffffffu, 31, 0xffffffffu) \ |
| M(kU32MaxInt32, 31, 0u) \ |
| M(kU32MinInt32, 31, kU32MinusOne) \ |
| M(kU32MinusOne, 31, kU32MinusOne) |
| |
| #define FOR_EACH_LSR_32_TEST_CONFIG(M) \ |
| M(0u, 0, 0u) \ |
| M(1u, 0, 1u) \ |
| M(kU32MaxInt32, 0, kU32MaxInt32) \ |
| M(kU32MinInt32, 0, kU32MinInt32) \ |
| M(0u, 1, 0u) \ |
| M(1u, 1, 0u) \ |
| M(4u, 1, 2u) \ |
| M(0xffffu, 1, 0x7fffu) \ |
| M(0xffffffffu, 1, 0x7fffffffu) \ |
| M(kU32MaxInt32, 1, 0x3fffffffu) \ |
| M(kU32MinInt32, 1, 0x40000000u) \ |
| M(kU32MinusOne, 1, 0x7fffffffu) \ |
| M(1u, 2, 0u) \ |
| M(4u, 2, 1u) \ |
| M(0xffffu, 2, 0x3fffu) \ |
| M(0xffffffffu, 2, 0x3fffffffu) \ |
| M(kU32MaxInt32, 2, 0x1fffffffu) \ |
| M(kU32MinInt32, 2, 0x20000000u) \ |
| M(kU32MinusOne, 2, 0x3fffffffu) \ |
| M(0u, 31, 0u) \ |
| M(1u, 31, 0u) \ |
| M(4u, 31, 0u) \ |
| M(0xffffu, 31, 0u) \ |
| M(0xffffffffu, 31, 1u) \ |
| M(kU32MaxInt32, 31, 0u) \ |
| M(kU32MinInt32, 31, 1u) \ |
| M(kU32MinusOne, 31, 1u) |
| |
| #define FOR_EACH_LSL_32_TEST_CONFIG(M) \ |
| M(0u, 0, 0u) \ |
| M(1u, 0, 1u) \ |
| M(kU32MaxInt32, 0, kU32MaxInt32) \ |
| M(kU32MinInt32, 0, kU32MinInt32) \ |
| M(0u, 1, 0u) \ |
| M(1u, 1, 2u) \ |
| M(4u, 1, 8u) \ |
| M(0xffffu, 1, 0x1fffeu) \ |
| M(0xffffffffu, 1, 0xfffffffeu) \ |
| M(kU32MaxInt32, 1, 0xfffffffeu) \ |
| M(kU32MinInt32, 1, 0x00000000u) \ |
| M(kU32MinusOne, 1, 0xfffffffeu) \ |
| M(1u, 2, 4u) \ |
| M(4u, 2, 16u) \ |
| M(0xffffu, 2, 0x3fffcu) \ |
| M(0xffffffffu, 2, 0xfffffffcu) \ |
| M(kU32MaxInt32, 2, 0xfffffffcu) \ |
| M(kU32MinInt32, 2, 0x00000000u) \ |
| M(kU32MinusOne, 2, 0xfffffffcu) \ |
| M(0u, 31, 0u) \ |
| M(1u, 31, 0x80000000u) \ |
| M(4u, 31, 0x00000000u) \ |
| M(0xffffu, 31, 0x80000000u) \ |
| M(0xffffffffu, 31, 0x80000000u) \ |
| M(kU32MaxInt32, 31, 0x80000000u) \ |
| M(kU32MinInt32, 31, 0x00000000u) \ |
| M(kU32MinusOne, 31, 0x80000000u) |
| |
| #define SHIFT_32_IMMEDIATE_TEST(macro_op, val, shift, expected) \ |
| ASSEMBLER_TEST_GENERATE(macro_op##a_##val##_##shift, assembler) { \ |
| __ LoadImmediate(R1, bit_cast<int32_t>(val)); \ |
| __ macro_op(R0, R1, (shift), kFourBytes); \ |
| __ ret(); \ |
| } \ |
| \ |
| ASSEMBLER_TEST_RUN(macro_op##a_##val##_##shift, test) { \ |
| typedef int32_t (*Int32Return)() DART_UNUSED; \ |
| EXPECT_EQ((expected), bit_cast<uint32_t>((int32_t)EXECUTE_TEST_CODE_INT64( \ |
| Int32Return, test->entry()))); \ |
| } |
| |
| #define ASR_32_IMMEDIATE_TEST(val, shift, expected) \ |
| SHIFT_32_IMMEDIATE_TEST(AsrImmediate, val, shift, expected) |
| |
| #define LSR_32_IMMEDIATE_TEST(val, shift, expected) \ |
| SHIFT_32_IMMEDIATE_TEST(LsrImmediate, val, shift, expected) |
| |
| #define LSL_32_IMMEDIATE_TEST(val, shift, expected) \ |
| SHIFT_32_IMMEDIATE_TEST(LslImmediate, val, shift, expected) |
| |
| FOR_EACH_ASR_32_TEST_CONFIG(ASR_32_IMMEDIATE_TEST) |
| FOR_EACH_LSR_32_TEST_CONFIG(LSR_32_IMMEDIATE_TEST) |
| FOR_EACH_LSL_32_TEST_CONFIG(LSL_32_IMMEDIATE_TEST) |
| |
| #undef LSL_32_IMMEDIATE_TEST |
| #undef LSR_32_IMMEDIATE_TEST |
| #undef ASR_32_IMMEDIATE_TEST |
| #undef SHIFT_32_IMMEDIATE_TEST |
| #undef FOR_EACH_LSL_32_TESTS_LIST |
| #undef FOR_EACH_LSR_32_TESTS_LIST |
| #undef FOR_EACH_ASR_32_TESTS_LIST |
| |
// Emits 64-bit and 32-bit lsl-immediate encodings across small and
// near-maximum shift amounts. Disassembly-only test: the run side never
// executes this code, so no ret() is emitted.
ASSEMBLER_TEST_GENERATE(LslImmediate, assembler) {
  __ LslImmediate(R0, R0, 1);
  __ LslImmediate(R0, R0, 2);
  __ LslImmediate(R0, R0, 3);
  __ LslImmediate(R0, R0, 4);
  __ LslImmediate(R0, R0, 60);
  __ LslImmediate(R0, R0, 61);
  __ LslImmediate(R0, R0, 62);
  __ LslImmediate(R0, R0, 63);
  __ LslImmediate(R0, R0, 1, kFourBytes);
  __ LslImmediate(R0, R0, 2, kFourBytes);
  __ LslImmediate(R0, R0, 3, kFourBytes);
  __ LslImmediate(R0, R0, 4, kFourBytes);
  __ LslImmediate(R0, R0, 28, kFourBytes);
  __ LslImmediate(R0, R0, 29, kFourBytes);
  __ LslImmediate(R0, R0, 30, kFourBytes);
  __ LslImmediate(R0, R0, 31, kFourBytes);
}
| |
// Checks only the listing (lsl vs. lslw for the 32-bit forms); the code
// is never executed.
ASSEMBLER_TEST_RUN(LslImmediate, test) {
  EXPECT_DISASSEMBLY(
      "lsl r0, r0, #1\n"
      "lsl r0, r0, #2\n"
      "lsl r0, r0, #3\n"
      "lsl r0, r0, #4\n"
      "lsl r0, r0, #60\n"
      "lsl r0, r0, #61\n"
      "lsl r0, r0, #62\n"
      "lsl r0, r0, #63\n"
      "lslw r0, r0, #1\n"
      "lslw r0, r0, #2\n"
      "lslw r0, r0, #3\n"
      "lslw r0, r0, #4\n"
      "lslw r0, r0, #28\n"
      "lslw r0, r0, #29\n"
      "lslw r0, r0, #30\n"
      "lslw r0, r0, #31\n");
}
| |
// Emits 64-bit and 32-bit lsr-immediate encodings. Disassembly-only test:
// the run side never executes this code, so no ret() is emitted.
ASSEMBLER_TEST_GENERATE(LsrImmediate, assembler) {
  __ LsrImmediate(R0, R0, 1);
  __ LsrImmediate(R0, R0, 2);
  __ LsrImmediate(R0, R0, 3);
  __ LsrImmediate(R0, R0, 4);
  __ LsrImmediate(R0, R0, 60);
  __ LsrImmediate(R0, R0, 61);
  __ LsrImmediate(R0, R0, 62);
  __ LsrImmediate(R0, R0, 63);
  __ LsrImmediate(R0, R0, 1, kFourBytes);
  __ LsrImmediate(R0, R0, 2, kFourBytes);
  __ LsrImmediate(R0, R0, 3, kFourBytes);
  __ LsrImmediate(R0, R0, 4, kFourBytes);
  __ LsrImmediate(R0, R0, 28, kFourBytes);
  __ LsrImmediate(R0, R0, 29, kFourBytes);
  __ LsrImmediate(R0, R0, 30, kFourBytes);
  __ LsrImmediate(R0, R0, 31, kFourBytes);
}
| |
// Checks only the listing (lsr vs. lsrw for the 32-bit forms); the code
// is never executed.
ASSEMBLER_TEST_RUN(LsrImmediate, test) {
  EXPECT_DISASSEMBLY(
      "lsr r0, r0, #1\n"
      "lsr r0, r0, #2\n"
      "lsr r0, r0, #3\n"
      "lsr r0, r0, #4\n"
      "lsr r0, r0, #60\n"
      "lsr r0, r0, #61\n"
      "lsr r0, r0, #62\n"
      "lsr r0, r0, #63\n"
      "lsrw r0, r0, #1\n"
      "lsrw r0, r0, #2\n"
      "lsrw r0, r0, #3\n"
      "lsrw r0, r0, #4\n"
      "lsrw r0, r0, #28\n"
      "lsrw r0, r0, #29\n"
      "lsrw r0, r0, #30\n"
      "lsrw r0, r0, #31\n");
}
| |
// Emits 64-bit and 32-bit asr-immediate encodings. Disassembly-only test:
// the run side never executes this code, so no ret() is emitted.
ASSEMBLER_TEST_GENERATE(AsrImmediate, assembler) {
  __ AsrImmediate(R0, R0, 1);
  __ AsrImmediate(R0, R0, 2);
  __ AsrImmediate(R0, R0, 3);
  __ AsrImmediate(R0, R0, 4);
  __ AsrImmediate(R0, R0, 60);
  __ AsrImmediate(R0, R0, 61);
  __ AsrImmediate(R0, R0, 62);
  __ AsrImmediate(R0, R0, 63);
  __ AsrImmediate(R0, R0, 1, kFourBytes);
  __ AsrImmediate(R0, R0, 2, kFourBytes);
  __ AsrImmediate(R0, R0, 3, kFourBytes);
  __ AsrImmediate(R0, R0, 4, kFourBytes);
  __ AsrImmediate(R0, R0, 28, kFourBytes);
  __ AsrImmediate(R0, R0, 29, kFourBytes);
  __ AsrImmediate(R0, R0, 30, kFourBytes);
  __ AsrImmediate(R0, R0, 31, kFourBytes);
}
| |
// Checks only the listing (asr vs. asrw for the 32-bit forms); the code
// is never executed.
ASSEMBLER_TEST_RUN(AsrImmediate, test) {
  EXPECT_DISASSEMBLY(
      "asr r0, r0, #1\n"
      "asr r0, r0, #2\n"
      "asr r0, r0, #3\n"
      "asr r0, r0, #4\n"
      "asr r0, r0, #60\n"
      "asr r0, r0, #61\n"
      "asr r0, r0, #62\n"
      "asr r0, r0, #63\n"
      "asrw r0, r0, #1\n"
      "asrw r0, r0, #2\n"
      "asrw r0, r0, #3\n"
      "asrw r0, r0, #4\n"
      "asrw r0, r0, #28\n"
      "asrw r0, r0, #29\n"
      "asrw r0, r0, #30\n"
      "asrw r0, r0, #31\n");
}
| |
// and with a shifted operand: 42 & (21 << 1) == 42.
ASSEMBLER_TEST_GENERATE(AndShiftRegs, assembler) {
  __ movz(R1, Immediate(42), 0);
  __ movz(R2, Immediate(21), 0);
  __ and_(R0, R1, Operand(R2, LSL, 1));
  __ ret();
}
| |
// Executes AndShiftRegs (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(AndShiftRegs, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0x2a\n"
      "movz r2, #0x15\n"
      "and r0, r1, r2 lsl #1\n"
      "ret\n");
}
| |
// bic (bit clear, AND NOT): 42 & ~5 == 42.
ASSEMBLER_TEST_GENERATE(BicRegs, assembler) {
  __ movz(R1, Immediate(42), 0);
  __ movz(R2, Immediate(5), 0);
  __ bic(R0, R1, Operand(R2));
  __ ret();
}
| |
// Executes BicRegs (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(BicRegs, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0x2a\n"
      "movz r2, #0x5\n"
      "bic r0, r1, r2\n"
      "ret\n");
}
| |
// orr: 32 | 10 == 42.
ASSEMBLER_TEST_GENERATE(OrrRegs, assembler) {
  __ movz(R1, Immediate(32), 0);
  __ movz(R2, Immediate(10), 0);
  __ orr(R0, R1, Operand(R2));
  __ ret();
}
| |
// Executes OrrRegs (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(OrrRegs, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0x20\n"
      "movz r2, #0xa\n"
      "orr r0, r1, r2\n"
      "ret\n");
}
| |
// orn (OR NOT): 0x20 | ~0x...ffd5 == 0x20 | 0x2a == 42.
ASSEMBLER_TEST_GENERATE(OrnRegs, assembler) {
  __ movz(R1, Immediate(32), 0);
  __ movn(R2, Immediate(0), 0);       // R2 <- 0xffffffffffffffff.
  __ movk(R2, Immediate(0xffd5), 0);  // R2 <- 0xffffffffffffffd5.
  __ orn(R0, R1, Operand(R2));
  __ ret();
}
| |
// Executes OrnRegs (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(OrnRegs, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0x20\n"
      "movn r2, #0x0\n"
      "movk r2, #0xffd5\n"
      "orn r0, r1, r2\n"
      "ret\n");
}
| |
// eor: 0xffd5 ^ 0xffff == 0x2a == 42.
ASSEMBLER_TEST_GENERATE(EorRegs, assembler) {
  __ movz(R1, Immediate(0xffd5), 0);
  __ movz(R2, Immediate(0xffff), 0);
  __ eor(R0, R1, Operand(R2));
  __ ret();
}
| |
// Executes EorRegs (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(EorRegs, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0xffd5\n"
      "movz r2, #0xffff\n"
      "eor r0, r1, r2\n"
      "ret\n");
}
| |
// eon (XOR NOT): 0xffd5 ^ ~(~0xffff) == 0xffd5 ^ 0xffff == 42.
ASSEMBLER_TEST_GENERATE(EonRegs, assembler) {
  __ movz(R1, Immediate(0xffd5), 0);
  __ movn(R2, Immediate(0xffff), 0);
  __ eon(R0, R1, Operand(R2));
  __ ret();
}
| |
// Executes EonRegs (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(EonRegs, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0xffd5\n"
      "movn r2, #0xffff\n"
      "eon r0, r1, r2\n"
      "ret\n");
}
| |
| // Logical immediate operations. |
// Logical immediate operations.
// and with a bitmask immediate: 42 has only odd bit positions set, so
// masking with 0xaaaa... (all odd bits) leaves 42 unchanged.
ASSEMBLER_TEST_GENERATE(AndImm, assembler) {
  __ movz(R1, Immediate(42), 0);
  __ andi(R0, R1, Immediate(0xaaaaaaaaaaaaaaaaULL));
  __ ret();
}
| |
// Executes AndImm (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(AndImm, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0x2a\n"
      "and r0, r1, 0xaaaaaaaaaaaaaaaa\n"
      "ret\n");
}
| |
// andi with CSP as destination: aligns CSP-31 down to 16 bytes and returns
// the distance moved (31 rounded down to a 16-byte boundary -> 32).
ASSEMBLER_TEST_GENERATE(AndImmCsp, assembler) {
  // Note we must maintain the ARM64 ABI invariants on CSP here.
  __ mov(TMP, CSP);
  __ sub(TMP2, CSP, Operand(31));
  __ andi(CSP, TMP2, Immediate(~15));
  __ mov(R0, CSP);
  __ sub(R0, TMP, Operand(R0));
  __ mov(CSP, TMP);  // Restore CSP before returning.
  __ ret();
}
| |
// Executes AndImmCsp (expects 32) and checks its listing.
ASSEMBLER_TEST_RUN(AndImmCsp, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(32, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "mov tmp, csp\n"
      "sub tmp2, csp, #0x1f\n"
      "and csp, tmp2, 0xfffffffffffffff0\n"
      "mov r0, csp\n"
      "sub r0, tmp, r0\n"
      "mov csp, tmp\n"
      "ret\n");
}
| |
// andi with the single-bit immediate 1: 43 & 1 == 1.
ASSEMBLER_TEST_GENERATE(AndOneImm, assembler) {
  __ movz(R1, Immediate(43), 0);
  __ andi(R0, R1, Immediate(1));
  __ ret();
}
| |
// Executes AndOneImm (expects 1) and checks its listing.
ASSEMBLER_TEST_RUN(AndOneImm, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(1, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0x2b\n"
      "and r0, r1, 0x1\n"
      "ret\n");
}
| |
// orri with a repeating bitmask immediate, then masked down to the low
// bits: ((0 | 0x0020002000200020) | 0xa) & 0x3f == 0x2a == 42.
ASSEMBLER_TEST_GENERATE(OrrImm, assembler) {
  __ movz(R1, Immediate(0), 0);
  __ movz(R2, Immediate(0x3f), 0);
  __ movz(R3, Immediate(0xa), 0);
  __ orri(R1, R1, Immediate(0x0020002000200020ULL));
  __ orr(R1, R1, Operand(R3));
  __ and_(R0, R1, Operand(R2));
  __ ret();
}
| |
// Executes OrrImm (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(OrrImm, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r1, #0x0\n"
      "movz r2, #0x3f\n"
      "movz r3, #0xa\n"
      "orr r1, r1, 0x20002000200020\n"
      "orr r1, r1, r3\n"
      "and r0, r1, r2\n"
      "ret\n");
}
| |
// eori with a repeating bitmask immediate:
// (0x...ffd5 ^ 0x3f3f...) & 0x3f == 0xea & 0x3f == 0x2a == 42.
ASSEMBLER_TEST_GENERATE(EorImm, assembler) {
  __ movn(R0, Immediate(0), 0);
  __ movk(R0, Immediate(0xffd5), 0);  // R0 <- 0xffffffffffffffd5.
  __ movz(R1, Immediate(0x3f), 0);
  __ eori(R0, R0, Immediate(0x3f3f3f3f3f3f3f3fULL));
  __ and_(R0, R0, Operand(R1));
  __ ret();
}
| |
// Executes EorImm (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(EorImm, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movn r0, #0x0\n"
      "movk r0, #0xffd5\n"
      "movz r1, #0x3f\n"
      "eor r0, r0, 0x3f3f3f3f3f3f3f3f\n"
      "and r0, r0, r1\n"
      "ret\n");
}
| |
// 64-bit count-leading-zeros: checks clz(0) == 64, clz(42) == 58,
// clz(-1) == 0, and clz(-1 >> 3) == 3. Returns 0 on success, 1 on any
// mismatch.
ASSEMBLER_TEST_GENERATE(Clz, assembler) {
  Label error;

  __ clz(R1, ZR);
  __ cmp(R1, Operand(64));
  __ b(&error, NE);
  __ LoadImmediate(R2, 42);
  __ clz(R2, R2);
  __ cmp(R2, Operand(58));
  __ b(&error, NE);
  __ LoadImmediate(R0, -1);
  __ clz(R1, R0);
  __ cmp(R1, Operand(0));
  __ b(&error, NE);
  __ add(R0, ZR, Operand(R0, LSR, 3));  // R0 <- -1 logically shifted by 3.
  __ clz(R1, R0);
  __ cmp(R1, Operand(3));
  __ b(&error, NE);
  __ mov(R0, ZR);
  __ ret();
  __ Bind(&error);
  __ LoadImmediate(R0, 1);
  __ ret();
}
| |
// Executes Clz (expects 0 == all checks passed) and checks its listing.
ASSEMBLER_TEST_RUN(Clz, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "clz r1, zr\n"
      "cmp r1, #0x40\n"
      "bne +60\n"
      "movz r2, #0x2a\n"
      "clz r2, r2\n"
      "cmp r2, #0x3a\n"
      "bne +44\n"
      "movn r0, #0x0\n"
      "clz r1, r0\n"
      "cmp r1, #0x0\n"
      "bne +28\n"
      "add r0, zr, r0 lsr #3\n"
      "clz r1, r0\n"
      "cmp r1, #0x3\n"
      "bne +12\n"
      "mov r0, zr\n"
      "ret\n"
      "movz r0, #0x1\n"
      "ret\n");
}
| |
// 32-bit count-leading-zeros (clzw operates on the low word only):
// checks clzw(0) == 32, clzw(42) == 26, clzw(-1) == 0,
// clzw(0x1fffffff) == 3, clzw of 0x...0fffffff's low word == 4, and
// clzw(0xffffffff) == 0. Returns 0 on success, 1 on any mismatch.
ASSEMBLER_TEST_GENERATE(Clzw, assembler) {
  Label error;

  __ clzw(R1, ZR);
  __ cmp(R1, Operand(32));
  __ b(&error, NE);
  __ LoadImmediate(R2, 42);
  __ clzw(R2, R2);
  __ cmp(R2, Operand(26));
  __ b(&error, NE);
  __ LoadImmediate(R0, -1);
  __ clzw(R1, R0);
  __ cmp(R1, Operand(0));
  __ b(&error, NE);
  __ add(R0, ZR, Operand(R0, LSR, 35));  // Low word of R0 <- 0x1fffffff.
  __ clzw(R1, R0);
  __ cmp(R1, Operand(3));
  __ b(&error, NE);
  __ LoadImmediate(R0, 0xFFFFFFFF0FFFFFFF);
  __ clzw(R1, R0);  // High word ignored: clzw of 0x0fffffff is 4.
  __ cmp(R1, Operand(4));
  __ b(&error, NE);
  __ LoadImmediate(R0, 0xFFFFFFFF);
  __ clzw(R1, R0);
  __ cmp(R1, Operand(0));
  __ b(&error, NE);
  __ mov(R0, ZR);
  __ ret();
  __ Bind(&error);
  __ LoadImmediate(R0, 1);
  __ ret();
}
| |
// Executes Clzw (expects 0 == all checks passed) and checks its listing.
ASSEMBLER_TEST_RUN(Clzw, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "clzw r1, zr\n"
      "cmp r1, #0x20\n"
      "bne +92\n"
      "movz r2, #0x2a\n"
      "clzw r2, r2\n"
      "cmp r2, #0x1a\n"
      "bne +76\n"
      "movn r0, #0x0\n"
      "clzw r1, r0\n"
      "cmp r1, #0x0\n"
      "bne +60\n"
      "add r0, zr, r0 lsr #35\n"
      "clzw r1, r0\n"
      "cmp r1, #0x3\n"
      "bne +44\n"
      "mov r0, 0xffffffff0fffffff\n"
      "clzw r1, r0\n"
      "cmp r1, #0x4\n"
      "bne +28\n"
      "mov r0, 0xffffffff\n"
      "clzw r1, r0\n"
      "cmp r1, #0x0\n"
      "bne +12\n"
      "mov r0, zr\n"
      "ret\n"
      "movz r0, #0x1\n"
      "ret\n");
}
| |
// rbit (bit reverse): 0x15 reversed across 64 bits is 0xa8 << 56.
ASSEMBLER_TEST_GENERATE(Rbit, assembler) {
  const int64_t immediate = 0x0000000000000015;
  __ LoadImmediate(R0, immediate);
  __ rbit(R0, R0);
  __ ret();
}
| |
// Executes Rbit (expects the bit-reversed constant) and checks its listing.
ASSEMBLER_TEST_RUN(Rbit, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  const int64_t expected = 0xa800000000000000;
  EXPECT_EQ(expected, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x15\n"
      "rbit r0, r0\n"
      "ret\n");
}
| |
| // Comparisons, branching. |
// Comparisons, branching.
// Unconditional forward branch skips the overwrite of R0, so 42 survives.
ASSEMBLER_TEST_GENERATE(BranchALForward, assembler) {
  Label l;
  __ movz(R0, Immediate(42), 0);
  __ b(&l, AL);
  __ movz(R0, Immediate(0), 0);  // Skipped.
  __ Bind(&l);
  __ ret();
}
| |
// Executes BranchALForward (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(BranchALForward, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "b +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// Forward branch to l, then a backward branch to leave; every movz(R0, 0)
// sits on a dead path, so 42 survives.
ASSEMBLER_TEST_GENERATE(BranchALBackwards, assembler) {
  Label l, leave;
  __ movz(R0, Immediate(42), 0);
  __ b(&l, AL);

  __ movz(R0, Immediate(0), 0);  // Skipped.
  __ Bind(&leave);
  __ ret();
  __ movz(R0, Immediate(0), 0);  // Unreachable.

  __ Bind(&l);
  __ b(&leave, AL);  // Branch backwards.
  __ movz(R0, Immediate(0), 0);  // Unreachable.
  __ ret();
}
| |
// Executes BranchALBackwards (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(BranchALBackwards, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "b +16\n"
      "movz r0, #0x0\n"
      "ret\n"
      "movz r0, #0x0\n"
      "b -8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// Taken EQ branch: 234 == 234, so the overwrite of R0 is skipped.
ASSEMBLER_TEST_GENERATE(CmpEqBranch, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ movz(R1, Immediate(234), 0);
  __ movz(R2, Immediate(234), 0);

  __ cmp(R1, Operand(R2));
  __ b(&l, EQ);
  __ movz(R0, Immediate(0), 0);  // Skipped.
  __ Bind(&l);
  __ ret();
}
| |
// Executes CmpEqBranch (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(CmpEqBranch, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movz r1, #0xea\n"
      "movz r2, #0xea\n"
      "cmp r1, r2\n"
      "beq +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// Not-taken EQ branch: 233 != 234, so R0 is overwritten with 42.
ASSEMBLER_TEST_GENERATE(CmpEqBranchNotTaken, assembler) {
  Label l;

  __ movz(R0, Immediate(0), 0);
  __ movz(R1, Immediate(233), 0);
  __ movz(R2, Immediate(234), 0);

  __ cmp(R1, Operand(R2));
  __ b(&l, EQ);
  __ movz(R0, Immediate(42), 0);  // Executed: branch not taken.
  __ Bind(&l);
  __ ret();
}
| |
// Executes CmpEqBranchNotTaken (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(CmpEqBranchNotTaken, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movz r1, #0xe9\n"
      "movz r2, #0xea\n"
      "cmp r1, r2\n"
      "beq +8\n"
      "movz r0, #0x2a\n"
      "ret\n");
}
| |
// cmp against an immediate: 1 == 1, so the overwrite of R0 is skipped.
ASSEMBLER_TEST_GENERATE(CmpEq1Branch, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ movz(R1, Immediate(1), 0);

  __ cmp(R1, Operand(1));
  __ b(&l, EQ);
  __ movz(R0, Immediate(0), 0);  // Skipped.
  __ Bind(&l);
  __ ret();
}
| |
// Executes CmpEq1Branch (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(CmpEq1Branch, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movz r1, #0x1\n"
      "cmp r1, #0x1\n"
      "beq +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// cmn (compare negative): -1 + 1 == 0 sets the Z flag, so EQ is taken.
ASSEMBLER_TEST_GENERATE(CmnEq1Branch, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ movn(R1, Immediate(0), 0);  // R1 <- -1

  __ cmn(R1, Operand(1));
  __ b(&l, EQ);
  __ movz(R0, Immediate(0), 0);  // Skipped.
  __ Bind(&l);
  __ ret();
}
| |
// Executes CmnEq1Branch (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(CmnEq1Branch, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movn r1, #0x0\n"
      "cmn r1, #0x1\n"
      "beq +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// Taken signed LT branch: 233 < 234, so the overwrite of R0 is skipped.
ASSEMBLER_TEST_GENERATE(CmpLtBranch, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ movz(R1, Immediate(233), 0);
  __ movz(R2, Immediate(234), 0);

  __ cmp(R1, Operand(R2));
  __ b(&l, LT);
  __ movz(R0, Immediate(0), 0);  // Skipped.
  __ Bind(&l);
  __ ret();
}
| |
// Executes CmpLtBranch (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(CmpLtBranch, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movz r1, #0xe9\n"
      "movz r2, #0xea\n"
      "cmp r1, r2\n"
      "blt +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// Not-taken signed LT branch: 235 >= 234, so R0 is overwritten with 42.
ASSEMBLER_TEST_GENERATE(CmpLtBranchNotTaken, assembler) {
  Label l;

  __ movz(R0, Immediate(0), 0);
  __ movz(R1, Immediate(235), 0);
  __ movz(R2, Immediate(234), 0);

  __ cmp(R1, Operand(R2));
  __ b(&l, LT);
  __ movz(R0, Immediate(42), 0);  // Executed: branch not taken.
  __ Bind(&l);
  __ ret();
}
| |
// Executes CmpLtBranchNotTaken (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(CmpLtBranchNotTaken, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movz r1, #0xeb\n"
      "movz r2, #0xea\n"
      "cmp r1, r2\n"
      "blt +8\n"
      "movz r0, #0x2a\n"
      "ret\n");
}
| |
// cbz taken on R1 == 0: the overwrite of R0 is skipped.
ASSEMBLER_TEST_GENERATE(CmpBranchIfZero, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ movz(R1, Immediate(0), 0);

  __ cbz(&l, R1);
  __ movz(R0, Immediate(0), 0);  // Skipped.
  __ Bind(&l);
  __ ret();
}
| |
// Executes CmpBranchIfZero (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(CmpBranchIfZero, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movz r1, #0x0\n"
      "cbz r1, +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// cbz not taken on R1 == 1: R0 is overwritten with 42.
ASSEMBLER_TEST_GENERATE(CmpBranchIfZeroNotTaken, assembler) {
  Label l;

  __ movz(R0, Immediate(0), 0);
  __ movz(R1, Immediate(1), 0);

  __ cbz(&l, R1);
  __ movz(R0, Immediate(42), 0);  // Executed: branch not taken.
  __ Bind(&l);
  __ ret();
}
| |
// Executes CmpBranchIfZeroNotTaken (expects 42) and checks its listing.
ASSEMBLER_TEST_RUN(CmpBranchIfZeroNotTaken, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movz r1, #0x1\n"
      "cbz r1, +8\n"
      "movz r0, #0x2a\n"
      "ret\n");
}
| |
// cbnz taken on R1 == 1: the overwrite of R0 is skipped.
ASSEMBLER_TEST_GENERATE(CmpBranchIfNotZero, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ movz(R1, Immediate(1), 0);

  __ cbnz(&l, R1);
  __ movz(R0, Immediate(0), 0);  // Skipped.
  __ Bind(&l);
  __ ret();
}
| |
| ASSEMBLER_TEST_RUN(CmpBranchIfNotZero, test) { |
| typedef int64_t (*Int64Return)() DART_UNUSED; |
| EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
| EXPECT_DISASSEMBLY( |
| "movz r0, #0x2a\n" |
| "movz r1, #0x1\n" |
| "cbnz r1, +8\n" |
| "movz r0, #0x0\n" |
| "ret\n"); |
| } |
| |
// R1 == 0, so cbnz is not taken and the fall-through sets R0 to 42.
ASSEMBLER_TEST_GENERATE(CmpBranchIfNotZeroNotTaken, assembler) {
  Label l;

  __ movz(R0, Immediate(0), 0);
  __ movz(R1, Immediate(0), 0);

  __ cbnz(&l, R1);
  __ movz(R0, Immediate(42), 0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(CmpBranchIfNotZeroNotTaken, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movz r1, #0x0\n"
      "cbnz r1, +8\n"
      "movz r0, #0x2a\n"
      "ret\n");
}
| |
// One bit in the low 32-bit half (bit 5) and one in the high half
// (bit 35), so the tbz/tbnz tests below exercise both the 32-bit ("w")
// and 64-bit encodings of the test-bit-and-branch instructions.
// Both shifts are done on int64_t-typed literals; the original shifted
// a plain int for bit 5, which was correct only because 5 < 31.
static constexpr int64_t kBits5And35 = (1ll << 5) | (1ll << 35);
| |
// R1 has bits 5 and 35 clear, so both tbz branches are taken and the
// instructions clearing R0 are skipped. Bit 5 selects the 32-bit
// ("tbzw") encoding; bit 35 requires the 64-bit form.
ASSEMBLER_TEST_GENERATE(TstBranchIfZero, assembler) {
  Label l, l2;

  __ movz(R0, Immediate(42), 0);
  __ LoadImmediate(R1, ~kBits5And35);

  __ tbz(&l, R1, 5);
  __ movz(R0, Immediate(0), 0);
  __ Bind(&l);

  __ tbz(&l2, R1, 35);
  __ movz(R0, Immediate(0), 0);
  __ Bind(&l2);

  __ ret();
}

ASSEMBLER_TEST_RUN(TstBranchIfZero, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  // movn/movk is how LoadImmediate materializes ~kBits5And35.
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movn r1, #0x8 lsl 32\n"
      "movk r1, #0xffdf\n"
      "tbzw r1, #5, +8\n"
      "movz r0, #0x0\n"
      "tbz r1, #35, +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// Bit 5 is set in R1 (kBits5And35), so tbz is not taken and R0 gets 42.
ASSEMBLER_TEST_GENERATE(TstBranchIfZeroNotTaken, assembler) {
  Label l;

  __ movz(R0, Immediate(0), 0);
  __ LoadImmediate(R1, kBits5And35);

  __ tbz(&l, R1, 5);
  __ movz(R0, Immediate(42), 0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(TstBranchIfZeroNotTaken, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movz r1, #0x20\n"
      "movk r1, #0x8 lsl 32\n"
      "tbzw r1, #5, +8\n"
      "movz r0, #0x2a\n"
      "ret\n");
}
| |
// Bits 5 and 35 are set in R1, so both tbnz branches are taken and the
// instructions clearing R0 are skipped (32-bit form for bit 5, 64-bit
// form for bit 35).
ASSEMBLER_TEST_GENERATE(TstBranchIfNotZero, assembler) {
  Label l, l2;

  __ movz(R0, Immediate(42), 0);
  __ LoadImmediate(R1, kBits5And35);

  __ tbnz(&l, R1, 5);
  __ movz(R0, Immediate(0), 0);
  __ Bind(&l);

  __ tbnz(&l2, R1, 35);
  __ movz(R0, Immediate(0), 0);
  __ Bind(&l2);

  __ ret();
}

ASSEMBLER_TEST_RUN(TstBranchIfNotZero, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movz r1, #0x20\n"
      "movk r1, #0x8 lsl 32\n"
      "tbnzw r1, #5, +8\n"
      "movz r0, #0x0\n"
      "tbnz r1, #35, +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// Bit 5 is clear in R1 (~kBits5And35), so tbnz is not taken and R0 gets 42.
ASSEMBLER_TEST_GENERATE(TstBranchIfNotZeroNotTaken, assembler) {
  Label l;

  __ movz(R0, Immediate(0), 0);
  __ LoadImmediate(R1, ~kBits5And35);

  __ tbnz(&l, R1, 5);
  __ movz(R0, Immediate(42), 0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(TstBranchIfNotZeroNotTaken, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movn r1, #0x8 lsl 32\n"
      "movk r1, #0xffdf\n"
      "tbnzw r1, #5, +8\n"
      "movz r0, #0x2a\n"
      "ret\n");
}
| |
// Same as TstBranchIfZero, but with enough padding that the label is
// beyond tbz's encodable range, forcing the assembler's far-branch
// expansion (presumably an inverted test over an unconditional branch
// -- confirm against the assembler's far-branch handling).
ASSEMBLER_TEST_GENERATE(TstBranchIfZeroFar, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ LoadImmediate(R1, ~kBits5And35);

  __ tbz(&l, R1, 5);

  const intptr_t kRange = 1 << 14;  // tbz has 14 bits of range.
  for (intptr_t i = 0; i < kRange; i++) {
    __ brk(0);
  }

  __ movz(R0, Immediate(0), 0);
  __ Bind(&l);
  __ ret();
}

// No disassembly check: the 16K brk padding makes a golden string impractical.
ASSEMBLER_TEST_RUN(TstBranchIfZeroFar, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
}
| |
// Far-branch variant of TstBranchIfNotZero: padding pushes the target
// past tbnz's 14-bit range so the assembler must rewrite the branch.
ASSEMBLER_TEST_GENERATE(TstBranchIfNotZeroFar, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ LoadImmediate(R1, kBits5And35);

  __ tbnz(&l, R1, 5);

  const intptr_t kRange = 1 << 14;  // tbnz has 14 bits of range.
  for (intptr_t i = 0; i < kRange; i++) {
    __ brk(0);
  }

  __ movz(R0, Immediate(0), 0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(TstBranchIfNotZeroFar, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
}
| |
// 234.0 == 234.0: the EQ branch after fcmpd is taken and V0 keeps 42.0.
ASSEMBLER_TEST_GENERATE(FcmpEqBranch, assembler) {
  Label l;

  __ LoadDImmediate(V0, 42.0);
  __ LoadDImmediate(V1, 234.0);
  __ LoadDImmediate(V2, 234.0);

  __ fcmpd(V1, V2);
  __ b(&l, EQ);
  __ LoadDImmediate(V0, 0.0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(FcmpEqBranch, test) {
  typedef double (*DoubleReturn)() DART_UNUSED;
  EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()));
  // LoadDImmediate materializes each double via movz/movk into tmp,
  // then fmovdr into the FP register.
  EXPECT_DISASSEMBLY(
      "movz tmp, #0x4045 lsl 48\n"
      "fmovdr v0, tmp\n"
      "movz tmp, #0x4000 lsl 32\n"
      "movk tmp, #0x406d lsl 48\n"
      "fmovdr v1, tmp\n"
      "movz tmp, #0x4000 lsl 32\n"
      "movk tmp, #0x406d lsl 48\n"
      "fmovdr v2, tmp\n"
      "fcmpd v1, v2\n"
      "beq +12\n"
      "movz tmp, #0x0\n"
      "fmovdr v0, tmp\n"
      "ret\n");
}
| |
// NOTE(review): despite the "IfZero" name this uses tbnz. Bit 5 of 41
// (0b101001) is set, so the branch is taken across the 128K-instruction
// padding, skipping the Stop; the bound code then increments R0 to 42.
// The test name is part of the registration and is kept as-is.
ASSEMBLER_TEST_GENERATE(TstBranchIfZeroFar1, assembler) {
  Label l;

  __ LoadImmediate(R0, 41);
  __ tbnz(&l, R0, 5);
  __ Stop("Hammertime");

  for (int i = 0; i < 0x10000; i++) {
    __ add(R0, R0, Operand(1));
    __ sub(R0, R0, Operand(1));
  }

  __ AddImmediate(R0, R0, -1);  // Not run.

  __ Bind(&l);
  __ AddImmediate(R0, R0, 1);
  __ ret();
}

ASSEMBLER_TEST_RUN(TstBranchIfZeroFar1, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
}
| |
// Like TstBranchIfZeroFar1 (also tbnz despite the name), but the large
// padding precedes the branch, exercising a short forward branch emitted
// deep into a large buffer.
ASSEMBLER_TEST_GENERATE(TstBranchIfZeroFar2, assembler) {
  Label l;

  for (int i = 0; i < 0x10000; i++) {
    __ add(R0, R0, Operand(1));
    __ sub(R0, R0, Operand(1));
  }

  __ LoadImmediate(R0, 41);
  __ tbnz(&l, R0, 5);
  __ Stop("Hammertime");

  __ AddImmediate(R0, R0, -1);  // Not run.

  __ Bind(&l);
  __ AddImmediate(R0, R0, 1);
  __ ret();
}

ASSEMBLER_TEST_RUN(TstBranchIfZeroFar2, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
}
| |
// Backward far-branch variant (again tbnz despite the name): control
// jumps forward over the padding to l, then the tbnz at the end must
// branch backward across the 128K padding instructions to l2.
ASSEMBLER_TEST_GENERATE(TstBranchIfZeroFar3, assembler) {
  Label l, l2;
  __ LoadImmediate(R0, 41);
  __ b(&l, AL);

  __ AddImmediate(R0, R0, -1);  // Not run.

  __ Bind(&l2);
  __ AddImmediate(R0, R0, 1);
  __ ret();

  for (int i = 0; i < 0x10000; i++) {
    __ add(R0, R0, Operand(1));
    __ sub(R0, R0, Operand(1));
  }

  __ Bind(&l);
  __ tbnz(&l2, R0, 5);
  __ Stop("Hammertime");
}

ASSEMBLER_TEST_RUN(TstBranchIfZeroFar3, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
}
| |
// 233.0 != 234.0: the EQ branch is not taken and V0 is set to 42.0.
ASSEMBLER_TEST_GENERATE(FcmpEqBranchNotTaken, assembler) {
  Label l;

  __ LoadDImmediate(V0, 0.0);
  __ LoadDImmediate(V1, 233.0);
  __ LoadDImmediate(V2, 234.0);

  __ fcmpd(V1, V2);
  __ b(&l, EQ);
  __ LoadDImmediate(V0, 42.0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(FcmpEqBranchNotTaken, test) {
  typedef double (*DoubleReturn)() DART_UNUSED;
  EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz tmp, #0x0\n"
      "fmovdr v0, tmp\n"
      "movz tmp, #0x2000 lsl 32\n"
      "movk tmp, #0x406d lsl 48\n"
      "fmovdr v1, tmp\n"
      "movz tmp, #0x4000 lsl 32\n"
      "movk tmp, #0x406d lsl 48\n"
      "fmovdr v2, tmp\n"
      "fcmpd v1, v2\n"
      "beq +12\n"
      "movz tmp, #0x4045 lsl 48\n"
      "fmovdr v0, tmp\n"
      "ret\n");
}
| |
// 233.0 < 234.0: the LT branch is taken and V0 keeps 42.0.
ASSEMBLER_TEST_GENERATE(FcmpLtBranch, assembler) {
  Label l;

  __ LoadDImmediate(V0, 42.0);
  __ LoadDImmediate(V1, 233.0);
  __ LoadDImmediate(V2, 234.0);

  __ fcmpd(V1, V2);
  __ b(&l, LT);
  __ LoadDImmediate(V0, 0.0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(FcmpLtBranch, test) {
  typedef double (*DoubleReturn)() DART_UNUSED;
  EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()));
}
| |
// 235.0 >= 234.0: the LT branch is not taken and V0 is set to 42.0.
ASSEMBLER_TEST_GENERATE(FcmpLtBranchNotTaken, assembler) {
  Label l;

  __ LoadDImmediate(V0, 0.0);
  __ LoadDImmediate(V1, 235.0);
  __ LoadDImmediate(V2, 234.0);

  __ fcmpd(V1, V2);
  __ b(&l, LT);
  __ LoadDImmediate(V0, 42.0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(FcmpLtBranchNotTaken, test) {
  typedef double (*DoubleReturn)() DART_UNUSED;
  EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz tmp, #0x0\n"
      "fmovdr v0, tmp\n"
      "movz tmp, #0x6000 lsl 32\n"
      "movk tmp, #0x406d lsl 48\n"
      "fmovdr v1, tmp\n"
      "movz tmp, #0x4000 lsl 32\n"
      "movk tmp, #0x406d lsl 48\n"
      "fmovdr v2, tmp\n"
      "fcmpd v1, v2\n"
      "blt +12\n"
      "movz tmp, #0x4045 lsl 48\n"
      "fmovdr v0, tmp\n"
      "ret\n");
}
| |
// fcmpdz compares V1 (233.0) against literal 0.0; GT is taken, so the
// path that would return 0.0 is skipped and 42.0 is returned instead.
ASSEMBLER_TEST_GENERATE(FcmpzGtBranch, assembler) {
  Label l;

  __ LoadDImmediate(V0, 235.0);
  __ LoadDImmediate(V1, 233.0);

  __ fcmpdz(V1);
  __ b(&l, GT);
  __ LoadDImmediate(V0, 0.0);
  __ ret();
  __ Bind(&l);
  __ LoadDImmediate(V0, 42.0);
  __ ret();
}

ASSEMBLER_TEST_RUN(FcmpzGtBranch, test) {
  typedef double (*DoubleReturn)() DART_UNUSED;
  EXPECT_EQ(42.0, EXECUTE_TEST_CODE_DOUBLE(DoubleReturn, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz tmp, #0x6000 lsl 32\n"
      "movk tmp, #0x406d lsl 48\n"
      "fmovdr v0, tmp\n"
      "movz tmp, #0x2000 lsl 32\n"
      "movk tmp, #0x406d lsl 48\n"
      "fmovdr v1, tmp\n"
      "fcmpd v1, #0.0\n"
      "bgt +16\n"
      "movz tmp, #0x0\n"
      "fmovdr v0, tmp\n"
      "ret\n"
      "movz tmp, #0x4045 lsl 48\n"
      "fmovdr v0, tmp\n"
      "ret\n");
}
| |
// ands sets flags: 2 & 1 == 0 sets Z, so the EQ branch is taken and R0
// keeps 42.
ASSEMBLER_TEST_GENERATE(AndsBranch, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ movz(R1, Immediate(2), 0);
  __ movz(R2, Immediate(1), 0);

  __ ands(R3, R1, Operand(R2));
  __ b(&l, EQ);
  __ movz(R0, Immediate(0), 0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(AndsBranch, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movz r1, #0x2\n"
      "movz r2, #0x1\n"
      "ands r3, r1, r2\n"
      "beq +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// 2 & 2 == 2 (nonzero), so Z is clear, the EQ branch falls through, and
// R0 is set to 42.
ASSEMBLER_TEST_GENERATE(AndsBranchNotTaken, assembler) {
  Label l;

  __ movz(R0, Immediate(0), 0);
  __ movz(R1, Immediate(2), 0);
  __ movz(R2, Immediate(2), 0);

  __ ands(R3, R1, Operand(R2));
  __ b(&l, EQ);
  __ movz(R0, Immediate(42), 0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(AndsBranchNotTaken, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movz r1, #0x2\n"
      "movz r2, #0x2\n"
      "ands r3, r1, r2\n"
      "beq +8\n"
      "movz r0, #0x2a\n"
      "ret\n");
}
| |
// bics is and-with-complement, setting flags: 2 & ~2 == 0 sets Z, so
// the EQ branch is taken and R0 keeps 42.
ASSEMBLER_TEST_GENERATE(BicsBranch, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ movz(R1, Immediate(2), 0);
  __ movz(R2, Immediate(2), 0);

  __ bics(R3, R1, Operand(R2));
  __ b(&l, EQ);
  __ movz(R0, Immediate(0), 0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(BicsBranch, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movz r1, #0x2\n"
      "movz r2, #0x2\n"
      "bics r3, r1, r2\n"
      "beq +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// 2 & ~1 == 2 (nonzero), so the EQ branch falls through and R0 is set
// to 42.
ASSEMBLER_TEST_GENERATE(BicsBranchNotTaken, assembler) {
  Label l;

  __ movz(R0, Immediate(0), 0);
  __ movz(R1, Immediate(2), 0);
  __ movz(R2, Immediate(1), 0);

  __ bics(R3, R1, Operand(R2));
  __ b(&l, EQ);
  __ movz(R0, Immediate(42), 0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(BicsBranchNotTaken, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movz r1, #0x2\n"
      "movz r2, #0x1\n"
      "bics r3, r1, r2\n"
      "beq +8\n"
      "movz r0, #0x2a\n"
      "ret\n");
}
| |
// Immediate form of ands (disassembles as "ands ... 0x1"): 2 & 1 == 0
// sets Z, so the EQ branch is taken and R0 keeps 42.
ASSEMBLER_TEST_GENERATE(AndisBranch, assembler) {
  Label l;

  __ movz(R0, Immediate(42), 0);
  __ movz(R1, Immediate(2), 0);

  __ andis(R3, R1, Immediate(1));
  __ b(&l, EQ);
  __ movz(R0, Immediate(0), 0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(AndisBranch, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x2a\n"
      "movz r1, #0x2\n"
      "ands r3, r1, 0x1\n"
      "beq +8\n"
      "movz r0, #0x0\n"
      "ret\n");
}
| |
// 2 & 2 (immediate) is nonzero, so the EQ branch falls through and R0
// is set to 42.
ASSEMBLER_TEST_GENERATE(AndisBranchNotTaken, assembler) {
  Label l;

  __ movz(R0, Immediate(0), 0);
  __ movz(R1, Immediate(2), 0);

  __ andis(R3, R1, Immediate(2));
  __ b(&l, EQ);
  __ movz(R0, Immediate(42), 0);
  __ Bind(&l);
  __ ret();
}

ASSEMBLER_TEST_RUN(AndisBranchNotTaken, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
  EXPECT_DISASSEMBLY(
      "movz r0, #0x0\n"
      "movz r1, #0x2\n"
      "ands r3, r1, 0x2\n"
      "beq +8\n"
      "movz r0, #0x2a\n"
      "ret\n");
}
| |
| // Address of PC-rel offset, br, blr. |
// adr computes a PC-relative address into R1; br jumps there, skipping
// the first ret, so the second movz overwrites 123 with 42.
ASSEMBLER_TEST_GENERATE(AdrBr, assembler) {
  __ movz(R0, Immediate(123), 0);
  // R1 <- PC + 3*Instr::kInstrSize
  __ adr(R1, Immediate(3 * Instr::kInstrSize));
  __ br(R1);
  __ ret();

  // br goes here.
  __ movz(R0, Immediate(42), 0);
  __ ret();
}

ASSEMBLER_TEST_RUN(AdrBr, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
}
| |
// Like AdrBr but with a call: blr jumps to the adr-computed address,
// which sets R0 to 42 and returns; LR is saved in R3 around the call
// and restored before the final ret.
ASSEMBLER_TEST_GENERATE(AdrBlr, assembler) {
  __ movz(R0, Immediate(123), 0);
  SPILLS_RETURN_ADDRESS_FROM_LR_TO_REGISTER(
      __ add(R3, ZR, Operand(LR)));  // Save LR.
  // R1 <- PC + 4*Instr::kInstrSize
  __ adr(R1, Immediate(4 * Instr::kInstrSize));
  __ blr(R1);
  RESTORES_RETURN_ADDRESS_FROM_REGISTER_TO_LR(__ add(LR, ZR, Operand(R3)));
  __ ret();

  // blr goes here.
  __ movz(R0, Immediate(42), 0);
  __ ret();
}

ASSEMBLER_TEST_RUN(AdrBlr, test) {
  typedef int64_t (*Int64Return)() DART_UNUSED;
  EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
}
| |
| // Misc. arithmetic. |
// Unsigned division: 27 / 9 -> 3.
ASSEMBLER_TEST_GENERATE(Udiv, assembler) {
  __ movz(R0, Immediate(27), 0);
  __ movz(R1, Immediate(9), 0);
  __ udiv(R2, R0, R1);
  __ mov(R0, R2);
  __ ret();
}
| |
| ASSEMBLER_TEST_RUN(Udiv, test) { |
| EXPECT(test != NULL); |
| typedef int64_t (*Int64Return)() DART_UNUSED; |
| EXPECT_EQ(3, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
| EXPECT_DISASSEMBLY( |
| "movz r0, #0x1b\n" |
| "movz r1, #0x9\n" |
| "udiv r2, r0, r1\n" |
| "mov r0, r2\n" |
| "ret\n"); |
| } |
| |
// Signed division with a negative divisor: 27 / -9 -> -3.
ASSEMBLER_TEST_GENERATE(Sdiv, assembler) {
  __ movz(R0, Immediate(27), 0);
  __ movz(R1, Immediate(9), 0);
  __ neg(R1, R1);
  __ sdiv(R2, R0, R1);
  __ mov(R0, R2);
  __ ret();
}
| |
| ASSEMBLER_TEST_RUN(Sdiv, test) { |
| EXPECT(test != NULL); |
| typedef int64_t (*Int64Return)() DART_UNUSED; |
| EXPECT_EQ(-3, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
| EXPECT_DISASSEMBLY( |
| "movz r0, #0x1b\n" |
| "movz r1, #0x9\n" |
| "neg r1, r1\n" |
| "sdiv r2, r0, r1\n" |
| "mov r0, r2\n" |
| "ret\n"); |
| } |
| |
// Unsigned division by zero; the RUN test expects the result 0 (no trap).
ASSEMBLER_TEST_GENERATE(Udiv_zero, assembler) {
  __ movz(R0, Immediate(27), 0);
  __ movz(R1, Immediate(0), 0);
  __ udiv(R2, R0, R1);
  __ mov(R0, R2);
  __ ret();
}
| |
| ASSEMBLER_TEST_RUN(Udiv_zero, test) { |
| EXPECT(test != NULL); |
| typedef int64_t (*Int64Return)() DART_UNUSED; |
| EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
| EXPECT_DISASSEMBLY( |
| "movz r0, #0x1b\n" |
| "movz r1, #0x0\n" |
| "udiv r2, r0, r1\n" |
| "mov r0, r2\n" |
| "ret\n"); |
| } |
| |
// Signed division by zero; the RUN test expects the result 0 (no trap).
ASSEMBLER_TEST_GENERATE(Sdiv_zero, assembler) {
  __ movz(R0, Immediate(27), 0);
  __ movz(R1, Immediate(0), 0);
  __ sdiv(R2, R0, R1);
  __ mov(R0, R2);
  __ ret();
}
| |
| ASSEMBLER_TEST_RUN(Sdiv_zero, test) { |
| EXPECT(test != NULL); |
| typedef int64_t (*Int64Return)() DART_UNUSED; |
| EXPECT_EQ(0, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry())); |
| EXPECT_DISASSEMBLY( |
| "movz r0, #0x1b\n" |
| "movz r1, #0x0\n" |
| "sdiv r2, r0, r1\n" |
| "mov r0, r2\n" |
| "ret\n"); |
| } |
| |
| ASSEMBLER_TEST_GENERATE(Udiv_corner, assembler) { |
| __ movz(R0, Immediate(0x8000), 3); // R0 <- 0x8000000000000000 |
| __ movn(R1, Immediate(0), 0); // R1 <- 0xffffffffffffffff |
| __ udiv(R2, R0, R1); |
| __ mov(R0, R2); |
| __ ret(); |
|