; RUN: llc -mtriple=aarch64-windows %s --filetype obj -o /dev/null
; RUN: llc -mtriple=aarch64-windows %s --filetype asm -o - | FileCheck %s

; Check that it doesn't crash and that each instruction in the
; prologue has a corresponding seh directive.
;
; CHECK-NOT: error: Incorrect size for
; CHECK: foo:
; CHECK: .seh_proc foo
; CHECK: sub sp, sp, #288
; CHECK: .seh_stackalloc 288
; CHECK: str x19, [sp] // 8-byte Folded Spill
; CHECK: .seh_save_reg x19, 0
; CHECK: str x21, [sp, #8] // 8-byte Folded Spill
; CHECK: .seh_save_reg x21, 8
; CHECK: stp x23, x24, [sp, #16] // 16-byte Folded Spill
; CHECK: .seh_save_regp x23, 16
; CHECK: stp x25, x26, [sp, #32] // 16-byte Folded Spill
; CHECK: .seh_save_regp x25, 32
; CHECK: stp x27, x28, [sp, #48] // 16-byte Folded Spill
; CHECK: .seh_save_regp x27, 48
; CHECK: stp x29, x30, [sp, #64] // 16-byte Folded Spill
; CHECK: .seh_save_fplr 64
; CHECK: sub sp, sp, #224
; CHECK: .seh_stackalloc 224
; CHECK: .seh_endprologue

target datalayout = "e-m:w-p:64:64-i32:32-i64:64-i128:128-n32:64-S128-Fn32"
target triple = "aarch64-unknown-windows-msvc19.42.34436"

; Aggregate layouts referenced by @foo's GEPs below; the names follow Swift's
; mangling scheme (presumably Swift runtime/stdlib types — not needed beyond
; their sizes and field offsets here).
%swift.refcounted = type { ptr, i64 }
%TScA_pSg = type <{ [16 x i8] }>
%T5repro4TestVSg = type <{ [32 x i8] }>
%T5repro4TestV = type <{ %TSS, %TSS }>
%TSS = type <{ %Ts11_StringGutsV }>
%Ts11_StringGutsV = type <{ %Ts13_StringObjectV }>
%Ts13_StringObjectV = type <{ %Ts6UInt64V, ptr }>
%Ts6UInt64V = type <{ i64 }>

declare swiftcc ptr @swift_task_alloc()

; Tail-call target with a long mixed ptr/i64 argument list; @foo must keep all
; of these values live across the call setup, forcing many callee-saved spills.
declare swifttailcc void @bar(ptr, ptr, i64, i64, i64, ptr, i64, i64, i64, i64, i64, ptr, i64, ptr, i64, ptr, i64, ptr, i64, ptr, i64, ptr, i64, ptr, i64, ptr, i64, ptr, i64, ptr, i64, ptr, i64, ptr, i64, ptr)
; Reduced reproducer: the many loads feeding the musttail call to @bar keep
; enough values live that the prologue spills several callee-saved GPR pairs
; in a split stack allocation (288 + 224 bytes, see CHECK lines above). The
; load ordering and argument list are load-bearing for the register/spill
; pattern being checked — do not simplify.
define swifttailcc void @foo(ptr %0, ptr swiftasync %1, ptr swiftself %2, ptr %3, ptr %._guts2._object._object, ptr %.rid4._guts._object._object, ptr %4, ptr %.idx8, ptr %.idx8._guts._object._object, ptr %5, ptr %.rid9._guts._object._object, ptr %6) {
entry:
  %7 = load i64, ptr null, align 8
  %8 = load i64, ptr %3, align 8
  %9 = getelementptr <{ %swift.refcounted, %TScA_pSg, %TSS, %T5repro4TestVSg, %T5repro4TestV, %TSS, %TSS, %TSS, %T5repro4TestV, %TSS, %T5repro4TestV, %T5repro4TestV, %TSS }>, ptr %2, i32 0, i32 2
  %10 = load i64, ptr %9, align 8
  %11 = load ptr, ptr %1, align 8
  %12 = getelementptr <{ %swift.refcounted, %TScA_pSg, %TSS, %T5repro4TestVSg, %T5repro4TestV, %TSS, %TSS, %TSS, %T5repro4TestV, %TSS, %T5repro4TestV, %T5repro4TestV, %TSS }>, ptr %2, i32 0, i32 3
  %13 = load i64, ptr %.rid9._guts._object._object, align 8
  %14 = load i64, ptr %.idx8._guts._object._object, align 8
  %15 = load i64, ptr %5, align 8
  %16 = getelementptr { i64, i64, i64, i64 }, ptr %12, i32 0, i32 3
  %17 = load i64, ptr %16, align 8
  %18 = getelementptr <{ %swift.refcounted, %TScA_pSg, %TSS, %T5repro4TestVSg, %T5repro4TestV, %TSS, %TSS, %TSS, %T5repro4TestV, %TSS, %T5repro4TestV, %T5repro4TestV, %TSS }>, ptr %2, i32 0, i32 4
  %19 = load i64, ptr %18, align 8
  %.rid._guts._object._object = getelementptr %Ts13_StringObjectV, ptr %18, i32 0, i32 1
  %20 = load ptr, ptr %.rid._guts._object._object, align 8
  %21 = load i64, ptr %.rid4._guts._object._object, align 8
  %22 = load i64, ptr %0, align 8
  %23 = load ptr, ptr %6, align 8
  %24 = load i64, ptr %2, align 8
  %25 = load ptr, ptr %._guts2._object._object, align 8
  %26 = getelementptr <{ %swift.refcounted, %TScA_pSg, %TSS, %T5repro4TestVSg, %T5repro4TestV, %TSS, %TSS, %TSS, %T5repro4TestV, %TSS, %T5repro4TestV, %T5repro4TestV, %TSS }>, ptr %2, i32 0, i32 7
  %27 = load i64, ptr %26, align 8
  %._guts3._object._object = getelementptr %Ts13_StringObjectV, ptr %26, i32 0, i32 1
  %28 = load ptr, ptr %._guts3._object._object, align 8
  %29 = getelementptr <{ %swift.refcounted, %TScA_pSg, %TSS, %T5repro4TestVSg, %T5repro4TestV, %TSS, %TSS, %TSS, %T5repro4TestV, %TSS, %T5repro4TestV, %T5repro4TestV, %TSS }>, ptr %2, i32 0, i32 8
  %30 = load i64, ptr %29, align 8
  %.idx5 = getelementptr %T5repro4TestV, ptr %29, i32 0, i32 1
  %31 = load i64, ptr %.idx5, align 8
  %.idx5._guts._object._object = getelementptr %Ts13_StringObjectV, ptr %.idx5, i32 0, i32 1
  %32 = load ptr, ptr %.idx5._guts._object._object, align 8
  %33 = getelementptr <{ %swift.refcounted, %TScA_pSg, %TSS, %T5repro4TestVSg, %T5repro4TestV, %TSS, %TSS, %TSS, %T5repro4TestV, %TSS, %T5repro4TestV, %T5repro4TestV, %TSS }>, ptr %2, i32 0, i32 9
  %34 = load i64, ptr %33, align 8
  %35 = load i64, ptr %4, align 8
  %36 = load i64, ptr %.idx8, align 8
  %37 = load i64, ptr %1, align 8
  %38 = call swiftcc ptr @swift_task_alloc()
  store ptr null, ptr %3, align 8
  store ptr null, ptr %4, align 8
  musttail call swifttailcc void @bar(ptr null, ptr swiftasync %.rid4._guts._object._object, i64 %7, i64 %8, i64 %10, ptr %5, i64 %13, i64 %14, i64 %15, i64 %17, i64 %19, ptr %20, i64 %21, ptr %.idx8, i64 %22, ptr %23, i64 %24, ptr %25, i64 %27, ptr %28, i64 %30, ptr %.idx8._guts._object._object, i64 %31, ptr %32, i64 %34, ptr %._guts2._object._object, i64 %35, ptr %2, i64 %36, ptr %1, i64 %37, ptr %0, i64 0, ptr null, i64 0, ptr null)
  ret void
}