      # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -debugify-and-strip-all-safe -mtriple aarch64 -O0 -run-pass=aarch64-prelegalizer-combiner --aarch64prelegalizercombinerhelper-only-enable-rule="load_or_combine" -global-isel -verify-machineinstrs %s -o - | FileCheck %s --check-prefix=NOT_STRICT
# RUN: llc -debugify-and-strip-all-safe -mattr=+strict-align -mtriple aarch64 -O0 -run-pass=aarch64-prelegalizer-combiner --aarch64prelegalizercombinerhelper-only-enable-rule="load_or_combine" -global-isel -verify-machineinstrs %s -o - | FileCheck %s --check-prefix=STRICT
# REQUIRES: asserts
# Check that the load-or combine respects alignment requirements.
...
---
# The two s16 zextloads are only 2-byte aligned, so folding them into a
# single s32 load yields an under-aligned 4-byte access. Without
# +strict-align the combine is still performed (NOT_STRICT checks);
# with +strict-align it must be rejected and the original
# zextload/shl/or sequence preserved (STRICT checks).
name:            misaligned
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0, $x1
    ; NOT_STRICT-LABEL: name: misaligned
    ; NOT_STRICT: liveins: $x0, $x1
    ; NOT_STRICT: %ptr:_(p0) = COPY $x1
    ; NOT_STRICT: %full_load:_(s32) = G_LOAD %ptr(p0) :: (load (s32), align 2)
    ; NOT_STRICT: $w1 = COPY %full_load(s32)
    ; NOT_STRICT: RET_ReallyLR implicit $w1
    ; STRICT-LABEL: name: misaligned
    ; STRICT: liveins: $x0, $x1
    ; STRICT: %cst_1:_(s64) = G_CONSTANT i64 1
    ; STRICT: %cst_16:_(s32) = G_CONSTANT i32 16
    ; STRICT: %ptr:_(p0) = COPY $x1
    ; STRICT: %ptr_elt_1:_(p0) = G_PTR_ADD %ptr, %cst_1(s64)
    ; STRICT: %low_half:_(s32) = G_ZEXTLOAD %ptr(p0) :: (load (s16))
    ; STRICT: %elt1:_(s32) = G_ZEXTLOAD %ptr_elt_1(p0) :: (load (s16))
    ; STRICT: %high_half:_(s32) = nuw G_SHL %elt1, %cst_16(s32)
    ; STRICT: %full_load:_(s32) = G_OR %low_half, %high_half
    ; STRICT: $w1 = COPY %full_load(s32)
    ; STRICT: RET_ReallyLR implicit $w1
    ; Input pattern: or(zextload(ptr), shl(zextload(ptr+1), 16)), i.e. a
    ; little-endian s32 load split into two adjacent 2-byte-aligned halves.
    %cst_1:_(s64) = G_CONSTANT i64 1
    %cst_16:_(s32) = G_CONSTANT i32 16
    %ptr:_(p0) = COPY $x1
    %ptr_elt_1:_(p0) = G_PTR_ADD %ptr, %cst_1(s64)
    %low_half:_(s32) = G_ZEXTLOAD %ptr(p0) :: (load (s16), align 2)
    %elt1:_(s32) = G_ZEXTLOAD %ptr_elt_1(p0) :: (load (s16), align 2)
    %high_half:_(s32) = nuw G_SHL %elt1, %cst_16(s32)
    %full_load:_(s32) = G_OR %low_half, %high_half
    $w1 = COPY %full_load(s32)
    RET_ReallyLR implicit $w1
...
---
# Both s16 zextloads carry align 4, so the combined s32 load is
# naturally aligned; the combine must fire both with and without
# +strict-align (NOT_STRICT and STRICT expect the same G_LOAD).
name:            aligned
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0, $x1
    ; NOT_STRICT-LABEL: name: aligned
    ; NOT_STRICT: liveins: $x0, $x1
    ; NOT_STRICT: %ptr:_(p0) = COPY $x1
    ; NOT_STRICT: %full_load:_(s32) = G_LOAD %ptr(p0) :: (load (s32))
    ; NOT_STRICT: $w1 = COPY %full_load(s32)
    ; NOT_STRICT: RET_ReallyLR implicit $w1
    ; STRICT-LABEL: name: aligned
    ; STRICT: liveins: $x0, $x1
    ; STRICT: %ptr:_(p0) = COPY $x1
    ; STRICT: %full_load:_(s32) = G_LOAD %ptr(p0) :: (load (s32))
    ; STRICT: $w1 = COPY %full_load(s32)
    ; STRICT: RET_ReallyLR implicit $w1
    ; Same or/shl/zextload pattern as @misaligned, but with align 4 on
    ; the component loads.
    %cst_1:_(s64) = G_CONSTANT i64 1
    %cst_16:_(s32) = G_CONSTANT i32 16
    %ptr:_(p0) = COPY $x1
    %ptr_elt_1:_(p0) = G_PTR_ADD %ptr, %cst_1(s64)
    %low_half:_(s32) = G_ZEXTLOAD %ptr(p0) :: (load (s16), align 4)
    %elt1:_(s32) = G_ZEXTLOAD %ptr_elt_1(p0) :: (load (s16), align 4)
    %high_half:_(s32) = nuw G_SHL %elt1, %cst_16(s32)
    %full_load:_(s32) = G_OR %low_half, %high_half
    $w1 = COPY %full_load(s32)
    RET_ReallyLR implicit $w1
...