File: vec-in-place.rs

//@ ignore-std-debug-assertions (FIXME: checks for call detect scoped noalias metadata)
//@ compile-flags: -O -Z merge-functions=disabled
#![crate_type = "lib"]

// Ensure that trivial casts of Vec elements are O(1): the in-place collect specialization should
// reuse the allocation, so no per-element loop is emitted (see the illustrative sketch at the end
// of this file).

pub struct Wrapper<T>(T);

// previously repr(C) caused the optimization to fail
#[repr(C)]
pub struct Foo {
    a: u64,
    b: u64,
    c: u64,
    d: u64,
}

// implementing Copy exercises the TrustedRandomAccess specialization inside the in-place
// specialization
#[derive(Copy, Clone)]
pub struct Bar {
    a: u64,
    b: u64,
    c: u64,
    d: u64,
}

// this exercises the try-fold codepath
pub struct Baz {
    a: u64,
    b: u64,
    c: u64,
    d: u64,
}

// CHECK-LABEL: @vec_iterator_cast_primitive
#[no_mangle]
pub fn vec_iterator_cast_primitive(vec: Vec<i8>) -> Vec<u8> {
    // CHECK-NOT: loop
    // CHECK: call
    // CHECK-SAME: void @llvm.assume(i1 %{{.+}})
    // CHECK-NOT: loop
    // CHECK-NOT: call
    vec.into_iter().map(|e| e as u8).collect()
}

// CHECK-LABEL: @vec_iterator_cast_wrapper
#[no_mangle]
pub fn vec_iterator_cast_wrapper(vec: Vec<u8>) -> Vec<Wrapper<u8>> {
    // CHECK-NOT: loop
    // CHECK: call
    // CHECK-SAME: void @llvm.assume(i1 %{{.+}})
    // CHECK-NOT: loop
    // CHECK-NOT: call
    vec.into_iter().map(|e| Wrapper(e)).collect()
}

// CHECK-LABEL: @vec_iterator_cast_signed
#[no_mangle]
pub fn vec_iterator_cast_signed(vec: Vec<i32>) -> Vec<u32> {
    // CHECK-NOT: and i{{[0-9]+}} %{{.*}}, {{[0-9]+}}
    vec.into_iter().map(|e| u32::from_ne_bytes(e.to_ne_bytes())).collect()
}

// CHECK-LABEL: @vec_iterator_cast_signed_nested
#[no_mangle]
pub fn vec_iterator_cast_signed_nested(vec: Vec<Vec<i32>>) -> Vec<Vec<u32>> {
    // CHECK-NOT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}}
    // CHECK-NOT: %{{.*}} = udiv
    vec.into_iter()
        .map(|e| e.into_iter().map(|e| u32::from_ne_bytes(e.to_ne_bytes())).collect())
        .collect()
}

// CHECK-LABEL: @vec_iterator_cast_unwrap
#[no_mangle]
pub fn vec_iterator_cast_unwrap(vec: Vec<Wrapper<u8>>) -> Vec<u8> {
    // CHECK-NOT: loop
    // CHECK: call
    // CHECK-SAME: void @llvm.assume(i1 %{{.+}})
    // CHECK-NOT: loop
    // CHECK-NOT: call
    vec.into_iter().map(|e| e.0).collect()
}

// CHECK-LABEL: @vec_iterator_cast_aggregate
#[no_mangle]
pub fn vec_iterator_cast_aggregate(vec: Vec<[u64; 4]>) -> Vec<Foo> {
    // CHECK-NOT: loop
    // CHECK: call
    // CHECK-SAME: void @llvm.assume(i1 %{{.+}})
    // CHECK-NOT: loop
    // CHECK-NOT: call
    vec.into_iter().map(|e| unsafe { std::mem::transmute(e) }).collect()
}

// CHECK-LABEL: @vec_iterator_cast_deaggregate_tra
#[no_mangle]
pub fn vec_iterator_cast_deaggregate_tra(vec: Vec<Bar>) -> Vec<[u64; 4]> {
    // CHECK-NOT: loop
    // CHECK: call
    // CHECK-SAME: void @llvm.assume(i1 %{{.+}})
    // CHECK-NOT: loop
    // CHECK-NOT: call

    // Safety: For the purpose of this test we assume that the layout of Bar matches [u64; 4].
    // This is currently not guaranteed for repr(Rust) types, but it happens to work here, and
    // the UCG may add additional guarantees for homogeneous types in the future that would make
    // this correct.
    vec.into_iter().map(|e| unsafe { std::mem::transmute(e) }).collect()
}
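
// Illustrative sketch (not part of the FileCheck assertions above): the layout assumption in the
// Safety comment can be avoided by building each array field by field instead of transmuting.
// That is always sound for repr(Rust) types, though it is not the no-op cast that the in-place
// specialization above is meant to exercise. Kept under #[cfg(test)] so it does not affect the
// IR that this codegen test inspects.
#[cfg(test)]
#[test]
fn bar_to_array_without_layout_assumptions() {
    let bars = vec![Bar { a: 1, b: 2, c: 3, d: 4 }, Bar { a: 5, b: 6, c: 7, d: 8 }];
    let arrays: Vec<[u64; 4]> = bars.into_iter().map(|e| [e.a, e.b, e.c, e.d]).collect();
    assert_eq!(arrays, vec![[1, 2, 3, 4], [5, 6, 7, 8]]);
}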

// CHECK-LABEL: @vec_iterator_cast_deaggregate_fold
#[no_mangle]
pub fn vec_iterator_cast_deaggregate_fold(vec: Vec<Baz>) -> Vec<[u64; 4]> {
    // CHECK-NOT: loop
    // CHECK: call
    // CHECK-SAME: void @llvm.assume(i1 %{{.+}})
    // CHECK-NOT: loop
    // CHECK-NOT: call

    // Safety: For the purpose of this test we assume that the layout of Baz matches [u64; 4].
    // This is currently not guaranteed for repr(Rust) types, but it happens to work here, and
    // the UCG may add additional guarantees for homogeneous types in the future that would make
    // this correct.
    vec.into_iter().map(|e| unsafe { std::mem::transmute(e) }).collect()
}

// CHECK-LABEL: @vec_iterator_cast_unwrap_drop
#[no_mangle]
pub fn vec_iterator_cast_unwrap_drop(vec: Vec<Wrapper<String>>) -> Vec<String> {
    // CHECK-NOT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}}
    // CHECK-NOT: %{{.*}} = mul
    // CHECK-NOT: %{{.*}} = udiv
    // CHECK: call
    // CHECK-SAME: void @llvm.assume(i1 %{{.+}})
    // CHECK-NOT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}}
    // CHECK-NOT: call
    // CHECK-NOT: %{{.*}} = mul
    // CHECK-NOT: %{{.*}} = udiv

    vec.into_iter().map(|Wrapper(e)| e).collect()
}

// CHECK-LABEL: @vec_iterator_cast_wrap_drop
#[no_mangle]
pub fn vec_iterator_cast_wrap_drop(vec: Vec<String>) -> Vec<Wrapper<String>> {
    // CHECK-NOT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}}
    // CHECK-NOT: %{{.*}} = mul
    // CHECK-NOT: %{{.*}} = udiv
    // CHECK: call
    // CHECK-SAME: void @llvm.assume(i1 %{{.+}})
    // CHECK-NOT: br i1 %{{.*}}, label %{{.*}}, label %{{.*}}
    // CHECK-NOT: call
    // CHECK-NOT: %{{.*}} = mul
    // CHECK-NOT: %{{.*}} = udiv
    // CHECK: ret void

    vec.into_iter().map(Wrapper).collect()
}
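
// Illustrative sketch (not checked by FileCheck): one observable consequence of the in-place
// collect specialization exercised above is that the destination Vec can reuse the source
// allocation, so the buffer pointer does not change. This is an implementation detail of the
// current standard library rather than a documented guarantee, and the pointer-equality
// assertion below relies on that assumption. Kept under #[cfg(test)] so it does not affect the
// IR that this codegen test inspects.
#[cfg(test)]
mod in_place_illustration {
    #[test]
    fn trivial_cast_reuses_allocation() {
        let src: Vec<i8> = vec![-1, 0, 1, 127];
        let src_ptr = src.as_ptr() as usize;
        // Same trivial cast as vec_iterator_cast_primitive above.
        let dst: Vec<u8> = src.into_iter().map(|e| e as u8).collect();
        // Assumption: the in-place specialization applies, so no new allocation is made.
        assert_eq!(dst.as_ptr() as usize, src_ptr);
        assert_eq!(dst, vec![255, 0, 1, 127]);
    }
}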