File: slot_reuse.cpp

//===-- slot_reuse.cpp ------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "gwp_asan/tests/harness.h"

#include <set>

// Allocates a single byte, checks that the pointer is non-null, owned by the
// pool, and has the expected size, then frees it.
void singleByteGoodAllocDealloc(gwp_asan::GuardedPoolAllocator *GPA) {
  void *Ptr = GPA->allocate(1);
  EXPECT_NE(nullptr, Ptr);
  EXPECT_TRUE(GPA->pointerIsMine(Ptr));
  EXPECT_EQ(1u, GPA->getSize(Ptr));
  GPA->deallocate(Ptr);
}

// With only 1, 2, or 127 slots but 128 allocate/free cycles per test, these
// tests pass only if previously freed (quarantined) slots are handed out
// again.
TEST_F(CustomGuardedPoolAllocator, EnsureReuseOfQuarantine1) {
  InitNumSlots(1);
  for (unsigned i = 0; i < 128; ++i)
    singleByteGoodAllocDealloc(&GPA);
}

TEST_F(CustomGuardedPoolAllocator, EnsureReuseOfQuarantine2) {
  InitNumSlots(2);
  for (unsigned i = 0; i < 128; ++i)
    singleByteGoodAllocDealloc(&GPA);
}

TEST_F(CustomGuardedPoolAllocator, EnsureReuseOfQuarantine127) {
  InitNumSlots(127);
  for (unsigned i = 0; i < 128; ++i)
    singleByteGoodAllocDealloc(&GPA);
}

// This test ensures that slots are not reused ahead of time. We improve
// use-after-free detection by not reusing any slot until every slot in the
// pool has been allocated at least once. This is done by handing out slots
// left-to-right until each slot has been used once, at which point random
// selection takes over. (A standalone sketch of this policy appears after the
// tests at the end of this file.)
void runNoReuseBeforeNecessary(gwp_asan::GuardedPoolAllocator *GPA,
                               unsigned PoolSize) {
  std::set<void *> Ptrs;
  for (unsigned i = 0; i < PoolSize; ++i) {
    void *Ptr = GPA->allocate(1);

    EXPECT_TRUE(GPA->pointerIsMine(Ptr));
    EXPECT_EQ(0u, Ptrs.count(Ptr));

    Ptrs.insert(Ptr);
    GPA->deallocate(Ptr);
  }
}

TEST_F(CustomGuardedPoolAllocator, NoReuseBeforeNecessary2) {
  constexpr unsigned kPoolSize = 2;
  InitNumSlots(kPoolSize);
  runNoReuseBeforeNecessary(&GPA, kPoolSize);
}

TEST_F(CustomGuardedPoolAllocator, NoReuseBeforeNecessary128) {
  constexpr unsigned kPoolSize = 128;
  InitNumSlots(kPoolSize);
  runNoReuseBeforeNecessary(&GPA, kPoolSize);
}

TEST_F(CustomGuardedPoolAllocator, NoReuseBeforeNecessary129) {
  constexpr unsigned kPoolSize = 129;
  InitNumSlots(kPoolSize);
  runNoReuseBeforeNecessary(&GPA, kPoolSize);
}
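
// ----------------------------------------------------------------------------
// Illustrative sketch (not part of the upstream test file): a minimal toy
// model of the slot-selection policy described above. It assumes slots are
// handed out left-to-right until each one has been used once, after which a
// free slot is chosen at random. All names below are hypothetical; this is
// not the allocator's actual implementation.
// ----------------------------------------------------------------------------
#include <cassert>
#include <cstddef>
#include <random>
#include <vector>

class ToySlotPicker {
public:
  explicit ToySlotPicker(size_t NumSlots) : Free(NumSlots, true) {}

  // Returns the index of the slot to use for the next allocation.
  size_t pick() {
    // Sequential phase: hand out every slot once, left-to-right, even if
    // earlier slots have already been freed.
    if (NextFresh < Free.size()) {
      size_t Slot = NextFresh++;
      Free[Slot] = false;
      return Slot;
    }
    // Random phase: choose uniformly among the currently free slots.
    std::vector<size_t> Candidates;
    for (size_t I = 0; I < Free.size(); ++I)
      if (Free[I])
        Candidates.push_back(I);
    assert(!Candidates.empty() && "toy pool exhausted");
    size_t Slot = Candidates[std::uniform_int_distribution<size_t>(
        0, Candidates.size() - 1)(Rng)];
    Free[Slot] = false;
    return Slot;
  }

  // Marks a slot as available again (the real allocator would also record
  // deallocation metadata for use-after-free reporting).
  void release(size_t Slot) { Free[Slot] = true; }

private:
  std::vector<bool> Free;
  size_t NextFresh = 0;
  std::mt19937 Rng{0};
};

// Mirrors runNoReuseBeforeNecessary against the toy model: the first NumSlots
// picks are all distinct even when each slot is released immediately, because
// the sequential phase ignores freed slots.
inline void toyNoReuseBeforeNecessary(size_t NumSlots) {
  ToySlotPicker Picker(NumSlots);
  std::set<size_t> Seen;
  for (size_t I = 0; I < NumSlots; ++I) {
    size_t Slot = Picker.pick();
    assert(Seen.count(Slot) == 0);
    Seen.insert(Slot);
    Picker.release(Slot);
  }
}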