// Copyright 2016 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/trace_event/category_registry.h"

#include <string.h>

#include <ostream>
#include <type_traits>

#include "base/check.h"
#include "base/debug/leak_annotations.h"
#include "base/notreached.h"
#include "base/third_party/dynamic_annotations/dynamic_annotations.h"

namespace base {
namespace trace_event {

namespace {

// |categories_| might end up requiring dynamic initializers if it is not POD.
static_assert(std::is_pod_v<TraceCategory>, "TraceCategory must be POD");

}  // namespace
// static
TraceCategory CategoryRegistry::categories_[kMaxCategories] = {
INTERNAL_TRACE_LIST_BUILTIN_CATEGORIES(INTERNAL_TRACE_INIT_CATEGORY)};
// static
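// Built-in categories are pre-registered in the first BuiltinCategories::Size()
// slots of |categories_|, so dynamically created categories are appended after
// them; |category_index_| always holds the number of valid entries.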
std::atomic<size_t> CategoryRegistry::category_index_{
BuiltinCategories::Size()};
// static
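// The meta categories below alias the first three entries of |categories_|
// (the leading entries of the built-in category list).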
TraceCategory* const CategoryRegistry::kCategoryExhausted = &categories_[0];
TraceCategory* const CategoryRegistry::kCategoryAlreadyShutdown =
&categories_[1];
TraceCategory* const CategoryRegistry::kCategoryMetadata = &categories_[2];

// static
void CategoryRegistry::Initialize() {
// Trace is enabled or disabled on one thread while other threads are
// accessing the enabled flag. We don't care whether edge-case events are
// traced or not, so we allow races on the enabled flag to keep the trace
// macros fast.
for (size_t i = 0; i < kMaxCategories; ++i) {
ANNOTATE_BENIGN_RACE(categories_[i].state_ptr(),
"trace_event category enabled");
// If this DCHECK is hit in a test it means that ResetForTesting() is not
// called and the categories state leaks between test fixtures.
DCHECK(!categories_[i].is_enabled());
}
}

// static
void CategoryRegistry::ResetForTesting() {
// reset_for_testing() clears only the enabled state and filters. The
// categories themselves cannot be removed, because the static pointers
// injected by the macros still point to them and cannot be reset.
for (size_t i = 0; i < kMaxCategories; ++i)
categories_[i].reset_for_testing();
}

// static
TraceCategory* CategoryRegistry::GetCategoryByName(const char* category_name) {
DCHECK(!strchr(category_name, '"'))
<< "Category names may not contain double quote";
// |categories_| is append-only, so the fast path can avoid taking a lock.
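// The acquire load pairs with the release store in GetOrCreateCategoryLocked():
// a category whose index is visible here has had its name and state fully
// initialized before being published.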
size_t category_index = category_index_.load(std::memory_order_acquire);
// Search for pre-existing category group.
for (size_t i = 0; i < category_index; ++i) {
if (strcmp(categories_[i].name(), category_name) == 0) {
return &categories_[i];
}
}
return nullptr;
}
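// static
// Returns true if a new category was created for |category_name|, or false if
// the category already existed (or the registry is exhausted); in all cases
// *|category| is set to the resulting entry.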
bool CategoryRegistry::GetOrCreateCategoryLocked(
const char* category_name,
CategoryInitializerFn category_initializer_fn,
TraceCategory** category) {
// This is the slow path: the lock is not held on the fast path
// (GetCategoryByName), so more than one thread could have reached this point
// trying to add the same category.
*category = GetCategoryByName(category_name);
if (*category)
return false;
// Create a new category.
size_t category_index = category_index_.load(std::memory_order_acquire);
if (category_index >= kMaxCategories) {
NOTREACHED() << "must increase kMaxCategories";
*category = kCategoryExhausted;
return false;
}
// TODO(primiano): this strdup should be removed. The only documented reason
// for it was TraceWatchEvent, which is gone. However, something might have
// ended up relying on this. Needs some auditing before removal.
const char* category_name_copy = strdup(category_name);
ANNOTATE_LEAKING_OBJECT_PTR(category_name_copy);
*category = &categories_[category_index];
DCHECK(!(*category)->is_valid());
DCHECK(!(*category)->is_enabled());
(*category)->set_name(category_name_copy);
category_initializer_fn(*category);
// Publish the new category by bumping the index. The release store pairs with
// the acquire loads in GetCategoryByName() and GetAllCategories(), so readers
// that observe the new index also see the fully initialized category.
category_index_.store(category_index + 1, std::memory_order_release);
return true;
}

// static
const TraceCategory* CategoryRegistry::GetCategoryByStatePtr(
const uint8_t* category_state) {
const TraceCategory* category = TraceCategory::FromStatePtr(category_state);
DCHECK(IsValidCategoryPtr(category));
return category;
}

// static
bool CategoryRegistry::IsMetaCategory(const TraceCategory* category) {
DCHECK(IsValidCategoryPtr(category));
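// Meta categories occupy the first slots of |categories_| (up to and including
// kCategoryMetadata), so a simple pointer comparison suffices.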
return category <= kCategoryMetadata;
}

// static
base::span<TraceCategory> CategoryRegistry::GetAllCategories() {
// The |categories_| array is append-only. We only have to guarantee that we
// never return a category that is still being initialized by
// GetOrCreateCategoryLocked().
size_t category_index = category_index_.load(std::memory_order_acquire);
return base::make_span(categories_).first(category_index);
}

// static
bool CategoryRegistry::IsValidCategoryPtr(const TraceCategory* category) {
// If any of these are hit, something has cached a corrupt category pointer.
uintptr_t ptr = reinterpret_cast<uintptr_t>(category);
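// A valid pointer must be pointer-aligned and must lie within the bounds of
// the static |categories_| array.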
return ptr % sizeof(void*) == 0 &&
ptr >= reinterpret_cast<uintptr_t>(&categories_[0]) &&
ptr <= reinterpret_cast<uintptr_t>(&categories_[kMaxCategories - 1]);
}

}  // namespace trace_event
}  // namespace base