/*
* Copyright (C) 2008-2024 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include "CacheableIdentifier.h"
#include "CodeBlock.h"
#include "CodeOrigin.h"
#include "InlineCacheCompiler.h"
#include "Instruction.h"
#include "JITStubRoutine.h"
#include "MacroAssembler.h"
#include "Options.h"
#include "RegisterSet.h"
#include "Structure.h"
#include "StructureSet.h"
#include "StructureStubClearingWatchpoint.h"
#include "StubInfoSummary.h"
#include <wtf/Box.h>
#include <wtf/Lock.h>
#include <wtf/TZoneMalloc.h>
namespace JSC {
#if ENABLE(JIT)
namespace DFG {
struct UnlinkedStructureStubInfo;
}
class AccessCase;
class AccessGenerationResult;
class PolymorphicAccess;
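// X-macro listing every access type a structure stub can implement. It is expanded below to
// define the AccessType enum and to count its entries, so new access types only need to be
// added here.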
#define JSC_FOR_EACH_STRUCTURE_STUB_INFO_ACCESS_TYPE(macro) \
macro(GetById) \
macro(GetByIdWithThis) \
macro(GetByIdDirect) \
macro(TryGetById) \
macro(GetByVal) \
macro(GetByValWithThis) \
macro(PutByIdStrict) \
macro(PutByIdSloppy) \
macro(PutByIdDirectStrict) \
macro(PutByIdDirectSloppy) \
macro(PutByValStrict) \
macro(PutByValSloppy) \
macro(PutByValDirectStrict) \
macro(PutByValDirectSloppy) \
macro(DefinePrivateNameByVal) \
macro(DefinePrivateNameById) \
macro(SetPrivateNameByVal) \
macro(SetPrivateNameById) \
macro(InById) \
macro(InByVal) \
macro(HasPrivateName) \
macro(HasPrivateBrand) \
macro(InstanceOf) \
macro(DeleteByIdStrict) \
macro(DeleteByIdSloppy) \
macro(DeleteByValStrict) \
macro(DeleteByValSloppy) \
macro(GetPrivateName) \
macro(GetPrivateNameById) \
macro(CheckPrivateBrand) \
macro(SetPrivateBrand)
enum class AccessType : int8_t {
#define JSC_DEFINE_ACCESS_TYPE(name) name,
JSC_FOR_EACH_STRUCTURE_STUB_INFO_ACCESS_TYPE(JSC_DEFINE_ACCESS_TYPE)
#undef JSC_DEFINE_ACCESS_TYPE
};
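// Count the enum entries by expanding each list element to "+ 1" onto an initial 0.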
#define JSC_INCREMENT_ACCESS_TYPE(name) + 1
static constexpr unsigned numberOfAccessTypes = 0 JSC_FOR_EACH_STRUCTURE_STUB_INFO_ACCESS_TYPE(JSC_INCREMENT_ACCESS_TYPE);
#undef JSC_INCREMENT_ACCESS_TYPE
struct UnlinkedStructureStubInfo;
struct BaselineUnlinkedStructureStubInfo;
class StructureStubInfo {
WTF_MAKE_NONCOPYABLE(StructureStubInfo);
WTF_MAKE_TZONE_ALLOCATED(StructureStubInfo);
public:
StructureStubInfo(AccessType accessType, CodeOrigin codeOrigin)
: codeOrigin(codeOrigin)
, accessType(accessType)
, bufferingCountdown(Options::initialRepatchBufferingCountdown())
{
}
StructureStubInfo()
: StructureStubInfo(AccessType::GetById, { })
{ }
~StructureStubInfo();
void initGetByIdSelf(const ConcurrentJSLockerBase&, CodeBlock*, Structure* inlineAccessBaseStructure, PropertyOffset);
void initArrayLength(const ConcurrentJSLockerBase&);
void initStringLength(const ConcurrentJSLockerBase&);
void initPutByIdReplace(const ConcurrentJSLockerBase&, CodeBlock*, Structure* inlineAccessBaseStructure, PropertyOffset);
void initInByIdSelf(const ConcurrentJSLockerBase&, CodeBlock*, Structure* inlineAccessBaseStructure, PropertyOffset);
AccessGenerationResult addAccessCase(const GCSafeConcurrentJSLocker&, JSGlobalObject*, CodeBlock*, ECMAMode, CacheableIdentifier, RefPtr<AccessCase>);
void reset(const ConcurrentJSLockerBase&, CodeBlock*);
void deref();
void aboutToDie();
void initializeFromUnlinkedStructureStubInfo(VM&, CodeBlock*, const BaselineUnlinkedStructureStubInfo&);
void initializeFromDFGUnlinkedStructureStubInfo(CodeBlock*, const DFG::UnlinkedStructureStubInfo&);
void initializePredefinedRegisters();
DECLARE_VISIT_AGGREGATE;
// Check if the stub has weak references that are dead. If it does, then it resets itself,
// either entirely or just enough to ensure that those dead pointers don't get used anymore.
void visitWeakReferences(const ConcurrentJSLockerBase&, CodeBlock*);
// Propagates marking across cached structure transitions (e.g. marks a transition's new
// structure once its old structure is marked).
template<typename Visitor> void propagateTransitions(Visitor&);
StubInfoSummary summary(const ConcurrentJSLocker&, VM&) const;
static StubInfoSummary summary(const ConcurrentJSLocker&, VM&, const StructureStubInfo*);
CacheableIdentifier identifier() const { return m_identifier; }
bool containsPC(void* pc) const;
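// Size in bytes of the inline (fast-path) code emitted between startLocation and doneLocation.
// Data ICs have no patchable inline code, so their inline size is zero.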
uint32_t inlineCodeSize() const
{
if (useDataIC)
return 0;
int32_t inlineSize = MacroAssembler::differenceBetweenCodePtr(startLocation, doneLocation);
ASSERT(inlineSize >= 0);
return inlineSize;
}
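// On 32-bit targets a JSValue occupies a payload register and a tag register; JSValueRegs
// packages the pair, while on 64-bit targets it wraps a single GPR.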
JSValueRegs valueRegs() const
{
return JSValueRegs(
#if USE(JSVALUE32_64)
m_valueTagGPR,
#endif
m_valueGPR);
}
JSValueRegs propertyRegs() const
{
return JSValueRegs(
#if USE(JSVALUE32_64)
propertyTagGPR(),
#endif
propertyGPR());
}
JSValueRegs baseRegs() const
{
return JSValueRegs(
#if USE(JSVALUE32_64)
m_baseTagGPR,
#endif
m_baseGPR);
}
bool thisValueIsInExtraGPR() const { return accessType == AccessType::GetByIdWithThis || accessType == AccessType::GetByValWithThis; }
#if ASSERT_ENABLED
void checkConsistency();
#else
ALWAYS_INLINE void checkConsistency() { }
#endif
CacheType cacheType() const { return m_cacheType; }
// For accesses that are neither ById nor ByVal: e.g. instanceof, by-index, etc.
ALWAYS_INLINE bool considerRepatchingCacheGeneric(VM& vm, CodeBlock* codeBlock, Structure* structure)
{
// We never cache non-cells.
if (!structure) {
sawNonCell = true;
return false;
}
return considerRepatchingCacheImpl(vm, codeBlock, structure, CacheableIdentifier());
}
ALWAYS_INLINE bool considerRepatchingCacheBy(VM& vm, CodeBlock* codeBlock, Structure* structure, CacheableIdentifier impl)
{
// We never cache non-cells.
if (!structure) {
sawNonCell = true;
return false;
}
return considerRepatchingCacheImpl(vm, codeBlock, structure, impl);
}
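// Used once we have decided to go megamorphic: there is no structure or identifier left to
// buffer, so this just consults the cool-down/repatch heuristics without buffering anything.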
ALWAYS_INLINE bool considerRepatchingCacheMegamorphic(VM& vm)
{
return considerRepatchingCacheImpl(vm, nullptr, nullptr, CacheableIdentifier());
}
Structure* inlineAccessBaseStructure() const
{
return m_inlineAccessBaseStructureID.get();
}
CallLinkInfo* callLinkInfoAt(const ConcurrentJSLocker&, unsigned index, const AccessCase&);
bool useHandlerIC() const { return useDataIC && Options::useHandlerIC(); }
Vector<AccessCase*, 16> listedAccessCases(const AbstractLocker&) const;
private:
AccessGenerationResult upgradeForPolyProtoIfNecessary(const GCSafeConcurrentJSLocker&, VM&, CodeBlock*, const Vector<AccessCase*, 16>&, AccessCase&);
ALWAYS_INLINE bool considerRepatchingCacheImpl(VM& vm, CodeBlock* codeBlock, Structure* structure, CacheableIdentifier impl)
{
AssertNoGC assertNoGC;
// This method is called from the Optimize variants of IC slow paths. The first part of this
// method tries to determine if the Optimize variant should really behave like the
// non-Optimize variant and leave the IC untouched.
//
// If we determine that we should do something to the IC, then the next order of business is
// to determine whether this Structure would impact the IC at all. We know that it won't if we
// have already buffered something on its behalf; that's what the m_bufferedStructures set is
// for.
everConsidered = true;
if (!countdown) {
// Check if we have been doing repatching too frequently. If so, then we should cool off
// for a while.
WTF::incrementWithSaturation(repatchCount);
if (repatchCount > Options::repatchCountForCoolDown()) {
// We've been repatching too much, so don't do it now.
repatchCount = 0;
// The amount of time we require for cool-down depends on the number of times we've
// had to cool down in the past. The relationship is exponential. The max value we
// allow here is 2^8 - 2 (254, since countdown is a uint8_t), leaving room for the
// slow paths to increment the count to indicate that they'd like to temporarily
// skip patching just this once.
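// For example (illustrative only, since initialCoolDownCount() is a tunable VM option): if
// initialCoolDownCount() were 20, successive cool-downs would set countdown to 20 << 0 == 20,
// then 40, 80, 160, and finally saturate at 254.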
countdown = WTF::leftShiftWithSaturation(
static_cast<uint8_t>(Options::initialCoolDownCount()),
numberOfCoolDowns,
static_cast<uint8_t>(std::numeric_limits<uint8_t>::max() - 1));
WTF::incrementWithSaturation(numberOfCoolDowns);
// We may still have had something buffered. Trigger generation now.
bufferingCountdown = 0;
return true;
}
// We don't want to return false due to buffering indefinitely.
if (!bufferingCountdown) {
// Note that when this returns true, it's possible that we will not even get an
// AccessCase because this may cause Repatch.cpp to simply do an in-place
// repatching.
return true;
}
bufferingCountdown--;
if (!structure)
return true;
// Now protect the IC buffering. We want to proceed only if this is a structure that
// we don't already have a case buffered for. Note that if this returns true but the
// bufferingCountdown is not zero then we will buffer the access case for later without
// immediately generating code for it.
//
// NOTE: This will behave oddly for InstanceOf if the user varies the prototype but not
// the base's structure. That seems unlikely for the canonical use of instanceof, where
// the prototype is fixed.
bool isNewlyAdded = false;
StructureID structureID = structure->id();
{
Locker locker { m_bufferedStructuresLock };
if (std::holds_alternative<std::monostate>(m_bufferedStructures)) {
if (m_identifier)
m_bufferedStructures = Vector<StructureID>();
else
m_bufferedStructures = Vector<std::tuple<StructureID, CacheableIdentifier>>();
}
WTF::switchOn(m_bufferedStructures,
[&](std::monostate) { },
[&](Vector<StructureID>& structures) {
for (auto bufferedStructureID : structures) {
if (bufferedStructureID == structureID)
return;
}
structures.append(structureID);
isNewlyAdded = true;
},
[&](Vector<std::tuple<StructureID, CacheableIdentifier>>& structures) {
ASSERT(!m_identifier);
for (auto& [bufferedStructureID, bufferedCacheableIdentifier] : structures) {
if (bufferedStructureID == structureID && bufferedCacheableIdentifier == impl)
return;
}
structures.append(std::tuple { structureID, impl });
isNewlyAdded = true;
});
}
if (isNewlyAdded)
vm.writeBarrier(codeBlock);
return isNewlyAdded;
}
countdown--;
return false;
}
void setCacheType(const ConcurrentJSLockerBase&, CacheType);
void clearBufferedStructures()
{
Locker locker { m_bufferedStructuresLock };
WTF::switchOn(m_bufferedStructures,
[&](std::monostate) { },
[&](Vector<StructureID>& structures) {
structures.shrink(0);
},
[&](Vector<std::tuple<StructureID, CacheableIdentifier>>& structures) {
structures.shrink(0);
});
}
void setInlinedHandler(CodeBlock*, Ref<InlineCacheHandler>&&);
void clearInlinedHandler(CodeBlock*);
void initializeWithUnitHandler(CodeBlock*, Ref<InlineCacheHandler>&&);
void prependHandler(CodeBlock*, Ref<InlineCacheHandler>&&, bool isMegamorphic);
void rewireStubAsJumpInAccess(CodeBlock*, Ref<InlineCacheHandler>&&);
public:
static constexpr ptrdiff_t offsetOfByIdSelfOffset() { return OBJECT_OFFSETOF(StructureStubInfo, byIdSelfOffset); }
static constexpr ptrdiff_t offsetOfInlineAccessBaseStructureID() { return OBJECT_OFFSETOF(StructureStubInfo, m_inlineAccessBaseStructureID); }
static constexpr ptrdiff_t offsetOfInlineHolder() { return OBJECT_OFFSETOF(StructureStubInfo, m_inlineHolder); }
static constexpr ptrdiff_t offsetOfDoneLocation() { return OBJECT_OFFSETOF(StructureStubInfo, doneLocation); }
static constexpr ptrdiff_t offsetOfSlowPathStartLocation() { return OBJECT_OFFSETOF(StructureStubInfo, slowPathStartLocation); }
static constexpr ptrdiff_t offsetOfSlowOperation() { return OBJECT_OFFSETOF(StructureStubInfo, m_slowOperation); }
static constexpr ptrdiff_t offsetOfCountdown() { return OBJECT_OFFSETOF(StructureStubInfo, countdown); }
static constexpr ptrdiff_t offsetOfCallSiteIndex() { return OBJECT_OFFSETOF(StructureStubInfo, callSiteIndex); }
static constexpr ptrdiff_t offsetOfHandler() { return OBJECT_OFFSETOF(StructureStubInfo, m_handler); }
static constexpr ptrdiff_t offsetOfGlobalObject() { return OBJECT_OFFSETOF(StructureStubInfo, m_globalObject); }
JSGlobalObject* globalObject() const { return m_globalObject; }
void resetStubAsJumpInAccess(CodeBlock*);
GPRReg thisGPR() const { return m_extraGPR; }
GPRReg prototypeGPR() const { return m_extraGPR; }
GPRReg brandGPR() const { return m_extraGPR; }
GPRReg propertyGPR() const
{
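// For GetByValWithThis, m_extraGPR already holds the this value (see thisGPR()), so the
// property moves to m_extra2GPR.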
switch (accessType) {
case AccessType::GetByValWithThis:
return m_extra2GPR;
default:
return m_extraGPR;
}
}
#if USE(JSVALUE32_64)
GPRReg thisTagGPR() const { return m_extraTagGPR; }
GPRReg prototypeTagGPR() const { return m_extraTagGPR; }
GPRReg propertyTagGPR() const
{
switch (accessType) {
case AccessType::GetByValWithThis:
return m_extra2TagGPR;
default:
return m_extraTagGPR;
}
}
#endif
CodeOrigin codeOrigin { };
PropertyOffset byIdSelfOffset;
WriteBarrierStructureID m_inlineAccessBaseStructureID;
JSCell* m_inlineHolder { nullptr };
CacheableIdentifier m_identifier;
// This is either the start of the inline IC for *byId caches, or the location of the patchable jump for 'instanceof' caches.
// If useDataIC is true, then it is nullptr.
CodeLocationLabel<JITStubRoutinePtrTag> startLocation;
CodeLocationLabel<JSInternalPtrTag> doneLocation;
CodeLocationLabel<JITStubRoutinePtrTag> slowPathStartLocation;
union {
CodeLocationCall<JSInternalPtrTag> m_slowPathCallLocation;
CodePtr<OperationPtrTag> m_slowOperation;
};
JSGlobalObject* m_globalObject { nullptr };
private:
std::unique_ptr<PolymorphicAccess> m_stub;
RefPtr<InlineCacheHandler> m_inlinedHandler;
RefPtr<InlineCacheHandler> m_handler;
// Represents those structures that already have buffered AccessCases in the PolymorphicAccess.
// Note that it's always safe to clear this. If we clear it prematurely and then see the same
// structure again during this buffering countdown, we will simply create a redundant AccessCase
// object for it. That's not so bad - we'll get rid of the redundant ones once we regenerate.
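// Which alternative is active depends on the cache: ById-style caches with a fixed
// m_identifier buffer bare StructureIDs, while ByVal-style caches (no m_identifier) buffer
// (StructureID, identifier) pairs.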
std::variant<std::monostate, Vector<StructureID>, Vector<std::tuple<StructureID, CacheableIdentifier>>> m_bufferedStructures WTF_GUARDED_BY_LOCK(m_bufferedStructuresLock);
public:
ScalarRegisterSet usedRegisters;
CallSiteIndex callSiteIndex;
GPRReg m_baseGPR { InvalidGPRReg };
GPRReg m_valueGPR { InvalidGPRReg };
GPRReg m_extraGPR { InvalidGPRReg };
GPRReg m_extra2GPR { InvalidGPRReg };
GPRReg m_stubInfoGPR { InvalidGPRReg };
GPRReg m_arrayProfileGPR { InvalidGPRReg };
#if USE(JSVALUE32_64)
GPRReg m_valueTagGPR { InvalidGPRReg };
// FIXME: [32-bits] Check if StructureStubInfo::m_baseTagGPR is used somewhere.
// https://bugs.webkit.org/show_bug.cgi?id=204726
GPRReg m_baseTagGPR { InvalidGPRReg };
GPRReg m_extraTagGPR { InvalidGPRReg };
GPRReg m_extra2TagGPR { InvalidGPRReg };
#endif
AccessType accessType { AccessType::GetById };
private:
CacheType m_cacheType { CacheType::Unset };
public:
CacheType preconfiguredCacheType { CacheType::Unset };
// We repatch only when this is zero; if it is not zero, we decrement it.
// A totally clear stub starts at 1, so we patch it after the first execution.
uint8_t countdown { 1 };
uint8_t repatchCount { 0 };
uint8_t numberOfCoolDowns { 0 };
uint8_t bufferingCountdown;
private:
Lock m_bufferedStructuresLock;
public:
bool resetByGC : 1 { false };
bool tookSlowPath : 1 { false };
bool everConsidered : 1 { false };
bool prototypeIsKnownObject : 1 { false }; // Only relevant for InstanceOf.
bool sawNonCell : 1 { false };
bool propertyIsString : 1 { false };
bool propertyIsInt32 : 1 { false };
bool propertyIsSymbol : 1 { false };
bool canBeMegamorphic : 1 { false };
bool useDataIC : 1 { false };
};
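// A minimal sketch of how an Optimize slow path is expected to drive a StructureStubInfo,
// assuming a hypothetical tryCacheGetBy() helper (the real repatching logic lives in
// Repatch.cpp):
//
//     if (stubInfo.considerRepatchingCacheBy(vm, codeBlock, baseStructure, identifier))
//         tryCacheGetBy(globalObject, codeBlock, stubInfo, ...); // may buffer or generate an AccessCase
//     // otherwise the heuristics asked us to leave the IC untouched this time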
inline CodeOrigin getStructureStubInfoCodeOrigin(StructureStubInfo& structureStubInfo)
{
return structureStubInfo.codeOrigin;
}
inline auto appropriateGetByIdOptimizeFunction(AccessType type) -> decltype(&operationGetByIdOptimize)
{
switch (type) {
case AccessType::GetById:
return operationGetByIdOptimize;
case AccessType::TryGetById:
return operationTryGetByIdOptimize;
case AccessType::GetByIdDirect:
return operationGetByIdDirectOptimize;
case AccessType::GetPrivateNameById:
return operationGetPrivateNameByIdOptimize;
case AccessType::GetByIdWithThis:
default:
ASSERT_NOT_REACHED();
return nullptr;
}
}
inline auto appropriateGetByIdGenericFunction(AccessType type) -> decltype(&operationGetByIdGeneric)
{
switch (type) {
case AccessType::GetById:
return operationGetByIdGeneric;
case AccessType::TryGetById:
return operationTryGetByIdGeneric;
case AccessType::GetByIdDirect:
return operationGetByIdDirectGeneric;
case AccessType::GetPrivateNameById:
return operationGetPrivateNameByIdGeneric;
case AccessType::GetByIdWithThis:
default:
ASSERT_NOT_REACHED();
return nullptr;
}
}
inline auto appropriatePutByIdOptimizeFunction(AccessType type) -> decltype(&operationPutByIdStrictOptimize)
{
switch (type) {
case AccessType::PutByIdStrict:
return operationPutByIdStrictOptimize;
case AccessType::PutByIdSloppy:
return operationPutByIdSloppyOptimize;
case AccessType::PutByIdDirectStrict:
return operationPutByIdDirectStrictOptimize;
case AccessType::PutByIdDirectSloppy:
return operationPutByIdDirectSloppyOptimize;
case AccessType::DefinePrivateNameById:
return operationPutByIdDefinePrivateFieldStrictOptimize;
case AccessType::SetPrivateNameById:
return operationPutByIdSetPrivateFieldStrictOptimize;
default:
break;
}
// Unreachable; the return below keeps the Windows port compiler happy.
RELEASE_ASSERT_NOT_REACHED();
return nullptr;
}
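// ById-style accesses bake a constant identifier into the stub; ByVal-style accesses (and
// instanceof/brand checks) obtain it dynamically, so they buffer identifiers alongside
// structures.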
inline bool hasConstantIdentifier(AccessType accessType)
{
switch (accessType) {
case AccessType::DeleteByValStrict:
case AccessType::DeleteByValSloppy:
case AccessType::GetByVal:
case AccessType::GetPrivateName:
case AccessType::InstanceOf:
case AccessType::InByVal:
case AccessType::HasPrivateName:
case AccessType::HasPrivateBrand:
case AccessType::GetByValWithThis:
case AccessType::PutByValStrict:
case AccessType::PutByValSloppy:
case AccessType::PutByValDirectStrict:
case AccessType::PutByValDirectSloppy:
case AccessType::DefinePrivateNameByVal:
case AccessType::SetPrivateNameByVal:
case AccessType::SetPrivateBrand:
case AccessType::CheckPrivateBrand:
return false;
case AccessType::DeleteByIdStrict:
case AccessType::DeleteByIdSloppy:
case AccessType::InById:
case AccessType::TryGetById:
case AccessType::GetByIdDirect:
case AccessType::GetById:
case AccessType::GetPrivateNameById:
case AccessType::GetByIdWithThis:
case AccessType::PutByIdStrict:
case AccessType::PutByIdSloppy:
case AccessType::PutByIdDirectStrict:
case AccessType::PutByIdDirectSloppy:
case AccessType::DefinePrivateNameById:
case AccessType::SetPrivateNameById:
return true;
}
return false;
}
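// Unlinked stub infos describe an IC site independently of any particular CodeBlock; they are
// materialized into a StructureStubInfo when code is linked (see
// initializeFromUnlinkedStructureStubInfo above).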
struct UnlinkedStructureStubInfo {
AccessType accessType;
CacheType preconfiguredCacheType { CacheType::Unset };
bool propertyIsInt32 : 1 { false };
bool propertyIsString : 1 { false };
bool propertyIsSymbol : 1 { false };
bool prototypeIsKnownObject : 1 { false };
bool canBeMegamorphic : 1 { false };
CacheableIdentifier m_identifier; // This always comes from an already-marked identifier, so we do not need to mark it via GC.
CodeLocationLabel<JSInternalPtrTag> doneLocation;
CodeLocationLabel<JITStubRoutinePtrTag> slowPathStartLocation;
};
struct BaselineUnlinkedStructureStubInfo : JSC::UnlinkedStructureStubInfo {
BytecodeIndex bytecodeIndex;
};
using StubInfoMap = UncheckedKeyHashMap<CodeOrigin, StructureStubInfo*, CodeOriginApproximateHash>;
#endif
} // namespace JSC
namespace WTF {
#if ENABLE(JIT)
template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::AccessType> : public IntHash<JSC::AccessType> { };
template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::AccessType> : public StrongEnumHashTraits<JSC::AccessType> { };
#endif
} // namespace WTF