diff --git a/deps/include/OWNERS b/deps/include/OWNERS index 535040c5..9a2d99e8 100644 --- a/deps/include/OWNERS +++ b/deps/include/OWNERS @@ -19,5 +19,4 @@ per-file v8-version.h=v8-ci-autoroll-builder@chops-service-accounts.iam.gservice # For branch updates: per-file v8-version.h=file:../INFRA_OWNERS -per-file v8-version.h=hablich@chromium.org per-file v8-version.h=vahl@chromium.org diff --git a/deps/include/cppgc/DEPS b/deps/include/cppgc/DEPS index 861d1187..2ec7ebbd 100644 --- a/deps/include/cppgc/DEPS +++ b/deps/include/cppgc/DEPS @@ -2,6 +2,7 @@ include_rules = [ "-include", "+v8config.h", "+v8-platform.h", + "+v8-source-location.h", "+cppgc", "-src", "+libplatform/libplatform.h", diff --git a/deps/include/cppgc/allocation.h b/deps/include/cppgc/allocation.h index 69883fb3..cdd3fd48 100644 --- a/deps/include/cppgc/allocation.h +++ b/deps/include/cppgc/allocation.h @@ -47,7 +47,7 @@ namespace internal { // Similar to C++17 std::align_val_t; enum class AlignVal : size_t {}; -class V8_EXPORT MakeGarbageCollectedTraitInternal { +class MakeGarbageCollectedTraitInternal { protected: static inline void MarkObjectAsFullyConstructed(const void* payload) { // See api_constants for an explanation of the constants. 
@@ -121,16 +121,15 @@ class V8_EXPORT MakeGarbageCollectedTraitInternal { }; private: - static void* CPPGC_DEFAULT_ALIGNED Allocate(cppgc::AllocationHandle&, size_t, - GCInfoIndex); - static void* CPPGC_DOUBLE_WORD_ALIGNED Allocate(cppgc::AllocationHandle&, - size_t, AlignVal, - GCInfoIndex); - static void* CPPGC_DEFAULT_ALIGNED Allocate(cppgc::AllocationHandle&, size_t, - GCInfoIndex, CustomSpaceIndex); - static void* CPPGC_DOUBLE_WORD_ALIGNED Allocate(cppgc::AllocationHandle&, - size_t, AlignVal, GCInfoIndex, - CustomSpaceIndex); + V8_EXPORT static void* CPPGC_DEFAULT_ALIGNED + Allocate(cppgc::AllocationHandle&, size_t, GCInfoIndex); + V8_EXPORT static void* CPPGC_DOUBLE_WORD_ALIGNED + Allocate(cppgc::AllocationHandle&, size_t, AlignVal, GCInfoIndex); + V8_EXPORT static void* CPPGC_DEFAULT_ALIGNED + Allocate(cppgc::AllocationHandle&, size_t, GCInfoIndex, CustomSpaceIndex); + V8_EXPORT static void* CPPGC_DOUBLE_WORD_ALIGNED + Allocate(cppgc::AllocationHandle&, size_t, AlignVal, GCInfoIndex, + CustomSpaceIndex); friend class HeapObjectHeader; }; diff --git a/deps/include/cppgc/cross-thread-persistent.h b/deps/include/cppgc/cross-thread-persistent.h index 1fa28afa..a5f8bac0 100644 --- a/deps/include/cppgc/cross-thread-persistent.h +++ b/deps/include/cppgc/cross-thread-persistent.h @@ -148,10 +148,11 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase, template ::value>> BasicCrossThreadPersistent( internal::BasicMember + MemberCheckingPolicy, MemberStorageType> member, const SourceLocation& loc = SourceLocation::Current()) : BasicCrossThreadPersistent(member.Get(), loc) {} @@ -230,10 +231,11 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase, // Assignment from member. 
template ::value>> BasicCrossThreadPersistent& operator=( internal::BasicMember + MemberCheckingPolicy, MemberStorageType> member) { return operator=(member.Get()); } diff --git a/deps/include/cppgc/default-platform.h b/deps/include/cppgc/default-platform.h index a27871cc..07ce55bb 100644 --- a/deps/include/cppgc/default-platform.h +++ b/deps/include/cppgc/default-platform.h @@ -37,11 +37,12 @@ class V8_EXPORT DefaultPlatform : public Platform { return v8_platform_->MonotonicallyIncreasingTime(); } - std::shared_ptr GetForegroundTaskRunner() override { + std::shared_ptr GetForegroundTaskRunner( + TaskPriority priority) override { // V8's default platform creates a new task runner when passed the // `v8::Isolate` pointer the first time. For non-default platforms this will // require getting the appropriate task runner. - return v8_platform_->GetForegroundTaskRunner(kNoIsolate); + return v8_platform_->GetForegroundTaskRunner(kNoIsolate, priority); } std::unique_ptr PostJob( diff --git a/deps/include/cppgc/garbage-collected.h b/deps/include/cppgc/garbage-collected.h index 6737c8be..dfd758a3 100644 --- a/deps/include/cppgc/garbage-collected.h +++ b/deps/include/cppgc/garbage-collected.h @@ -94,6 +94,14 @@ class GarbageCollectedMixin { public: using IsGarbageCollectedMixinTypeMarker = void; + // Must use MakeGarbageCollected. + void* operator new(size_t) = delete; + void* operator new[](size_t) = delete; + // The garbage collector is taking care of reclaiming the object. + // Not overriding the non-array variant of `delete` to not conflict with the + // operator in GarbageCollected above. + void operator delete[](void*) = delete; + /** * This Trace method must be overriden by objects inheriting from * GarbageCollectedMixin.
diff --git a/deps/include/cppgc/heap-consistency.h b/deps/include/cppgc/heap-consistency.h index 35c59ed1..23b5d909 100644 --- a/deps/include/cppgc/heap-consistency.h +++ b/deps/include/cppgc/heap-consistency.h @@ -62,10 +62,10 @@ class HeapConsistency final { * \returns whether a write barrier is needed and which barrier to invoke. */ template + typename CheckingPolicy, typename StorageType> static V8_INLINE WriteBarrierType GetWriteBarrierType( const internal::BasicMember& value, + CheckingPolicy, StorageType>& value, WriteBarrierParams& params) { return internal::WriteBarrier::GetWriteBarrierType( value.GetRawSlot(), value.GetRawStorage(), params); @@ -114,7 +114,7 @@ class HeapConsistency final { * has not yet been processed. * * \param params The parameters retrieved from `GetWriteBarrierType()`. - * \param object The pointer to the object. May be an interior pointer to a + * \param object The pointer to the object. May be an interior pointer to * an interface of the actual object. */ static V8_INLINE void DijkstraWriteBarrier(const WriteBarrierParams& params, diff --git a/deps/include/cppgc/heap-statistics.h b/deps/include/cppgc/heap-statistics.h index 5e389874..c357f916 100644 --- a/deps/include/cppgc/heap-statistics.h +++ b/deps/include/cppgc/heap-statistics.h @@ -102,6 +102,8 @@ struct HeapStatistics final { size_t resident_size_bytes = 0; /** Amount of memory actually used on the heap. */ size_t used_size_bytes = 0; + /** Memory retained in the page pool, not used directly by the heap. */ + size_t pooled_memory_size_bytes = 0; /** Detail level of this HeapStatistics. */ DetailLevel detail_level; diff --git a/deps/include/cppgc/internal/api-constants.h b/deps/include/cppgc/internal/api-constants.h index 023426e9..8a0bb082 100644 --- a/deps/include/cppgc/internal/api-constants.h +++ b/deps/include/cppgc/internal/api-constants.h @@ -30,9 +30,14 @@ static constexpr size_t kFullyConstructedBitFieldOffsetFromPayload = // Mask for in-construction bit. 
static constexpr uint16_t kFullyConstructedBitMask = uint16_t{1}; -static constexpr size_t kPageSize = size_t{1} << 17; +static constexpr size_t kPageSizeBits = 17; +static constexpr size_t kPageSize = size_t{1} << kPageSizeBits; -#if defined(V8_TARGET_ARCH_ARM64) && defined(V8_OS_MACOS) +#if defined(V8_HOST_ARCH_ARM64) && defined(V8_OS_DARWIN) +constexpr size_t kGuardPageSize = 0; +#elif defined(V8_HOST_ARCH_PPC64) +constexpr size_t kGuardPageSize = 0; +#elif defined(V8_HOST_ARCH_LOONG64) || defined(V8_HOST_ARCH_MIPS64) constexpr size_t kGuardPageSize = 0; #else constexpr size_t kGuardPageSize = 4096; @@ -40,13 +45,32 @@ constexpr size_t kGuardPageSize = 4096; static constexpr size_t kLargeObjectSizeThreshold = kPageSize / 2; +#if defined(CPPGC_POINTER_COMPRESSION) +#if defined(CPPGC_ENABLE_LARGER_CAGE) +constexpr unsigned kPointerCompressionShift = 3; +#else // !defined(CPPGC_ENABLE_LARGER_CAGE) +constexpr unsigned kPointerCompressionShift = 1; +#endif // !defined(CPPGC_ENABLE_LARGER_CAGE) +#endif // !defined(CPPGC_POINTER_COMPRESSION) + #if defined(CPPGC_CAGED_HEAP) #if defined(CPPGC_2GB_CAGE) -constexpr size_t kCagedHeapReservationSize = static_cast(2) * kGB; -#else // !defined(CPPGC_2GB_CAGE) -constexpr size_t kCagedHeapReservationSize = static_cast(4) * kGB; +constexpr size_t kCagedHeapDefaultReservationSize = + static_cast(2) * kGB; +constexpr size_t kCagedHeapMaxReservationSize = + kCagedHeapDefaultReservationSize; +#else // !defined(CPPGC_2GB_CAGE) +constexpr size_t kCagedHeapDefaultReservationSize = + static_cast(4) * kGB; +#if defined(CPPGC_POINTER_COMPRESSION) +constexpr size_t kCagedHeapMaxReservationSize = + size_t{1} << (31 + kPointerCompressionShift); +#else // !defined(CPPGC_POINTER_COMPRESSION) +constexpr size_t kCagedHeapMaxReservationSize = + kCagedHeapDefaultReservationSize; +#endif // !defined(CPPGC_POINTER_COMPRESSION) #endif // !defined(CPPGC_2GB_CAGE) -constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize; +constexpr 
size_t kCagedHeapReservationAlignment = kCagedHeapMaxReservationSize; #endif // defined(CPPGC_CAGED_HEAP) static constexpr size_t kDefaultAlignment = sizeof(void*); @@ -57,6 +81,9 @@ static constexpr size_t kMaxSupportedAlignment = 2 * kDefaultAlignment; // Granularity of heap allocations. constexpr size_t kAllocationGranularity = sizeof(void*); +// Default cacheline size. +constexpr size_t kCachelineSize = 64; + } // namespace api_constants } // namespace internal diff --git a/deps/include/cppgc/internal/caged-heap-local-data.h b/deps/include/cppgc/internal/caged-heap-local-data.h index 7d689f87..1eb87dfb 100644 --- a/deps/include/cppgc/internal/caged-heap-local-data.h +++ b/deps/include/cppgc/internal/caged-heap-local-data.h @@ -46,7 +46,11 @@ class V8_EXPORT AgeTable final { enum class AdjacentCardsPolicy : uint8_t { kConsider, kIgnore }; static constexpr size_t kCardSizeInBytes = - api_constants::kCagedHeapReservationSize / kRequiredSize; + api_constants::kCagedHeapDefaultReservationSize / kRequiredSize; + + static constexpr size_t CalculateAgeTableSizeForHeapSize(size_t heap_size) { + return heap_size / kCardSizeInBytes; + } void SetAge(uintptr_t cage_offset, Age age) { table_[card(cage_offset)] = age; @@ -81,16 +85,18 @@ class V8_EXPORT AgeTable final { #endif // !V8_HAS_BUILTIN_CTZ static_assert((1 << kGranularityBits) == kCardSizeInBytes); const size_t entry = offset >> kGranularityBits; - CPPGC_DCHECK(table_.size() > entry); + CPPGC_DCHECK(CagedHeapBase::GetAgeTableSize() > entry); return entry; } - std::array table_; +#if defined(V8_CC_GNU) + // gcc disallows flexible arrays in otherwise empty classes. 
+ Age table_[0]; +#else // !defined(V8_CC_GNU) + Age table_[]; +#endif // !defined(V8_CC_GNU) }; -static_assert(sizeof(AgeTable) == 1 * api_constants::kMB, - "Size of AgeTable is 1MB"); - #endif // CPPGC_YOUNG_GENERATION struct CagedHeapLocalData final { @@ -98,6 +104,10 @@ struct CagedHeapLocalData final { return *reinterpret_cast(CagedHeapBase::GetBase()); } + static constexpr size_t CalculateLocalDataSizeForHeapSize(size_t heap_size) { + return AgeTable::CalculateAgeTableSizeForHeapSize(heap_size); + } + #if defined(CPPGC_YOUNG_GENERATION) AgeTable age_table; #endif diff --git a/deps/include/cppgc/internal/caged-heap.h b/deps/include/cppgc/internal/caged-heap.h index 4db42aee..0c987a95 100644 --- a/deps/include/cppgc/internal/caged-heap.h +++ b/deps/include/cppgc/internal/caged-heap.h @@ -33,24 +33,31 @@ class V8_EXPORT CagedHeapBase { V8_INLINE static bool AreWithinCage(const void* addr1, const void* addr2) { #if defined(CPPGC_2GB_CAGE) - static constexpr size_t kHalfWordShift = sizeof(uint32_t) * CHAR_BIT - 1; + static constexpr size_t kHeapBaseShift = sizeof(uint32_t) * CHAR_BIT - 1; #else //! defined(CPPGC_2GB_CAGE) - static constexpr size_t kHalfWordShift = sizeof(uint32_t) * CHAR_BIT; +#if defined(CPPGC_POINTER_COMPRESSION) + static constexpr size_t kHeapBaseShift = + 31 + api_constants::kPointerCompressionShift; +#else // !defined(CPPGC_POINTER_COMPRESSION) + static constexpr size_t kHeapBaseShift = sizeof(uint32_t) * CHAR_BIT; +#endif // !defined(CPPGC_POINTER_COMPRESSION) #endif //! 
defined(CPPGC_2GB_CAGE) - static_assert((static_cast(1) << kHalfWordShift) == - api_constants::kCagedHeapReservationSize); + static_assert((static_cast(1) << kHeapBaseShift) == + api_constants::kCagedHeapMaxReservationSize); CPPGC_DCHECK(g_heap_base_); return !(((reinterpret_cast(addr1) ^ g_heap_base_) | (reinterpret_cast(addr2) ^ g_heap_base_)) >> - kHalfWordShift); + kHeapBaseShift); } V8_INLINE static uintptr_t GetBase() { return g_heap_base_; } + V8_INLINE static size_t GetAgeTableSize() { return g_age_table_size_; } private: friend class CagedHeap; static uintptr_t g_heap_base_; + static size_t g_age_table_size_; }; } // namespace internal diff --git a/deps/include/cppgc/internal/compiler-specific.h b/deps/include/cppgc/internal/compiler-specific.h index 595b6398..175156ca 100644 --- a/deps/include/cppgc/internal/compiler-specific.h +++ b/deps/include/cppgc/internal/compiler-specific.h @@ -5,6 +5,8 @@ #ifndef INCLUDE_CPPGC_INTERNAL_COMPILER_SPECIFIC_H_ #define INCLUDE_CPPGC_INTERNAL_COMPILER_SPECIFIC_H_ +#include "v8config.h" // NOLINT(build/include_directory) + namespace cppgc { #if defined(__has_attribute) @@ -21,7 +23,13 @@ namespace cppgc { // [[no_unique_address]] comes in C++20 but supported in clang with -std >= // c++11. -#if CPPGC_HAS_CPP_ATTRIBUTE(no_unique_address) +#if defined(V8_CC_MSVC) && CPPGC_HAS_CPP_ATTRIBUTE(msvc::no_unique_address) +// Unfortunately MSVC ignores [[no_unique_address]] (see +// https://devblogs.microsoft.com/cppblog/msvc-cpp20-and-the-std-cpp20-switch/#msvc-extensions-and-abi), +// and clang-cl matches it for ABI compatibility reasons. We need to prefer +// [[msvc::no_unique_address]] when available if we actually want any effect. 
+#define CPPGC_NO_UNIQUE_ADDRESS [[msvc::no_unique_address]] +#elif CPPGC_HAS_CPP_ATTRIBUTE(no_unique_address) #define CPPGC_NO_UNIQUE_ADDRESS [[no_unique_address]] #else #define CPPGC_NO_UNIQUE_ADDRESS diff --git a/deps/include/cppgc/internal/gc-info.h b/deps/include/cppgc/internal/gc-info.h index 08ffd411..d52710b3 100644 --- a/deps/include/cppgc/internal/gc-info.h +++ b/deps/include/cppgc/internal/gc-info.h @@ -24,89 +24,89 @@ struct V8_EXPORT EnsureGCInfoIndexTrait final { // Acquires a new GC info object and updates `registered_index` with the index // that identifies that new info accordingly. template - V8_INLINE static void EnsureIndex( + V8_INLINE static GCInfoIndex EnsureIndex( std::atomic& registered_index) { - EnsureGCInfoIndexTraitDispatch{}(registered_index); + return EnsureGCInfoIndexTraitDispatch{}(registered_index); } private: - template ::value, - bool = FinalizerTrait::HasFinalizer(), + template ::HasFinalizer(), bool = NameTrait::HasNonHiddenName()> struct EnsureGCInfoIndexTraitDispatch; - static void V8_PRESERVE_MOST - EnsureGCInfoIndexPolymorphic(std::atomic&, TraceCallback, - FinalizationCallback, NameCallback); - static void V8_PRESERVE_MOST EnsureGCInfoIndexPolymorphic( + static GCInfoIndex V8_PRESERVE_MOST + EnsureGCInfoIndex(std::atomic&, TraceCallback, + FinalizationCallback, NameCallback); + static GCInfoIndex V8_PRESERVE_MOST EnsureGCInfoIndex( std::atomic&, TraceCallback, FinalizationCallback); - static void V8_PRESERVE_MOST EnsureGCInfoIndexPolymorphic( - std::atomic&, TraceCallback, NameCallback); - static void V8_PRESERVE_MOST - EnsureGCInfoIndexPolymorphic(std::atomic&, TraceCallback); - static void V8_PRESERVE_MOST - EnsureGCInfoIndexNonPolymorphic(std::atomic&, TraceCallback, - FinalizationCallback, NameCallback); - static void V8_PRESERVE_MOST EnsureGCInfoIndexNonPolymorphic( - std::atomic&, TraceCallback, FinalizationCallback); - static void V8_PRESERVE_MOST EnsureGCInfoIndexNonPolymorphic( - std::atomic&, TraceCallback, 
NameCallback); - static void V8_PRESERVE_MOST - EnsureGCInfoIndexNonPolymorphic(std::atomic&, TraceCallback); + static GCInfoIndex V8_PRESERVE_MOST + EnsureGCInfoIndex(std::atomic&, TraceCallback, NameCallback); + static GCInfoIndex V8_PRESERVE_MOST + EnsureGCInfoIndex(std::atomic&, TraceCallback); }; -#define DISPATCH(is_polymorphic, has_finalizer, has_non_hidden_name, function) \ - template \ - struct EnsureGCInfoIndexTrait::EnsureGCInfoIndexTraitDispatch< \ - T, is_polymorphic, has_finalizer, has_non_hidden_name> { \ - V8_INLINE void operator()(std::atomic& registered_index) { \ - function; \ - } \ +#define DISPATCH(has_finalizer, has_non_hidden_name, function) \ + template \ + struct EnsureGCInfoIndexTrait::EnsureGCInfoIndexTraitDispatch< \ + T, has_finalizer, has_non_hidden_name> { \ + V8_INLINE GCInfoIndex \ + operator()(std::atomic& registered_index) { \ + return function; \ + } \ }; -// --------------------------------------------------------------------- // -// DISPATCH(is_polymorphic, has_finalizer, has_non_hidden_name, function) -// --------------------------------------------------------------------- // -DISPATCH(true, true, true, // - EnsureGCInfoIndexPolymorphic(registered_index, // - TraceTrait::Trace, // - FinalizerTrait::kCallback, // - NameTrait::GetName)) // -DISPATCH(true, true, false, // - EnsureGCInfoIndexPolymorphic(registered_index, // - TraceTrait::Trace, // - FinalizerTrait::kCallback)) // -DISPATCH(true, false, true, // - EnsureGCInfoIndexPolymorphic(registered_index, // - TraceTrait::Trace, // - NameTrait::GetName)) // -DISPATCH(true, false, false, // - EnsureGCInfoIndexPolymorphic(registered_index, // - TraceTrait::Trace)) // -DISPATCH(false, true, true, // - EnsureGCInfoIndexNonPolymorphic(registered_index, // - TraceTrait::Trace, // - FinalizerTrait::kCallback, // - NameTrait::GetName)) // -DISPATCH(false, true, false, // - EnsureGCInfoIndexNonPolymorphic(registered_index, // - TraceTrait::Trace, // - FinalizerTrait::kCallback)) // 
-DISPATCH(false, false, true, // - EnsureGCInfoIndexNonPolymorphic(registered_index, // - TraceTrait::Trace, // - NameTrait::GetName)) // -DISPATCH(false, false, false, // - EnsureGCInfoIndexNonPolymorphic(registered_index, // - TraceTrait::Trace)) // +// ------------------------------------------------------- // +// DISPATCH(has_finalizer, has_non_hidden_name, function) // +// ------------------------------------------------------- // +DISPATCH(true, true, // + EnsureGCInfoIndex(registered_index, // + TraceTrait::Trace, // + FinalizerTrait::kCallback, // + NameTrait::GetName)) // +DISPATCH(true, false, // + EnsureGCInfoIndex(registered_index, // + TraceTrait::Trace, // + FinalizerTrait::kCallback)) // +DISPATCH(false, true, // + EnsureGCInfoIndex(registered_index, // + TraceTrait::Trace, // + NameTrait::GetName)) // +DISPATCH(false, false, // + EnsureGCInfoIndex(registered_index, // + TraceTrait::Trace)) // #undef DISPATCH +// Trait determines how the garbage collector treats objects wrt. to traversing, +// finalization, and naming. +template +struct GCInfoTrait final { + V8_INLINE static GCInfoIndex Index() { + static_assert(sizeof(T), "T must be fully defined"); + static std::atomic + registered_index; // Uses zero initialization. + GCInfoIndex index = registered_index.load(std::memory_order_acquire); + if (V8_UNLIKELY(!index)) { + index = EnsureGCInfoIndexTrait::EnsureIndex(registered_index); + CPPGC_DCHECK(index != 0); + CPPGC_DCHECK(index == registered_index.load(std::memory_order_acquire)); + } + return index; + } + + static constexpr void CheckCallbacksAreDefined() { + // No USE() macro available. + (void)static_cast(TraceTrait::Trace); + (void)static_cast(FinalizerTrait::kCallback); + (void)static_cast(NameTrait::GetName); + } +}; + // Fold types based on finalizer behavior. Note that finalizer characteristics // align with trace behavior, i.e., destructors are virtual when trace methods // are and vice versa. 
template -struct GCInfoFolding { +struct GCInfoFolding final { static constexpr bool kHasVirtualDestructorAtBase = std::has_virtual_destructor::value; static constexpr bool kBothTypesAreTriviallyDestructible = @@ -121,34 +121,27 @@ struct GCInfoFolding { static constexpr bool kWantsDetailedObjectNames = false; #endif // !CPPGC_SUPPORTS_OBJECT_NAMES - // Folding would regresses name resolution when deriving names from C++ - // class names as it would just folds a name to the base class name. - using ResultType = std::conditional_t<(kHasVirtualDestructorAtBase || - kBothTypesAreTriviallyDestructible || - kHasCustomFinalizerDispatchAtBase) && - !kWantsDetailedObjectNames, - ParentMostGarbageCollectedType, T>; -}; - -// Trait determines how the garbage collector treats objects wrt. to traversing, -// finalization, and naming. -template -struct GCInfoTrait final { - V8_INLINE static GCInfoIndex Index() { - static_assert(sizeof(T), "T must be fully defined"); - static std::atomic - registered_index; // Uses zero initialization. - GCInfoIndex index = registered_index.load(std::memory_order_acquire); - if (V8_UNLIKELY(!index)) { - EnsureGCInfoIndexTrait::EnsureIndex(registered_index); - // Slow path call uses V8_PRESERVE_MOST which does not support return - // values (also preserves RAX). Avoid out parameter by just reloading the - // value here which at this point is guaranteed to be set. - index = registered_index.load(std::memory_order_acquire); - CPPGC_DCHECK(index != 0); + // Always true. Forces the compiler to resolve callbacks which ensures that + // both modes don't break without requiring compiling a separate + // configuration. Only a single GCInfo (for `ResultType` below) will actually + // be instantiated but existence (and well-formedness) of all callbacks is + // checked. 
+ static constexpr bool WantToFold() { + if constexpr ((kHasVirtualDestructorAtBase || + kBothTypesAreTriviallyDestructible || + kHasCustomFinalizerDispatchAtBase) && + !kWantsDetailedObjectNames) { + GCInfoTrait::CheckCallbacksAreDefined(); + GCInfoTrait::CheckCallbacksAreDefined(); + return true; } - return index; + return false; } + + // Folding would regress name resolution when deriving names from C++ + // class names as it would just fold a name to the base class name. + using ResultType = + std::conditional_t; }; } // namespace internal diff --git a/deps/include/cppgc/internal/member-storage.h b/deps/include/cppgc/internal/member-storage.h index 0eb63820..2b5bfec4 100644 --- a/deps/include/cppgc/internal/member-storage.h +++ b/deps/include/cppgc/internal/member-storage.h @@ -17,6 +17,11 @@ namespace cppgc { namespace internal { +enum class WriteBarrierSlotType { + kCompressed, + kUncompressed, +}; + #if defined(CPPGC_POINTER_COMPRESSION) #if defined(__clang__) @@ -30,16 +35,16 @@ namespace internal { #define CPPGC_REQUIRE_CONSTANT_INIT #endif // defined(__clang__) -class CageBaseGlobal final { +class V8_EXPORT CageBaseGlobal final { public: V8_INLINE CPPGC_CONST static uintptr_t Get() { CPPGC_DCHECK(IsBaseConsistent()); - return g_base_; + return g_base_.base; } V8_INLINE CPPGC_CONST static bool IsSet() { CPPGC_DCHECK(IsBaseConsistent()); - return (g_base_ & ~kLowerHalfWordMask) != 0; + return (g_base_.base & ~kLowerHalfWordMask) != 0; } private: @@ -47,12 +52,15 @@ class CageBaseGlobal final { static constexpr uintptr_t kLowerHalfWordMask = (api_constants::kCagedHeapReservationAlignment - 1); - static V8_EXPORT uintptr_t g_base_ CPPGC_REQUIRE_CONSTANT_INIT; + static union alignas(api_constants::kCachelineSize) Base { + uintptr_t base; + char cache_line[api_constants::kCachelineSize]; + } g_base_ CPPGC_REQUIRE_CONSTANT_INIT; CageBaseGlobal() = delete; V8_INLINE static bool IsBaseConsistent() { - return kLowerHalfWordMask == (g_base_ & kLowerHalfWordMask); +
return kLowerHalfWordMask == (g_base_.base & kLowerHalfWordMask); } friend class CageBaseGlobalUpdater; @@ -64,6 +72,8 @@ class CageBaseGlobal final { class V8_TRIVIAL_ABI CompressedPointer final { public: using IntegralType = uint32_t; + static constexpr auto kWriteBarrierSlotType = + WriteBarrierSlotType::kCompressed; V8_INLINE CompressedPointer() : value_(0u) {} V8_INLINE explicit CompressedPointer(const void* ptr) @@ -112,17 +122,22 @@ class V8_TRIVIAL_ABI CompressedPointer final { } static V8_INLINE IntegralType Compress(const void* ptr) { - static_assert( - SentinelPointer::kSentinelValue == 0b10, - "The compression scheme relies on the sentinel encoded as 0b10"); + static_assert(SentinelPointer::kSentinelValue == + 1 << api_constants::kPointerCompressionShift, + "The compression scheme relies on the sentinel encoded as 1 " + "<< kPointerCompressionShift"); static constexpr size_t kGigaCageMask = ~(api_constants::kCagedHeapReservationAlignment - 1); + static constexpr size_t kPointerCompressionShiftMask = + (1 << api_constants::kPointerCompressionShift) - 1; CPPGC_DCHECK(CageBaseGlobal::IsSet()); const uintptr_t base = CageBaseGlobal::Get(); CPPGC_DCHECK(!ptr || ptr == kSentinelPointer || (base & kGigaCageMask) == (reinterpret_cast(ptr) & kGigaCageMask)); + CPPGC_DCHECK( + (reinterpret_cast(ptr) & kPointerCompressionShiftMask) == 0); #if defined(CPPGC_2GB_CAGE) // Truncate the pointer. @@ -130,8 +145,9 @@ class V8_TRIVIAL_ABI CompressedPointer final { static_cast(reinterpret_cast(ptr)); #else // !defined(CPPGC_2GB_CAGE) const auto uptr = reinterpret_cast(ptr); - // Shift the pointer by one and truncate. - auto compressed = static_cast(uptr >> 1); + // Shift the pointer and truncate. + auto compressed = static_cast( + uptr >> api_constants::kPointerCompressionShift); #endif // !defined(CPPGC_2GB_CAGE) // Normal compressed pointers must have the MSB set. 
CPPGC_DCHECK((!compressed || compressed == kCompressedSentinel) || @@ -142,14 +158,21 @@ class V8_TRIVIAL_ABI CompressedPointer final { static V8_INLINE void* Decompress(IntegralType ptr) { CPPGC_DCHECK(CageBaseGlobal::IsSet()); const uintptr_t base = CageBaseGlobal::Get(); + return Decompress(ptr, base); + } + + static V8_INLINE void* Decompress(IntegralType ptr, uintptr_t base) { + CPPGC_DCHECK(CageBaseGlobal::IsSet()); + CPPGC_DCHECK(base == CageBaseGlobal::Get()); // Treat compressed pointer as signed and cast it to uint64_t, which will // sign-extend it. #if defined(CPPGC_2GB_CAGE) const uint64_t mask = static_cast(static_cast(ptr)); #else // !defined(CPPGC_2GB_CAGE) - // Then, shift the result by one. It's important to shift the unsigned + // Then, shift the result. It's important to shift the unsigned // value, as otherwise it would result in undefined behavior. - const uint64_t mask = static_cast(static_cast(ptr)) << 1; + const uint64_t mask = static_cast(static_cast(ptr)) + << api_constants::kPointerCompressionShift; #endif // !defined(CPPGC_2GB_CAGE) return reinterpret_cast(mask & base); } @@ -160,7 +183,8 @@ class V8_TRIVIAL_ABI CompressedPointer final { SentinelPointer::kSentinelValue; #else // !defined(CPPGC_2GB_CAGE) static constexpr IntegralType kCompressedSentinel = - SentinelPointer::kSentinelValue >> 1; + SentinelPointer::kSentinelValue >> + api_constants::kPointerCompressionShift; #endif // !defined(CPPGC_2GB_CAGE) // All constructors initialize `value_`. 
Do not add a default value here as it // results in a non-atomic write on some builds, even when the atomic version @@ -173,6 +197,8 @@ class V8_TRIVIAL_ABI CompressedPointer final { class V8_TRIVIAL_ABI RawPointer final { public: using IntegralType = uintptr_t; + static constexpr auto kWriteBarrierSlotType = + WriteBarrierSlotType::kUncompressed; V8_INLINE RawPointer() : ptr_(nullptr) {} V8_INLINE explicit RawPointer(const void* ptr) : ptr_(ptr) {} @@ -225,9 +251,9 @@ class V8_TRIVIAL_ABI RawPointer final { }; #if defined(CPPGC_POINTER_COMPRESSION) -using MemberStorage = CompressedPointer; +using DefaultMemberStorage = CompressedPointer; #else // !defined(CPPGC_POINTER_COMPRESSION) -using MemberStorage = RawPointer; +using DefaultMemberStorage = RawPointer; #endif // !defined(CPPGC_POINTER_COMPRESSION) } // namespace internal diff --git a/deps/include/cppgc/internal/name-trait.h b/deps/include/cppgc/internal/name-trait.h index 1d927a9d..6be29855 100644 --- a/deps/include/cppgc/internal/name-trait.h +++ b/deps/include/cppgc/internal/name-trait.h @@ -121,7 +121,11 @@ class NameTrait final : public NameTraitBase { #undef PRETTY_FUNCTION_VALUE #else // !CPPGC_SUPPORTS_OBJECT_NAMES - return {NameProvider::kHiddenName, true}; + // We wanted to use a class name but were unable to provide one due to + // compiler limitations or build configuration. As such, return the hidden + // name with name_was_hidden=false, which will cause this object to be + // visible in the snapshot. + return {NameProvider::kHiddenName, false}; #endif // !CPPGC_SUPPORTS_OBJECT_NAMES } }; diff --git a/deps/include/cppgc/internal/pointer-policies.h b/deps/include/cppgc/internal/pointer-policies.h index e67da040..06fa884f 100644 --- a/deps/include/cppgc/internal/pointer-policies.h +++ b/deps/include/cppgc/internal/pointer-policies.h @@ -33,10 +33,11 @@ struct DijkstraWriteBarrierPolicy { // barrier doesn't break the tri-color invariant. 
} + template V8_INLINE static void AssigningBarrier(const void* slot, const void* value) { #ifdef CPPGC_SLIM_WRITE_BARRIER if (V8_UNLIKELY(WriteBarrier::IsEnabled())) - WriteBarrier::CombinedWriteBarrierSlow(slot); + WriteBarrier::CombinedWriteBarrierSlow(slot); #else // !CPPGC_SLIM_WRITE_BARRIER WriteBarrier::Params params; const WriteBarrier::Type type = @@ -45,11 +46,32 @@ struct DijkstraWriteBarrierPolicy { #endif // !CPPGC_SLIM_WRITE_BARRIER } + template + V8_INLINE static void AssigningBarrier(const void* slot, RawPointer storage) { + static_assert( + SlotType == WriteBarrierSlotType::kUncompressed, + "Assigning storages of Member and UncompressedMember is not supported"); +#ifdef CPPGC_SLIM_WRITE_BARRIER + if (V8_UNLIKELY(WriteBarrier::IsEnabled())) + WriteBarrier::CombinedWriteBarrierSlow(slot); +#else // !CPPGC_SLIM_WRITE_BARRIER + WriteBarrier::Params params; + const WriteBarrier::Type type = + WriteBarrier::GetWriteBarrierType(slot, storage, params); + WriteBarrier(type, params, slot, storage.Load()); +#endif // !CPPGC_SLIM_WRITE_BARRIER + } + +#if defined(CPPGC_POINTER_COMPRESSION) + template V8_INLINE static void AssigningBarrier(const void* slot, - MemberStorage storage) { + CompressedPointer storage) { + static_assert( + SlotType == WriteBarrierSlotType::kCompressed, + "Assigning storages of Member and UncompressedMember is not supported"); #ifdef CPPGC_SLIM_WRITE_BARRIER if (V8_UNLIKELY(WriteBarrier::IsEnabled())) - WriteBarrier::CombinedWriteBarrierSlow(slot); + WriteBarrier::CombinedWriteBarrierSlow(slot); #else // !CPPGC_SLIM_WRITE_BARRIER WriteBarrier::Params params; const WriteBarrier::Type type = @@ -57,6 +79,7 @@ struct DijkstraWriteBarrierPolicy { WriteBarrier(type, params, slot, storage.Load()); #endif // !CPPGC_SLIM_WRITE_BARRIER } +#endif // defined(CPPGC_POINTER_COMPRESSION) private: V8_INLINE static void WriteBarrier(WriteBarrier::Type type, @@ -78,7 +101,9 @@ struct DijkstraWriteBarrierPolicy { struct NoWriteBarrierPolicy { V8_INLINE 
static void InitializingBarrier(const void*, const void*) {} + template V8_INLINE static void AssigningBarrier(const void*, const void*) {} + template V8_INLINE static void AssigningBarrier(const void*, MemberStorage) {} }; @@ -207,7 +232,8 @@ template class BasicPersistent; template + typename CheckingPolicy = DefaultMemberCheckingPolicy, + typename StorageType = DefaultMemberStorage> class BasicMember; } // namespace internal diff --git a/deps/include/cppgc/internal/write-barrier.h b/deps/include/cppgc/internal/write-barrier.h index 80c6ee33..566724d3 100644 --- a/deps/include/cppgc/internal/write-barrier.h +++ b/deps/include/cppgc/internal/write-barrier.h @@ -70,6 +70,7 @@ class V8_EXPORT WriteBarrier final { static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value, Params& params); // Returns the required write barrier for a given `slot` and `value`. + template static V8_INLINE Type GetWriteBarrierType(const void* slot, MemberStorage, Params& params); // Returns the required write barrier for a given `slot`. @@ -83,6 +84,7 @@ class V8_EXPORT WriteBarrier final { // A write barrier that combines `GenerationalBarrier()` and // `DijkstraMarkingBarrier()`. We only pass a single parameter here to clobber // as few registers as possible. 
+ template static V8_NOINLINE void V8_PRESERVE_MOST CombinedWriteBarrierSlow(const void* slot); #endif // CPPGC_SLIM_WRITE_BARRIER @@ -171,7 +173,8 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final { return ValueModeDispatch::Get(slot, value, params, callback); } - template + template static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value, WriteBarrier::Params& params, HeapHandleCallback callback) { @@ -215,7 +218,7 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final { template <> struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch< WriteBarrier::ValueMode::kValuePresent> { - template + template static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage storage, WriteBarrier::Params& params, @@ -313,11 +316,9 @@ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final { } template - static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value, + static V8_INLINE WriteBarrier::Type Get(const void* slot, RawPointer value, WriteBarrier::Params& params, HeapHandleCallback callback) { - // `MemberStorage` will always be `RawPointer` for non-caged heap builds. - // Just convert to `void*` in this case. return ValueModeDispatch::Get(slot, value.Load(), params, callback); } @@ -391,6 +392,7 @@ WriteBarrier::Type WriteBarrier::GetWriteBarrierType( } // static +template WriteBarrier::Type WriteBarrier::GetWriteBarrierType( const void* slot, MemberStorage value, WriteBarrier::Params& params) { return WriteBarrierTypePolicy::Get(slot, value, diff --git a/deps/include/cppgc/macros.h b/deps/include/cppgc/macros.h index 030f397e..a9ac22d7 100644 --- a/deps/include/cppgc/macros.h +++ b/deps/include/cppgc/macros.h @@ -11,7 +11,10 @@ namespace cppgc { -// Use if the object is only stack allocated. +// Use CPPGC_STACK_ALLOCATED if the object is only stack allocated. 
+// Add the CPPGC_STACK_ALLOCATED_IGNORE annotation on a case-by-case basis when +// enforcement of CPPGC_STACK_ALLOCATED should be suppressed. +#if defined(__clang__) #define CPPGC_STACK_ALLOCATED() \ public: \ using IsStackAllocatedTypeMarker CPPGC_UNUSED = int; \ @@ -20,6 +23,12 @@ namespace cppgc { void* operator new(size_t) = delete; \ void* operator new(size_t, void*) = delete; \ static_assert(true, "Force semicolon.") +#define CPPGC_STACK_ALLOCATED_IGNORE(bug_or_reason) \ + __attribute__((annotate("stack_allocated_ignore"))) +#else // !defined(__clang__) +#define CPPGC_STACK_ALLOCATED() static_assert(true, "Force semicolon.") +#define CPPGC_STACK_ALLOCATED_IGNORE(bug_or_reason) +#endif // !defined(__clang__) } // namespace cppgc diff --git a/deps/include/cppgc/member.h b/deps/include/cppgc/member.h index 9bc38363..457f163b 100644 --- a/deps/include/cppgc/member.h +++ b/deps/include/cppgc/member.h @@ -28,13 +28,11 @@ namespace internal { // MemberBase always refers to the object as const object and defers to // BasicMember on casting to the right type as needed. +template class V8_TRIVIAL_ABI MemberBase { public: -#if defined(CPPGC_POINTER_COMPRESSION) - using RawStorage = CompressedPointer; -#else // !defined(CPPGC_POINTER_COMPRESSION) - using RawStorage = RawPointer; -#endif // !defined(CPPGC_POINTER_COMPRESSION) + using RawStorage = StorageType; + protected: struct AtomicInitializerTag {}; @@ -75,16 +73,19 @@ class V8_TRIVIAL_ABI MemberBase { // The basic class from which all Member classes are 'generated'. 
template -class V8_TRIVIAL_ABI BasicMember final : private MemberBase, + typename CheckingPolicy, typename StorageType> +class V8_TRIVIAL_ABI BasicMember final : private MemberBase, private CheckingPolicy { + using Base = MemberBase; + public: using PointeeType = T; + using RawStorage = typename Base::RawStorage; V8_INLINE constexpr BasicMember() = default; - V8_INLINE constexpr BasicMember(std::nullptr_t) {} // NOLINT - V8_INLINE BasicMember(SentinelPointer s) : MemberBase(s) {} // NOLINT - V8_INLINE BasicMember(T* raw) : MemberBase(raw) { // NOLINT + V8_INLINE constexpr BasicMember(std::nullptr_t) {} // NOLINT + V8_INLINE BasicMember(SentinelPointer s) : Base(s) {} // NOLINT + V8_INLINE BasicMember(T* raw) : Base(raw) { // NOLINT InitializingWriteBarrier(raw); this->CheckPointer(Get()); } @@ -94,13 +95,13 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, // Atomic ctor. Using the AtomicInitializerTag forces BasicMember to // initialize using atomic assignments. This is required for preventing // data races with concurrent marking. 
- using AtomicInitializerTag = MemberBase::AtomicInitializerTag; + using AtomicInitializerTag = typename Base::AtomicInitializerTag; V8_INLINE BasicMember(std::nullptr_t, AtomicInitializerTag atomic) - : MemberBase(nullptr, atomic) {} + : Base(nullptr, atomic) {} V8_INLINE BasicMember(SentinelPointer s, AtomicInitializerTag atomic) - : MemberBase(s, atomic) {} + : Base(s, atomic) {} V8_INLINE BasicMember(T* raw, AtomicInitializerTag atomic) - : MemberBase(raw, atomic) { + : Base(raw, atomic) { InitializingWriteBarrier(raw); this->CheckPointer(Get()); } @@ -119,7 +120,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, std::enable_if_t>* = nullptr> V8_INLINE BasicMember( // NOLINT const BasicMember& other) + OtherCheckingPolicy, StorageType>& other) : BasicMember(other.GetRawStorage()) {} template >* = nullptr> V8_INLINE BasicMember( // NOLINT const BasicMember& other) + OtherCheckingPolicy, StorageType>& other) : BasicMember(other.Get()) {} // Move ctor. @@ -142,8 +143,9 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, template >* = nullptr> - V8_INLINE BasicMember(BasicMember&& other) noexcept + V8_INLINE BasicMember( + BasicMember&& other) noexcept : BasicMember(other.GetRawStorage()) { other.Clear(); } @@ -151,8 +153,9 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, template >* = nullptr> - V8_INLINE BasicMember(BasicMember&& other) noexcept + V8_INLINE BasicMember( + BasicMember&& other) noexcept : BasicMember(other.Get()) { other.Clear(); } @@ -179,7 +182,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, typename OtherCheckingPolicy> V8_INLINE BasicMember& operator=( const BasicMember& other) { + OtherCheckingPolicy, StorageType>& other) { if constexpr (internal::IsDecayedSameV) { return operator=(other.GetRawStorage()); } else { @@ -201,8 +204,8 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, template V8_INLINE BasicMember& operator=( - BasicMember&& other) noexcept { + BasicMember&& 
other) noexcept { if constexpr (internal::IsDecayedSameV) { operator=(other.GetRawStorage()); } else { @@ -226,7 +229,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, } V8_INLINE BasicMember& operator=(T* other) { - SetRawAtomic(other); + Base::SetRawAtomic(other); AssigningWriteBarrier(other); this->CheckPointer(Get()); return *this; @@ -237,20 +240,20 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, return *this; } V8_INLINE BasicMember& operator=(SentinelPointer s) { - SetRawAtomic(s); + Base::SetRawAtomic(s); return *this; } template V8_INLINE void Swap(BasicMember& other) { + OtherCheckingPolicy, StorageType>& other) { auto tmp = GetRawStorage(); *this = other; other = tmp; } - V8_INLINE explicit operator bool() const { return !IsCleared(); } + V8_INLINE explicit operator bool() const { return !Base::IsCleared(); } V8_INLINE operator T*() const { return Get(); } V8_INLINE T* operator->() const { return Get(); } V8_INLINE T& operator*() const { return *Get(); } @@ -264,10 +267,12 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, // The const_cast below removes the constness from MemberBase storage. The // following static_cast re-adds any constness if specified through the // user-visible template parameter T. 
- return static_cast(const_cast(MemberBase::GetRaw())); + return static_cast(const_cast(Base::GetRaw())); } - V8_INLINE void Clear() { SetRawStorageAtomic(RawStorage{}); } + V8_INLINE void Clear() { + Base::SetRawStorageAtomic(RawStorage{}); + } V8_INLINE T* Release() { T* result = Get(); @@ -276,41 +281,44 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, } V8_INLINE const T** GetSlotForTesting() const { - return reinterpret_cast(GetRawSlot()); + return reinterpret_cast(Base::GetRawSlot()); } V8_INLINE RawStorage GetRawStorage() const { - return MemberBase::GetRawStorage(); + return Base::GetRawStorage(); } private: - V8_INLINE explicit BasicMember(RawStorage raw) : MemberBase(raw) { + V8_INLINE explicit BasicMember(RawStorage raw) : Base(raw) { InitializingWriteBarrier(Get()); this->CheckPointer(Get()); } V8_INLINE BasicMember& operator=(RawStorage other) { - SetRawStorageAtomic(other); + Base::SetRawStorageAtomic(other); AssigningWriteBarrier(); this->CheckPointer(Get()); return *this; } V8_INLINE const T* GetRawAtomic() const { - return static_cast(MemberBase::GetRawAtomic()); + return static_cast(Base::GetRawAtomic()); } V8_INLINE void InitializingWriteBarrier(T* value) const { - WriteBarrierPolicy::InitializingBarrier(GetRawSlot(), value); + WriteBarrierPolicy::InitializingBarrier(Base::GetRawSlot(), value); } V8_INLINE void AssigningWriteBarrier(T* value) const { - WriteBarrierPolicy::AssigningBarrier(GetRawSlot(), value); + WriteBarrierPolicy::template AssigningBarrier< + StorageType::kWriteBarrierSlotType>(Base::GetRawSlot(), value); } V8_INLINE void AssigningWriteBarrier() const { - WriteBarrierPolicy::AssigningBarrier(GetRawSlot(), GetRawStorage()); + WriteBarrierPolicy::template AssigningBarrier< + StorageType::kWriteBarrierSlotType>(Base::GetRawSlot(), + Base::GetRawStorage()); } - V8_INLINE void ClearFromGC() const { MemberBase::ClearFromGC(); } + V8_INLINE void ClearFromGC() const { Base::ClearFromGC(); } V8_INLINE T* GetFromGC() const 
{ return Get(); } @@ -319,19 +327,20 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase, template friend struct cppgc::TraceTrait; template + typename CheckingPolicy1, typename StorageType1> friend class BasicMember; }; // Member equality operators. template + typename WriteBarrierPolicy2, typename CheckingPolicy2, + typename StorageType> V8_INLINE bool operator==( - const BasicMember& - member1, - const BasicMember& - member2) { + const BasicMember& member1, + const BasicMember& member2) { if constexpr (internal::IsDecayedSameV) { // Check compressed pointers if types are the same. return member1.GetRawStorage() == member2.GetRawStorage(); @@ -345,31 +354,32 @@ V8_INLINE bool operator==( template + typename WriteBarrierPolicy2, typename CheckingPolicy2, + typename StorageType> V8_INLINE bool operator!=( - const BasicMember& - member1, - const BasicMember& - member2) { + const BasicMember& member1, + const BasicMember& member2) { return !(member1 == member2); } // Equality with raw pointers. template -V8_INLINE bool operator==(const BasicMember& member, - U* raw) { + typename CheckingPolicy, typename StorageType, typename U> +V8_INLINE bool operator==( + const BasicMember& member, + U* raw) { // Never allow comparison with erased pointers. static_assert(!internal::IsDecayedSameV); if constexpr (internal::IsDecayedSameV) { // Check compressed pointers if types are the same. - return member.GetRawStorage() == MemberBase::RawStorage(raw); + return member.GetRawStorage() == StorageType(raw); } else if constexpr (internal::IsStrictlyBaseOfV) { // Cast the raw pointer to T, which may adjust the pointer. - return member.GetRawStorage() == - MemberBase::RawStorage(static_cast(raw)); + return member.GetRawStorage() == StorageType(static_cast(raw)); } else { // Otherwise, decompressed the member. 
return member.Get() == raw; @@ -377,104 +387,112 @@ V8_INLINE bool operator==(const BasicMember -V8_INLINE bool operator!=(const BasicMember& member, - U* raw) { + typename CheckingPolicy, typename StorageType, typename U> +V8_INLINE bool operator!=( + const BasicMember& member, + U* raw) { return !(member == raw); } template -V8_INLINE bool operator==(T* raw, - const BasicMember& member) { + typename WriteBarrierPolicy, typename CheckingPolicy, + typename StorageType> +V8_INLINE bool operator==( + T* raw, const BasicMember& member) { return member == raw; } template -V8_INLINE bool operator!=(T* raw, - const BasicMember& member) { + typename WriteBarrierPolicy, typename CheckingPolicy, + typename StorageType> +V8_INLINE bool operator!=( + T* raw, const BasicMember& member) { return !(raw == member); } // Equality with sentinel. template -V8_INLINE bool operator==(const BasicMember& member, - SentinelPointer) { + typename CheckingPolicy, typename StorageType> +V8_INLINE bool operator==( + const BasicMember& member, + SentinelPointer) { return member.GetRawStorage().IsSentinel(); } template -V8_INLINE bool operator!=(const BasicMember& member, - SentinelPointer s) { + typename CheckingPolicy, typename StorageType> +V8_INLINE bool operator!=( + const BasicMember& member, + SentinelPointer s) { return !(member == s); } template -V8_INLINE bool operator==(SentinelPointer s, - const BasicMember& member) { + typename CheckingPolicy, typename StorageType> +V8_INLINE bool operator==( + SentinelPointer s, const BasicMember& member) { return member == s; } template -V8_INLINE bool operator!=(SentinelPointer s, - const BasicMember& member) { + typename CheckingPolicy, typename StorageType> +V8_INLINE bool operator!=( + SentinelPointer s, const BasicMember& member) { return !(s == member); } // Equality with nullptr. 
template -V8_INLINE bool operator==(const BasicMember& member, - std::nullptr_t) { + typename CheckingPolicy, typename StorageType> +V8_INLINE bool operator==( + const BasicMember& member, + std::nullptr_t) { return !static_cast(member); } template -V8_INLINE bool operator!=(const BasicMember& member, - std::nullptr_t n) { + typename CheckingPolicy, typename StorageType> +V8_INLINE bool operator!=( + const BasicMember& member, + std::nullptr_t n) { return !(member == n); } template -V8_INLINE bool operator==(std::nullptr_t n, - const BasicMember& member) { + typename CheckingPolicy, typename StorageType> +V8_INLINE bool operator==( + std::nullptr_t n, const BasicMember& member) { return member == n; } template -V8_INLINE bool operator!=(std::nullptr_t n, - const BasicMember& member) { + typename CheckingPolicy, typename StorageType> +V8_INLINE bool operator!=( + std::nullptr_t n, const BasicMember& member) { return !(n == member); } // Relational operators. template + typename WriteBarrierPolicy2, typename CheckingPolicy2, + typename StorageType> V8_INLINE bool operator<( - const BasicMember& - member1, - const BasicMember& - member2) { + const BasicMember& member1, + const BasicMember& member2) { static_assert( internal::IsDecayedSameV, "Comparison works only for same pointer type modulo cv-qualifiers"); @@ -483,12 +501,13 @@ V8_INLINE bool operator<( template + typename WriteBarrierPolicy2, typename CheckingPolicy2, + typename StorageType> V8_INLINE bool operator<=( - const BasicMember& - member1, - const BasicMember& - member2) { + const BasicMember& member1, + const BasicMember& member2) { static_assert( internal::IsDecayedSameV, "Comparison works only for same pointer type modulo cv-qualifiers"); @@ -497,12 +516,13 @@ V8_INLINE bool operator<=( template + typename WriteBarrierPolicy2, typename CheckingPolicy2, + typename StorageType> V8_INLINE bool operator>( - const BasicMember& - member1, - const BasicMember& - member2) { + const BasicMember& member1, + 
const BasicMember& member2) { static_assert( internal::IsDecayedSameV, "Comparison works only for same pointer type modulo cv-qualifiers"); @@ -511,21 +531,23 @@ V8_INLINE bool operator>( template + typename WriteBarrierPolicy2, typename CheckingPolicy2, + typename StorageType> V8_INLINE bool operator>=( - const BasicMember& - member1, - const BasicMember& - member2) { + const BasicMember& member1, + const BasicMember& member2) { static_assert( internal::IsDecayedSameV, "Comparison works only for same pointer type modulo cv-qualifiers"); return member1.GetRawStorage() >= member2.GetRawStorage(); } -template -struct IsWeak< - internal::BasicMember> +template +struct IsWeak> : std::true_type {}; } // namespace internal @@ -536,8 +558,9 @@ struct IsWeak< * trace method. */ template -using Member = internal::BasicMember; +using Member = internal::BasicMember< + T, internal::StrongMemberTag, internal::DijkstraWriteBarrierPolicy, + internal::DefaultMemberCheckingPolicy, internal::DefaultMemberStorage>; /** * WeakMember is similar to Member in that it is used to point to other garbage @@ -548,8 +571,9 @@ using Member = internal::BasicMember -using WeakMember = internal::BasicMember; +using WeakMember = internal::BasicMember< + T, internal::WeakMemberTag, internal::DijkstraWriteBarrierPolicy, + internal::DefaultMemberCheckingPolicy, internal::DefaultMemberStorage>; /** * UntracedMember is a pointer to an on-heap object that is not traced for some @@ -558,8 +582,47 @@ using WeakMember = internal::BasicMember -using UntracedMember = internal::BasicMember; +using UntracedMember = internal::BasicMember< + T, internal::UntracedMemberTag, internal::NoWriteBarrierPolicy, + internal::DefaultMemberCheckingPolicy, internal::DefaultMemberStorage>; + +namespace subtle { + +/** + * UncompressedMember. Use with care in hot paths that would otherwise cause + * many decompression cycles. 
+ */ +template +using UncompressedMember = internal::BasicMember< + T, internal::StrongMemberTag, internal::DijkstraWriteBarrierPolicy, + internal::DefaultMemberCheckingPolicy, internal::RawPointer>; + +#if defined(CPPGC_POINTER_COMPRESSION) +/** + * CompressedMember. Default implementation of cppgc::Member on builds with + * pointer compression. + */ +template +using CompressedMember = internal::BasicMember< + T, internal::StrongMemberTag, internal::DijkstraWriteBarrierPolicy, + internal::DefaultMemberCheckingPolicy, internal::CompressedPointer>; +#endif // defined(CPPGC_POINTER_COMPRESSION) + +} // namespace subtle + +namespace internal { + +struct Dummy; + +static constexpr size_t kSizeOfMember = sizeof(Member); +static constexpr size_t kSizeOfUncompressedMember = + sizeof(subtle::UncompressedMember); +#if defined(CPPGC_POINTER_COMPRESSION) +static constexpr size_t kSizeofCompressedMember = + sizeof(subtle::CompressedMember); +#endif // defined(CPPGC_POINTER_COMPRESSION) + +} // namespace internal } // namespace cppgc diff --git a/deps/include/cppgc/name-provider.h b/deps/include/cppgc/name-provider.h index 216f6098..849176fd 100644 --- a/deps/include/cppgc/name-provider.h +++ b/deps/include/cppgc/name-provider.h @@ -55,6 +55,16 @@ class V8_EXPORT NameProvider { * Specifies a name for the garbage-collected object. Such names will never * be hidden, as they are explicitly specified by the user of this API. * + * Implementations of this function must not allocate garbage-collected + * objects or otherwise modify the cppgc heap. + * + * V8 may call this function while generating a heap snapshot or at other + * times. If V8 is currently generating a heap snapshot (according to + * HeapProfiler::IsTakingSnapshot), then the returned string must stay alive + * until the snapshot generation has completed. Otherwise, the returned string + * must stay alive forever. 
If you need a place to store a temporary string + * during snapshot generation, use HeapProfiler::CopyNameForHeapSnapshot. + * * @returns a human readable name for the object. */ virtual const char* GetHumanReadableName() const = 0; diff --git a/deps/include/cppgc/persistent.h b/deps/include/cppgc/persistent.h index 3a66ccc0..6eb1c659 100644 --- a/deps/include/cppgc/persistent.h +++ b/deps/include/cppgc/persistent.h @@ -92,6 +92,7 @@ class BasicPersistent final : public PersistentBase, template ::value>> + // NOLINTNEXTLINE BasicPersistent( const BasicPersistent& other, @@ -114,11 +115,13 @@ class BasicPersistent final : public PersistentBase, // Constructor from member. template ::value>> - BasicPersistent( - const internal::BasicMember& member, - const SourceLocation& loc = SourceLocation::Current()) + // NOLINTNEXTLINE + BasicPersistent(const internal::BasicMember< + U, MemberBarrierPolicy, MemberWeaknessTag, + MemberCheckingPolicy, MemberStorageType>& member, + const SourceLocation& loc = SourceLocation::Current()) : BasicPersistent(member.Get(), loc) {} ~BasicPersistent() { Clear(); } @@ -154,10 +157,12 @@ class BasicPersistent final : public PersistentBase, // Assignment from member. template ::value>> BasicPersistent& operator=( const internal::BasicMember& member) { + MemberCheckingPolicy, MemberStorageType>& + member) { return operator=(member.Get()); } @@ -177,6 +182,8 @@ class BasicPersistent final : public PersistentBase, } explicit operator bool() const { return Get(); } + // Historically we allow implicit conversions to T*. 
+ // NOLINTNEXTLINE operator T*() const { return Get(); } T* operator->() const { return Get(); } T& operator*() const { return *Get(); } @@ -286,36 +293,39 @@ bool operator!=(const BasicPersistent + typename MemberWeaknessTag, typename MemberCheckingPolicy, + typename MemberStorageType> bool operator==( const BasicPersistent& p, const BasicMember& m) { + MemberCheckingPolicy, MemberStorageType>& m) { return p.Get() == m.Get(); } template + typename MemberWeaknessTag, typename MemberCheckingPolicy, + typename MemberStorageType> bool operator!=( const BasicPersistent& p, const BasicMember& m) { + MemberCheckingPolicy, MemberStorageType>& m) { return !(p == m); } template + typename MemberStorageType, typename T2, + typename PersistentWeaknessPolicy, typename PersistentLocationPolicy, + typename PersistentCheckingPolicy> bool operator==( const BasicMember& m, + MemberCheckingPolicy, MemberStorageType>& m, const BasicPersistent& p) { @@ -324,11 +334,12 @@ bool operator==( template + typename MemberStorageType, typename T2, + typename PersistentWeaknessPolicy, typename PersistentLocationPolicy, + typename PersistentCheckingPolicy> bool operator!=( const BasicMember& m, + MemberCheckingPolicy, MemberStorageType>& m, const BasicPersistent& p) { diff --git a/deps/include/cppgc/platform.h b/deps/include/cppgc/platform.h index 5a0a40ec..a5eccfa8 100644 --- a/deps/include/cppgc/platform.h +++ b/deps/include/cppgc/platform.h @@ -52,6 +52,15 @@ class V8_EXPORT Platform { * Foreground task runner that should be used by a Heap. */ virtual std::shared_ptr GetForegroundTaskRunner() { + return GetForegroundTaskRunner(TaskPriority::kUserBlocking); + } + + /** + * Returns a TaskRunner with a specific |priority| which can be used to post a + * task on the foreground thread. 
+ */ + virtual std::shared_ptr GetForegroundTaskRunner( + TaskPriority priority) { return nullptr; } @@ -136,8 +145,13 @@ class V8_EXPORT Platform { * \param page_allocator The allocator used for maintaining meta data. Must stay * always alive and not change between multiple calls to InitializeProcess. If * no allocator is provided, a default internal version will be used. + * \param desired_heap_size Desired amount of virtual address space to reserve + * for the heap, in bytes. Actual size will be clamped to minimum and maximum + * values based on compile-time settings and may be rounded up. If this + * parameter is zero, a default value will be used. */ -V8_EXPORT void InitializeProcess(PageAllocator* page_allocator = nullptr); +V8_EXPORT void InitializeProcess(PageAllocator* page_allocator = nullptr, + size_t desired_heap_size = 0); /** * Must be called after destroying the last used heap. Some process-global diff --git a/deps/include/cppgc/sentinel-pointer.h b/deps/include/cppgc/sentinel-pointer.h index 8dbbab0e..bee96c77 100644 --- a/deps/include/cppgc/sentinel-pointer.h +++ b/deps/include/cppgc/sentinel-pointer.h @@ -7,13 +7,20 @@ #include +#include "cppgc/internal/api-constants.h" + namespace cppgc { namespace internal { // Special tag type used to denote some sentinel member. The semantics of the // sentinel is defined by the embedder. 
struct SentinelPointer { +#if defined(CPPGC_POINTER_COMPRESSION) + static constexpr intptr_t kSentinelValue = + 1 << api_constants::kPointerCompressionShift; +#else // !defined(CPPGC_POINTER_COMPRESSION) static constexpr intptr_t kSentinelValue = 0b10; +#endif // !defined(CPPGC_POINTER_COMPRESSION) template operator T*() const { return reinterpret_cast(kSentinelValue); diff --git a/deps/include/cppgc/source-location.h b/deps/include/cppgc/source-location.h index da5a5ede..0dc28aed 100644 --- a/deps/include/cppgc/source-location.h +++ b/deps/include/cppgc/source-location.h @@ -5,87 +5,11 @@ #ifndef INCLUDE_CPPGC_SOURCE_LOCATION_H_ #define INCLUDE_CPPGC_SOURCE_LOCATION_H_ -#include -#include - -#include "v8config.h" // NOLINT(build/include_directory) - -#if defined(__has_builtin) -#define CPPGC_SUPPORTS_SOURCE_LOCATION \ - (__has_builtin(__builtin_FUNCTION) && __has_builtin(__builtin_FILE) && \ - __has_builtin(__builtin_LINE)) // NOLINT -#elif defined(V8_CC_GNU) && __GNUC__ >= 7 -#define CPPGC_SUPPORTS_SOURCE_LOCATION 1 -#elif defined(V8_CC_INTEL) && __ICC >= 1800 -#define CPPGC_SUPPORTS_SOURCE_LOCATION 1 -#else -#define CPPGC_SUPPORTS_SOURCE_LOCATION 0 -#endif +#include "v8-source-location.h" namespace cppgc { -/** - * Encapsulates source location information. Mimics C++20's - * `std::source_location`. - */ -class V8_EXPORT SourceLocation final { - public: - /** - * Construct source location information corresponding to the location of the - * call site. - */ -#if CPPGC_SUPPORTS_SOURCE_LOCATION - static constexpr SourceLocation Current( - const char* function = __builtin_FUNCTION(), - const char* file = __builtin_FILE(), size_t line = __builtin_LINE()) { - return SourceLocation(function, file, line); - } -#else - static constexpr SourceLocation Current() { return SourceLocation(); } -#endif // CPPGC_SUPPORTS_SOURCE_LOCATION - - /** - * Constructs unspecified source location information. 
- */ - constexpr SourceLocation() = default; - - /** - * Returns the name of the function associated with the position represented - * by this object, if any. - * - * \returns the function name as cstring. - */ - constexpr const char* Function() const { return function_; } - - /** - * Returns the name of the current source file represented by this object. - * - * \returns the file name as cstring. - */ - constexpr const char* FileName() const { return file_; } - - /** - * Returns the line number represented by this object. - * - * \returns the line number. - */ - constexpr size_t Line() const { return line_; } - - /** - * Returns a human-readable string representing this object. - * - * \returns a human-readable string representing source location information. - */ - std::string ToString() const; - - private: - constexpr SourceLocation(const char* function, const char* file, size_t line) - : function_(function), file_(file), line_(line) {} - - const char* function_ = nullptr; - const char* file_ = nullptr; - size_t line_ = 0u; -}; +using SourceLocation = v8::SourceLocation; } // namespace cppgc diff --git a/deps/include/cppgc/trace-trait.h b/deps/include/cppgc/trace-trait.h index 694fbfdc..5fc863d2 100644 --- a/deps/include/cppgc/trace-trait.h +++ b/deps/include/cppgc/trace-trait.h @@ -53,6 +53,14 @@ struct TraceDescriptor { TraceCallback callback; }; +/** + * Callback for getting a TraceDescriptor for a given address. + * + * \param address Possibly inner address of an object. + * \returns a TraceDescriptor for the provided address. 
+ */ +using TraceDescriptorCallback = TraceDescriptor (*)(const void* address); + namespace internal { struct V8_EXPORT TraceTraitFromInnerAddressImpl { diff --git a/deps/include/cppgc/type-traits.h b/deps/include/cppgc/type-traits.h index 2f499e68..c595deca 100644 --- a/deps/include/cppgc/type-traits.h +++ b/deps/include/cppgc/type-traits.h @@ -16,7 +16,7 @@ class Visitor; namespace internal { template + typename CheckingPolicy, typename StorageType> class BasicMember; struct DijkstraWriteBarrierPolicy; struct NoWriteBarrierPolicy; @@ -126,14 +126,15 @@ template struct IsSubclassOfBasicMemberTemplate { private: - template + template static std::true_type SubclassCheck( - BasicMember*); + const BasicMember*); static std::false_type SubclassCheck(...); public: - static constexpr bool value = - decltype(SubclassCheck(std::declval()))::value; + static constexpr bool value = decltype(SubclassCheck( + std::declval*>()))::value; }; template , std::decay_t> && !IsDecayedSameV; +template +constexpr bool IsAnyMemberTypeV = false; + +template +constexpr bool IsAnyMemberTypeV> = true; + } // namespace internal /** @@ -244,6 +253,19 @@ constexpr bool IsWeakV = internal::IsWeak::value; template constexpr bool IsCompleteV = internal::IsComplete::value; +/** + * Value is true for member types `Member` and `WeakMember`. + */ +template +constexpr bool IsMemberOrWeakMemberTypeV = + IsMemberTypeV || IsWeakMemberTypeV; + +/** + * Value is true for any member type. 
+ */ +template +constexpr bool IsAnyMemberTypeV = internal::IsAnyMemberTypeV>; + } // namespace cppgc #endif // INCLUDE_CPPGC_TYPE_TRAITS_H_ diff --git a/deps/include/cppgc/visitor.h b/deps/include/cppgc/visitor.h index 704aabcd..1d6b39a1 100644 --- a/deps/include/cppgc/visitor.h +++ b/deps/include/cppgc/visitor.h @@ -5,10 +5,13 @@ #ifndef INCLUDE_CPPGC_VISITOR_H_ #define INCLUDE_CPPGC_VISITOR_H_ +#include + #include "cppgc/custom-space.h" #include "cppgc/ephemeron-pair.h" #include "cppgc/garbage-collected.h" #include "cppgc/internal/logging.h" +#include "cppgc/internal/member-storage.h" #include "cppgc/internal/pointer-policies.h" #include "cppgc/liveness-broker.h" #include "cppgc/member.h" @@ -99,6 +102,44 @@ class V8_EXPORT Visitor { &HandleWeak>, &weak_member); } +#if defined(CPPGC_POINTER_COMPRESSION) + /** + * Trace method for UncompressedMember. + * + * \param member UncompressedMember reference retaining an object. + */ + template + void Trace(const subtle::UncompressedMember& member) { + const T* value = member.GetRawAtomic(); + CPPGC_DCHECK(value != kSentinelPointer); + TraceImpl(value); + } +#endif // defined(CPPGC_POINTER_COMPRESSION) + + template + void TraceMultiple(const subtle::UncompressedMember* start, size_t len) { + static_assert(sizeof(T), "Pointee type must be fully defined."); + static_assert(internal::IsGarbageCollectedOrMixinType::value, + "T must be GarbageCollected or GarbageCollectedMixin type"); + VisitMultipleUncompressedMember(start, len, + &TraceTrait::GetTraceDescriptor); + } + + template , subtle::UncompressedMember>>* = nullptr> + void TraceMultiple(const Member* start, size_t len) { + static_assert(sizeof(T), "Pointee type must be fully defined."); + static_assert(internal::IsGarbageCollectedOrMixinType::value, + "T must be GarbageCollected or GarbageCollectedMixin type"); +#if defined(CPPGC_POINTER_COMPRESSION) + static_assert(std::is_same_v, subtle::CompressedMember>, + "Member and CompressedMember must be the same."); + 
VisitMultipleCompressedMember(start, len, + &TraceTrait::GetTraceDescriptor); +#endif // defined(CPPGC_POINTER_COMPRESSION) + } + /** * Trace method for inlined objects that are not allocated themselves but * otherwise follow managed heap layout and have a Trace() method. @@ -117,6 +158,26 @@ class V8_EXPORT Visitor { TraceTrait::Trace(this, &object); } + template + void TraceMultiple(const T* start, size_t len) { +#if V8_ENABLE_CHECKS + // This object is embedded in potentially multiple nested objects. The + // outermost object must not be in construction as such objects are (a) not + // processed immediately, and (b) only processed conservatively if not + // otherwise possible. + CheckObjectNotInConstruction(start); +#endif // V8_ENABLE_CHECKS + for (size_t i = 0; i < len; ++i) { + const T* object = &start[i]; + if constexpr (std::is_polymorphic_v) { + // The object's vtable may be uninitialized in which case the object is + // not traced. + if (*reinterpret_cast(object) == 0) continue; + } + TraceTrait::Trace(this, object); + } + } + /** * Registers a weak callback method on the object of type T. See * LivenessBroker for an usage example. @@ -300,6 +361,39 @@ class V8_EXPORT Visitor { WeakCallback callback, const void* data) {} virtual void HandleMovableReference(const void**) {} + virtual void VisitMultipleUncompressedMember( + const void* start, size_t len, + TraceDescriptorCallback get_trace_descriptor) { + // Default implementation merely delegates to Visit(). 
+ const char* it = static_cast(start); + const char* end = it + len * internal::kSizeOfUncompressedMember; + for (; it < end; it += internal::kSizeOfUncompressedMember) { + const auto* current = reinterpret_cast(it); + const void* object = current->LoadAtomic(); + if (!object) continue; + + Visit(object, get_trace_descriptor(object)); + } + } + +#if defined(CPPGC_POINTER_COMPRESSION) + virtual void VisitMultipleCompressedMember( + const void* start, size_t len, + TraceDescriptorCallback get_trace_descriptor) { + // Default implementation merely delegates to Visit(). + const char* it = static_cast(start); + const char* end = it + len * internal::kSizeofCompressedMember; + for (; it < end; it += internal::kSizeofCompressedMember) { + const auto* current = + reinterpret_cast(it); + const void* object = current->LoadAtomic(); + if (!object) continue; + + Visit(object, get_trace_descriptor(object)); + } + } +#endif // defined(CPPGC_POINTER_COMPRESSION) + private: template static void WeakCallbackMethodDelegate(const LivenessBroker& info, @@ -312,8 +406,7 @@ class V8_EXPORT Visitor { template static void HandleWeak(const LivenessBroker& info, const void* object) { const PointerType* weak = static_cast(object); - auto* raw_ptr = weak->GetFromGC(); - if (!info.IsHeapObjectAlive(raw_ptr)) { + if (!info.IsHeapObjectAlive(weak->GetFromGC())) { weak->ClearFromGC(); } } @@ -399,8 +492,7 @@ class V8_EXPORT RootVisitor { template static void HandleWeak(const LivenessBroker& info, const void* object) { const PointerType* weak = static_cast(object); - auto* raw_ptr = weak->GetFromGC(); - if (!info.IsHeapObjectAlive(raw_ptr)) { + if (!info.IsHeapObjectAlive(weak->GetFromGC())) { weak->ClearFromGC(); } } diff --git a/deps/include/js_protocol.pdl b/deps/include/js_protocol.pdl index d4102f5c..b96102b7 100644 --- a/deps/include/js_protocol.pdl +++ b/deps/include/js_protocol.pdl @@ -369,6 +369,14 @@ domain Debugger # call stacks (default). 
integer maxDepth + # Replace previous blackbox execution contexts with passed ones. Forces backend to skip + # stepping/pausing in scripts in these execution contexts. VM will try to leave blackboxed script by + # performing 'step in' several times, finally resorting to 'step out' if unsuccessful. + experimental command setBlackboxExecutionContexts + parameters + # Array of execution context unique ids for the debugger to ignore. + array of string uniqueIds + # Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in # scripts with url matching one of the patterns. VM will try to leave blackboxed script by # performing 'step in' several times, finally resorting to 'step out' if unsuccessful. @@ -376,6 +384,8 @@ domain Debugger parameters # Array of regexps that will be used to check script url for blackbox state. array of string patterns + # If true, also ignore scripts with no source url. + optional boolean skipAnonymous # Makes backend skip steps in the script in blackboxed ranges. VM will try leave blacklisted # scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful. @@ -511,6 +521,7 @@ domain Debugger CompileError BlockedByActiveGenerator BlockedByActiveFunction + BlockedByTopLevelEsModuleChange # Exception details if any. Only present when `status` is `CompileError`. optional Runtime.ExceptionDetails exceptionDetails @@ -579,6 +590,7 @@ domain Debugger other promiseRejection XHR + step # Object containing break-specific auxiliary properties. optional object data # Hit breakpoints IDs @@ -604,7 +616,6 @@ domain Debugger properties # Type of the debug symbols. enum type - None SourceMap EmbeddedDWARF ExternalDWARF @@ -630,7 +641,7 @@ domain Debugger Runtime.ExecutionContextId executionContextId # Content hash of the script, SHA-256. string hash - # Embedder-specific auxiliary data. 
+ # Embedder-specific auxiliary data likely matching {isDefault: boolean, type: 'default'|'isolated'|'worker', frameId: string} optional object executionContextAuxData # URL of source map associated with script (if any). optional string sourceMapURL @@ -669,7 +680,7 @@ domain Debugger Runtime.ExecutionContextId executionContextId # Content hash of the script, SHA-256. string hash - # Embedder-specific auxiliary data. + # Embedder-specific auxiliary data likely matching {isDefault: boolean, type: 'default'|'isolated'|'worker', frameId: string} optional object executionContextAuxData # True, if this script is generated as a result of the live edit operation. experimental optional boolean isLiveEdit @@ -687,8 +698,8 @@ domain Debugger experimental optional integer codeOffset # The language of the script. experimental optional Debugger.ScriptLanguage scriptLanguage - # If the scriptLanguage is WebASsembly, the source of debug symbols for the module. - experimental optional Debugger.DebugSymbols debugSymbols + # If the scriptLanguage is WebAssembly, the source of debug symbols for the module. + experimental optional array of Debugger.DebugSymbols debugSymbols # The name the embedder supplied for this script. experimental optional string embedderName @@ -1012,9 +1023,31 @@ domain Runtime # Unique script identifier. type ScriptId extends string - # Represents the value serialiazed by the WebDriver BiDi specification - # https://w3c.github.io/webdriver-bidi. - type WebDriverValue extends object + # Represents options for serialization. Overrides `generatePreview` and `returnByValue`. + type SerializationOptions extends object + properties + enum serialization + # Whether the result should be deep-serialized. The result is put into + # `deepSerializedValue` and `ObjectId` is provided. + deep + # Whether the result is expected to be a JSON object which should be sent by value. + # The result is put either into `value` or into `unserializableValue`. 
Synonym of + # `returnByValue: true`. Overrides `returnByValue`. + json + # Only remote object id is put in the result. Same behaviour as if no + # `serializationOptions`, `generatePreview` nor `returnByValue` are provided. + idOnly + + # Deep serialization depth. Default is full depth. Respected only in `deep` serialization mode. + optional integer maxDepth + + # Embedder-specific parameters. For example if connected to V8 in Chrome these control DOM + # serialization via `maxNodeDepth: integer` and `includeShadowTree: "none" | "open" | "all"`. + # Values can be only of type string or integer. + optional object additionalParameters + + # Represents deep serialized value. + type DeepSerializedValue extends object properties enum type undefined @@ -1040,8 +1073,13 @@ domain Runtime arraybuffer node window + generator optional any value optional string objectId + # Set if value reference met more than once during serialization. In such + # case, value is provided only to one of the serialized values. Unique + # per value in the scope of one CDP call. + optional integer weakLocalObjectReference # Unique object identifier. type RemoteObjectId extends string @@ -1095,8 +1133,8 @@ domain Runtime optional UnserializableValue unserializableValue # String representation of the object. optional string description - # WebDriver BiDi representation of the value. - experimental optional WebDriverValue webDriverValue + # Deep serialized value. + experimental optional DeepSerializedValue deepSerializedValue # Unique object identifier (for non-primitive values). optional RemoteObjectId objectId # Preview containing abbreviated property values. Specified for `object` type values only. @@ -1282,7 +1320,7 @@ domain Runtime # multiple processes, so can be reliably used to identify specific context while backend # performs a cross-process navigation. experimental string uniqueId - # Embedder-specific auxiliary data. 
+ # Embedder-specific auxiliary data likely matching {isDefault: boolean, type: 'default'|'isolated'|'worker', frameId: string} optional object auxData # Detailed information about exception (or error) that was thrown during script compilation or @@ -1386,6 +1424,7 @@ domain Runtime # execution. Overrides `setPauseOnException` state. optional boolean silent # Whether the result is expected to be a JSON object which should be sent by value. + # Can be overridden by `serializationOptions`. optional boolean returnByValue # Whether preview should be generated for the result. experimental optional boolean generatePreview @@ -1409,10 +1448,10 @@ domain Runtime # boundaries). # This is mutually exclusive with `executionContextId`. experimental optional string uniqueContextId - # Whether the result should contain `webDriverValue`, serialized according to - # https://w3c.github.io/webdriver-bidi. This is mutually exclusive with `returnByValue`, but - # resulting `objectId` is still provided. - experimental optional boolean generateWebDriverValue + # Specifies the result serialization. If provided, overrides + # `generatePreview` and `returnByValue`. + experimental optional SerializationOptions serializationOptions + returns # Call result. RemoteObject result @@ -1498,8 +1537,9 @@ domain Runtime # boundaries). # This is mutually exclusive with `contextId`. experimental optional string uniqueContextId - # Whether the result should be serialized according to https://w3c.github.io/webdriver-bidi. - experimental optional boolean generateWebDriverValue + # Specifies the result serialization. If provided, overrides + # `generatePreview` and `returnByValue`. + experimental optional SerializationOptions serializationOptions returns # Evaluation result. RemoteObject result @@ -1634,7 +1674,7 @@ domain Runtime # Binding function takes exactly one argument, this argument should be string, # in case of any other input, function throws an exception. 
# Each binding function call produces Runtime.bindingCalled notification. - experimental command addBinding + command addBinding parameters string name # If specified, the binding would only be exposed to the specified @@ -1644,17 +1684,17 @@ domain Runtime # Deprecated in favor of `executionContextName` due to an unclear use case # and bugs in implementation (crbug.com/1169639). `executionContextId` will be # removed in the future. - deprecated optional ExecutionContextId executionContextId + experimental deprecated optional ExecutionContextId executionContextId # If specified, the binding is exposed to the executionContext with # matching name, even for contexts created after the binding is added. # See also `ExecutionContext.name` and `worldName` parameter to # `Page.addScriptToEvaluateOnNewDocument`. # This parameter is mutually exclusive with `executionContextId`. - experimental optional string executionContextName + optional string executionContextName # This method does not remove binding function from global object but # unsubscribes current runtime agent from Runtime.bindingCalled notifications. - experimental command removeBinding + command removeBinding parameters string name diff --git a/deps/include/libplatform/libplatform.h b/deps/include/libplatform/libplatform.h index 9ec60c04..6a34f432 100644 --- a/deps/include/libplatform/libplatform.h +++ b/deps/include/libplatform/libplatform.h @@ -23,6 +23,8 @@ enum class MessageLoopBehavior : bool { kWaitForWork = true }; +enum class PriorityMode : bool { kDontApply, kApply }; + /** * Returns a new instance of the default v8::Platform implementation. * @@ -35,13 +37,17 @@ enum class MessageLoopBehavior : bool { * calling v8::platform::RunIdleTasks to process the idle tasks. * If |tracing_controller| is nullptr, the default platform will create a * v8::platform::TracingController instance and use it. 
+ * If |priority_mode| is PriorityMode::kApply, the default platform will use + * multiple task queues executed by threads with different system-level priorities + * (where available) to schedule tasks. */ V8_PLATFORM_EXPORT std::unique_ptr NewDefaultPlatform( int thread_pool_size = 0, IdleTaskSupport idle_task_support = IdleTaskSupport::kDisabled, InProcessStackDumping in_process_stack_dumping = InProcessStackDumping::kDisabled, - std::unique_ptr tracing_controller = {}); + std::unique_ptr tracing_controller = {}, + PriorityMode priority_mode = PriorityMode::kDontApply); /** * The same as NewDefaultPlatform but disables the worker thread pool. diff --git a/deps/include/v8-array-buffer.h b/deps/include/v8-array-buffer.h index 804fc42c..d294b13a 100644 --- a/deps/include/v8-array-buffer.h +++ b/deps/include/v8-array-buffer.h @@ -18,11 +18,12 @@ namespace v8 { class SharedArrayBuffer; #ifndef V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT -// The number of required internal fields can be defined by embedder. +// Defined using gn arg `v8_array_buffer_internal_field_count`. #define V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT 2 #endif enum class ArrayBufferCreationMode { kInternalized, kExternalized }; +enum class BackingStoreInitializationMode { kZeroInitialized, kUninitialized }; /** * A wrapper around the backing store (i.e. the raw memory) of an array buffer. @@ -87,6 +88,9 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase { * Assumes that the backing_store was allocated by the ArrayBuffer allocator * of the given isolate. */ + V8_DEPRECATED( + "Reallocate is unsafe, please do not use. Please allocate a new " + "BackingStore and copy instead.") static std::unique_ptr Reallocate( v8::Isolate* isolate, std::unique_ptr backing_store, size_t byte_length); @@ -179,6 +183,9 @@ class V8_EXPORT ArrayBuffer : public Object { * * The default implementation allocates a new block and copies data. */ + V8_DEPRECATED( + "Reallocate is unsafe, please do not use. 
Please allocate new memory " + "and copy instead.") virtual void* Reallocate(void* data, size_t old_length, size_t new_length); /** @@ -211,12 +218,27 @@ class V8_EXPORT ArrayBuffer : public Object { size_t MaxByteLength() const; /** - * Create a new ArrayBuffer. Allocate |byte_length| bytes. + * Attempt to create a new ArrayBuffer. Allocate |byte_length| bytes. * Allocated memory will be owned by a created ArrayBuffer and * will be deallocated when it is garbage-collected, + * unless the object is externalized. If allocation fails, the Maybe + * returned will be empty. + */ + static MaybeLocal MaybeNew( + Isolate* isolate, size_t byte_length, + BackingStoreInitializationMode initialization_mode = + BackingStoreInitializationMode::kZeroInitialized); + + /** + * Create a new ArrayBuffer. Allocate |byte_length| bytes, which are either + * zero-initialized or uninitialized. Allocated memory will be owned by a + * created ArrayBuffer and will be deallocated when it is garbage-collected, * unless the object is externalized. */ - static Local New(Isolate* isolate, size_t byte_length); + static Local New( + Isolate* isolate, size_t byte_length, + BackingStoreInitializationMode initialization_mode = + BackingStoreInitializationMode::kZeroInitialized); /** * Create a new ArrayBuffer with an existing backing store. @@ -235,15 +257,18 @@ class V8_EXPORT ArrayBuffer : public Object { /** * Returns a new standalone BackingStore that is allocated using the array - * buffer allocator of the isolate. The result can be later passed to + * buffer allocator of the isolate. The allocation can either be zero + * initialized, or uninitialized. The result can be later passed to * ArrayBuffer::New. * * If the allocator returns nullptr, then the function may cause GCs in the * given isolate and re-try the allocation. If GCs do not help, then the * function will crash with an out-of-memory error. 
*/ - static std::unique_ptr NewBackingStore(Isolate* isolate, - size_t byte_length); + static std::unique_ptr NewBackingStore( + Isolate* isolate, size_t byte_length, + BackingStoreInitializationMode initialization_mode = + BackingStoreInitializationMode::kZeroInitialized); /** * Returns a new standalone BackingStore that takes over the ownership of * the given buffer. The destructor of the BackingStore invokes the given @@ -287,7 +312,7 @@ class V8_EXPORT ArrayBuffer : public Object { * preventing JavaScript from ever accessing underlying backing store. * ArrayBuffer should have been externalized and must be detachable. */ - V8_DEPRECATE_SOON( + V8_DEPRECATED( "Use the version which takes a key parameter (passing a null handle is " "ok).") void Detach(); @@ -318,6 +343,12 @@ class V8_EXPORT ArrayBuffer : public Object { */ std::shared_ptr GetBackingStore(); + /** + * More efficient shortcut for + * GetBackingStore()->IsResizableByUserJavaScript(). + */ + bool IsResizableByUserJavaScript() const; + /** * More efficient shortcut for GetBackingStore()->Data(). The returned pointer * is valid as long as the ArrayBuffer is alive. @@ -331,8 +362,9 @@ class V8_EXPORT ArrayBuffer : public Object { return static_cast(value); } - static const int kInternalFieldCount = V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT; - static const int kEmbedderFieldCount = V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT; + static constexpr int kInternalFieldCount = + V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT; + static constexpr int kEmbedderFieldCount = kInternalFieldCount; private: ArrayBuffer(); @@ -340,7 +372,7 @@ class V8_EXPORT ArrayBuffer : public Object { }; #ifndef V8_ARRAY_BUFFER_VIEW_INTERNAL_FIELD_COUNT -// The number of required internal fields can be defined by embedder. +// Defined using gn arg `v8_array_buffer_view_internal_field_count`. 
#define V8_ARRAY_BUFFER_VIEW_INTERNAL_FIELD_COUNT 2 #endif @@ -387,10 +419,9 @@ class V8_EXPORT ArrayBufferView : public Object { return static_cast(value); } - static const int kInternalFieldCount = - V8_ARRAY_BUFFER_VIEW_INTERNAL_FIELD_COUNT; - static const int kEmbedderFieldCount = + static constexpr int kInternalFieldCount = V8_ARRAY_BUFFER_VIEW_INTERNAL_FIELD_COUNT; + static const int kEmbedderFieldCount = kInternalFieldCount; private: ArrayBufferView(); @@ -434,12 +465,15 @@ class V8_EXPORT SharedArrayBuffer : public Object { size_t MaxByteLength() const; /** - * Create a new SharedArrayBuffer. Allocate |byte_length| bytes. - * Allocated memory will be owned by a created SharedArrayBuffer and - * will be deallocated when it is garbage-collected, - * unless the object is externalized. + * Create a new SharedArrayBuffer. Allocate |byte_length| bytes, which are + * either zero-initialized or uninitialized. Allocated memory will be owned by + * a created SharedArrayBuffer and will be deallocated when it is + * garbage-collected, unless the object is externalized. */ - static Local New(Isolate* isolate, size_t byte_length); + static Local New( + Isolate* isolate, size_t byte_length, + BackingStoreInitializationMode initialization_mode = + BackingStoreInitializationMode::kZeroInitialized); /** * Create a new SharedArrayBuffer with an existing backing store. @@ -458,15 +492,18 @@ class V8_EXPORT SharedArrayBuffer : public Object { /** * Returns a new standalone BackingStore that is allocated using the array - * buffer allocator of the isolate. The result can be later passed to + * buffer allocator of the isolate. The allocation can either be zero + * initialized, or uninitialized. The result can be later passed to * SharedArrayBuffer::New. * * If the allocator returns nullptr, then the function may cause GCs in the * given isolate and re-try the allocation. If GCs do not help, then the * function will crash with an out-of-memory error. 
*/ - static std::unique_ptr NewBackingStore(Isolate* isolate, - size_t byte_length); + static std::unique_ptr NewBackingStore( + Isolate* isolate, size_t byte_length, + BackingStoreInitializationMode initialization_mode = + BackingStoreInitializationMode::kZeroInitialized); /** * Returns a new standalone BackingStore that takes over the ownership of * the given buffer. The destructor of the BackingStore invokes the given @@ -500,7 +537,8 @@ class V8_EXPORT SharedArrayBuffer : public Object { return static_cast(value); } - static const int kInternalFieldCount = V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT; + static constexpr int kInternalFieldCount = + V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT; private: SharedArrayBuffer(); diff --git a/deps/include/v8-callbacks.h b/deps/include/v8-callbacks.h index f3e96c37..f757eb15 100644 --- a/deps/include/v8-callbacks.h +++ b/deps/include/v8-callbacks.h @@ -7,6 +7,7 @@ #include +#include #include #include "cppgc/common.h" @@ -146,14 +147,15 @@ using JitCodeEventHandler = void (*)(const JitCodeEvent* event); * the callback functions, you therefore cannot manipulate objects (set or * delete properties for example) since it is possible such operations will * result in the allocation of objects. + * TODO(v8:12612): Deprecate kGCTypeMinorMarkSweep after updating blink. 
*/ enum GCType { kGCTypeScavenge = 1 << 0, - kGCTypeMinorMarkCompact = 1 << 1, + kGCTypeMinorMarkSweep = 1 << 1, kGCTypeMarkSweepCompact = 1 << 2, kGCTypeIncrementalMarking = 1 << 3, kGCTypeProcessWeakCallbacks = 1 << 4, - kGCTypeAll = kGCTypeScavenge | kGCTypeMinorMarkCompact | + kGCTypeAll = kGCTypeScavenge | kGCTypeMinorMarkSweep | kGCTypeMarkSweepCompact | kGCTypeIncrementalMarking | kGCTypeProcessWeakCallbacks }; @@ -229,7 +231,7 @@ using MessageCallback = void (*)(Local message, Local data); // --- Tracing --- -enum LogEventStatus : int { kStart = 0, kEnd = 1, kStamp = 2 }; +enum LogEventStatus : int { kStart = 0, kEnd = 1, kLog = 2 }; using LogEventCallback = void (*)(const char* name, int /* LogEventStatus */ status); @@ -252,15 +254,7 @@ using AddCrashKeyCallback = void (*)(CrashKeyId id, const std::string& value); using BeforeCallEnteredCallback = void (*)(Isolate*); using CallCompletedCallback = void (*)(Isolate*); -// --- AllowCodeGenerationFromStrings callbacks --- - -/** - * Callback to check if code generation from strings is allowed. See - * Context::AllowCodeGenerationFromStrings. - */ -using AllowCodeGenerationFromStringsCallback = bool (*)(Local context, - Local source); - +// --- Modify Code Generation From Strings Callback --- struct ModifyCodeGenerationFromStringsResult { // If true, proceed with the codegen algorithm. Otherwise, block it. bool codegen_allowed = false; @@ -270,6 +264,20 @@ struct ModifyCodeGenerationFromStringsResult { MaybeLocal modified_source; }; +/** + * Callback to check if codegen is allowed from a source object, and convert + * the source to string if necessary. See: ModifyCodeGenerationFromStrings. 
+ */ +using ModifyCodeGenerationFromStringsCallback = + ModifyCodeGenerationFromStringsResult (*)(Local context, + Local source); +using ModifyCodeGenerationFromStringsCallback2 = + ModifyCodeGenerationFromStringsResult (*)(Local context, + Local source, + bool is_code_like); + +// --- Failed Access Check Callback --- + /** * Access type specification. */ @@ -281,23 +289,9 @@ enum AccessType { ACCESS_KEYS }; -// --- Failed Access Check Callback --- - using FailedAccessCheckCallback = void (*)(Local target, AccessType type, Local data); -/** - * Callback to check if codegen is allowed from a source object, and convert - * the source to string if necessary. See: ModifyCodeGenerationFromStrings. - */ -using ModifyCodeGenerationFromStringsCallback = - ModifyCodeGenerationFromStringsResult (*)(Local context, - Local source); -using ModifyCodeGenerationFromStringsCallback2 = - ModifyCodeGenerationFromStringsResult (*)(Local context, - Local source, - bool is_code_like); - // --- WebAssembly compilation callbacks --- using ExtensionCallback = bool (*)(const FunctionCallbackInfo&); @@ -322,20 +316,28 @@ using WasmAsyncResolvePromiseCallback = void (*)( using WasmLoadSourceMapCallback = Local (*)(Isolate* isolate, const char* name); -// --- Callback for checking if WebAssembly Simd is enabled --- -using WasmSimdEnabledCallback = bool (*)(Local context); - -// --- Callback for checking if WebAssembly exceptions are enabled --- -using WasmExceptionsEnabledCallback = bool (*)(Local context); - -// --- Callback for checking if WebAssembly GC is enabled --- -// If the callback returns true, it will also enable Wasm stringrefs. 
-using WasmGCEnabledCallback = bool (*)(Local context); +// --- Callback for checking if WebAssembly imported strings are enabled --- +using WasmImportedStringsEnabledCallback = bool (*)(Local context); // --- Callback for checking if the SharedArrayBuffer constructor is enabled --- using SharedArrayBufferConstructorEnabledCallback = bool (*)(Local context); +// --- Callback for checking if the compile hints magic comments are enabled --- +using JavaScriptCompileHintsMagicEnabledCallback = + bool (*)(Local context); + +// --- Callback for checking if WebAssembly JSPI is enabled --- +using WasmJSPIEnabledCallback = bool (*)(Local context); + +/** + * Import phases in import requests. + */ +enum class ModuleImportPhase { + kSource, + kEvaluation, +}; + /** * HostImportModuleDynamicallyCallback is called when we * require the embedder to load a module. This is used as part of the dynamic @@ -346,11 +348,11 @@ using SharedArrayBufferConstructorEnabledCallback = * * The specifier is the name of the module that should be imported. * - * The import_assertions are import assertions for this request in the form: + * The import_attributes are import attributes for this request in the form: * [key1, value1, key2, value2, ...] where the keys and values are of type * v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and - * returned from ModuleRequest::GetImportAssertions(), this array does not - * contain the source Locations of the assertions. + * returned from ModuleRequest::GetImportAttributes(), this array does not + * contain the source Locations of the attributes. * * The embedder must compile, instantiate, evaluate the Module, and * obtain its namespace object. @@ -362,15 +364,58 @@ using SharedArrayBufferConstructorEnabledCallback = * fails (e.g. due to stack overflow), the embedder must propagate * that exception by returning an empty MaybeLocal. 
*/ -using HostImportModuleDynamicallyWithImportAssertionsCallback = - MaybeLocal (*)(Local context, - Local referrer, - Local specifier, - Local import_assertions); using HostImportModuleDynamicallyCallback = MaybeLocal (*)( Local context, Local host_defined_options, Local resource_name, Local specifier, - Local import_assertions); + Local import_attributes); + +/** + * HostImportModuleWithPhaseDynamicallyCallback is called when we + * require the embedder to load a module with a specific phase. This is used + * as part of the dynamic import syntax. + * + * The referrer contains metadata about the script/module that calls + * import. + * + * The specifier is the name of the module that should be imported. + * + * The phase is the phase of the import requested. + * + * The import_attributes are import attributes for this request in the form: + * [key1, value1, key2, value2, ...] where the keys and values are of type + * v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and + * returned from ModuleRequest::GetImportAttributes(), this array does not + * contain the source Locations of the attributes. + * + * The Promise returned from this function is forwarded to userland + * JavaScript. The embedder must resolve this promise according to the phase + * requested: + * - For ModuleImportPhase::kSource, the promise must be resolved with a + * compiled ModuleSource object, or rejected with a ReferenceError if the + * module does not support source representation. + * - For ModuleImportPhase::kEvaluation, the promise must be resolved with a + * ModuleNamespace object of a module that has been compiled, instantiated, + * and evaluated. + * + * In case of an exception, the embedder must reject this promise with the + * exception. If the promise creation itself fails (e.g. due to stack + * overflow), the embedder must propagate that exception by returning an empty + * MaybeLocal. 
+ * + * This callback is still experimental and is only invoked for source phase + * imports. + */ +using HostImportModuleWithPhaseDynamicallyCallback = MaybeLocal (*)( + Local context, Local host_defined_options, + Local resource_name, Local specifier, + ModuleImportPhase phase, Local import_attributes); + +/** + * Callback for requesting a compile hint for a function from the embedder. The + * first parameter is the position of the function in source code and the second + * parameter is embedder data to be passed back. + */ +using CompileHintCallback = bool (*)(int, void*); /** * HostInitializeImportMetaObjectCallback is called the first time import.meta @@ -411,6 +456,45 @@ using PrepareStackTraceCallback = MaybeLocal (*)(Local context, Local error, Local sites); +#if defined(V8_OS_WIN) +/** + * Callback to selectively enable ETW tracing based on the document URL. + * Implemented by the embedder, it should never call back into V8. + * + * Windows allows passing additional data to the ETW EnableCallback: + * https://learn.microsoft.com/en-us/windows/win32/api/evntprov/nc-evntprov-penablecallback + * + * This data can be configured in a WPR (Windows Performance Recorder) + * profile, adding a CustomFilter to an EventProvider like the following: + * + * + * + * + * + * Where: + * - Name="57277741-3638-4A4B-BDBA-0AC6E45DA56C" is the GUID of the V8 + * ETW provider, (see src/libplatform/etw/etw-provider-win.h), + * - Type="0x80000000" is EVENT_FILTER_TYPE_SCHEMATIZED, + * - Value="AQABAAAAAA..." is a base64-encoded byte array that is + * base64-decoded by Windows and passed to the ETW enable callback in + * the 'PEVENT_FILTER_DESCRIPTOR FilterData' argument; see: + * https://learn.microsoft.com/en-us/windows/win32/api/evntprov/ns-evntprov-event_filter_descriptor. 
+ * + * This array contains a struct EVENT_FILTER_HEADER followed by a + * variable length payload, and as payload we pass a string in JSON format, + * with a list of regular expressions that should match the document URL + * in order to enable ETW tracing: + * { + * "version": "1.0", + * "filtered_urls": [ + * "https:\/\/.*\.chromium\.org\/.*", "https://v8.dev/";, "..." + * ] + * } + */ +using FilterETWSessionByURLCallback = + bool (*)(Local context, const std::string& etw_filter_payload); +#endif // V8_OS_WIN + } // namespace v8 #endif // INCLUDE_V8_ISOLATE_CALLBACKS_H_ diff --git a/deps/include/v8-container.h b/deps/include/v8-container.h index ce068603..380999e5 100644 --- a/deps/include/v8-container.h +++ b/deps/include/v8-container.h @@ -8,6 +8,8 @@ #include #include +#include + #include "v8-local-handle.h" // NOLINT(build/include_directory) #include "v8-object.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory) @@ -43,6 +45,58 @@ class V8_EXPORT Array : public Object { return static_cast(value); } + /** + * Creates a JavaScript array from a provided callback. + * + * \param context The v8::Context to create the array in. + * \param length The length of the array to be created. + * \param next_value_callback The callback that is invoked to retrieve + * elements for the array. The embedder can signal that the array + * initialization should be aborted by throwing an exception and returning + * an empty MaybeLocal. + * \returns The v8::Array if all elements were constructed successfully and an + * empty MaybeLocal otherwise. + */ + static MaybeLocal New( + Local context, size_t length, + std::function()> next_value_callback); + + enum class CallbackResult { + kException, + kBreak, + kContinue, + }; + using IterationCallback = CallbackResult (*)(uint32_t index, + Local element, + void* data); + + /** + * Calls {callback} for every element of this array, passing {callback_data} + * as its {data} parameter. 
+ * This function will typically be faster than calling {Get()} repeatedly. + * As a consequence of being optimized for low overhead, the provided + * callback must adhere to the following restrictions: + * - It must not allocate any V8 objects and continue iterating; it may + * allocate (e.g. an error message/object) and then immediately terminate + * the iteration. + * - It must not modify the array being iterated. + * - It must not call back into V8 (unless it can guarantee that such a + * call does not violate the above restrictions, which is difficult). + * - The {Local element} must not "escape", i.e. must not be assigned + * to any other {Local}. Creating a {Global} from it, or updating a + * v8::TypecheckWitness with it, is safe. + * These restrictions may be lifted in the future if use cases arise that + * justify a slower but more robust implementation. + * + * Returns {Nothing} on exception; use a {TryCatch} to catch and handle this + * exception. + * When the {callback} returns {kException}, iteration is terminated + * immediately, returning {Nothing}. By returning {kBreak}, the callback + * can request non-exceptional early termination of the iteration. + */ + Maybe Iterate(Local context, IterationCallback callback, + void* callback_data); + private: Array(); static void CheckCast(Value* obj); diff --git a/deps/include/v8-context.h b/deps/include/v8-context.h index 3ce0eb0a..0861d664 100644 --- a/deps/include/v8-context.h +++ b/deps/include/v8-context.h @@ -7,8 +7,11 @@ #include +#include + #include "v8-data.h" // NOLINT(build/include_directory) #include "v8-local-handle.h" // NOLINT(build/include_directory) +#include "v8-maybe.h" // NOLINT(build/include_directory) #include "v8-snapshot.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory) @@ -81,6 +84,33 @@ class V8_EXPORT Context : public Data { * created by a previous call to Context::New with the same global * template. 
The state of the global object will be completely reset * and only object identify will remain. + * + * \param internal_fields_deserializer An optional callback used + * to deserialize fields set by + * v8::Object::SetAlignedPointerInInternalField() in wrapper objects + * from the default context snapshot. It should match the + * SerializeInternalFieldsCallback() used by + * v8::SnapshotCreator::SetDefaultContext() when the default context + * snapshot is created. It does not need to be configured if the default + * context snapshot contains no wrapper objects with pointer internal + * fields, or if no custom startup snapshot is configured + * in the v8::CreateParams used to create the isolate. + * + * \param microtask_queue An optional microtask queue used to manage + * the microtasks created in this context. If not set the per-isolate + * default microtask queue would be used. + * + * \param context_data_deserializer An optional callback used + * to deserialize embedder data set by + * v8::Context::SetAlignedPointerInEmbedderData() in the default + * context from the default context snapshot. It does not need to be + * configured if the default context snapshot contains no pointer embedder + * data, or if no custom startup snapshot is configured in the + * v8::CreateParams used to create the isolate. + * + * \param api_wrapper_deserializer An optional callback used to deserialize + * API wrapper objects that was initially set with v8::Object::Wrap() and then + * serialized using SerializeAPIWrapperCallback. 
*/ static Local New( Isolate* isolate, ExtensionConfiguration* extensions = nullptr, @@ -88,33 +118,59 @@ class V8_EXPORT Context : public Data { MaybeLocal global_object = MaybeLocal(), DeserializeInternalFieldsCallback internal_fields_deserializer = DeserializeInternalFieldsCallback(), - MicrotaskQueue* microtask_queue = nullptr); + MicrotaskQueue* microtask_queue = nullptr, + DeserializeContextDataCallback context_data_deserializer = + DeserializeContextDataCallback(), + DeserializeAPIWrapperCallback api_wrapper_deserializer = + DeserializeAPIWrapperCallback()); /** * Create a new context from a (non-default) context snapshot. There * is no way to provide a global object template since we do not create * a new global object from template, but we can reuse a global object. * - * \param isolate See v8::Context::New. + * \param isolate See v8::Context::New(). * * \param context_snapshot_index The index of the context snapshot to - * deserialize from. Use v8::Context::New for the default snapshot. + * deserialize from. Use v8::Context::New() for the default snapshot. + * + * \param internal_fields_deserializer An optional callback used + * to deserialize fields set by + * v8::Object::SetAlignedPointerInInternalField() in wrapper objects + * from the default context snapshot. It does not need to be + * configured if there are no wrapper objects with no internal + * pointer fields in the default context snapshot or if no startup + * snapshot is configured when the isolate is created. * - * \param embedder_fields_deserializer Optional callback to deserialize - * internal fields. It should match the SerializeInternalFieldCallback used - * to serialize. + * \param extensions See v8::Context::New(). * - * \param extensions See v8::Context::New. + * \param global_object See v8::Context::New(). * - * \param global_object See v8::Context::New. 
+ * \param internal_fields_deserializer Similar to + * internal_fields_deserializer in v8::Context::New() but applies to + * the context specified by the context_snapshot_index. + * + * \param microtask_queue See v8::Context::New(). + * + * \param context_data_deserializer Similar to + * context_data_deserializer in v8::Context::New() but applies to + * the context specified by the context_snapshot_index. + * + * \param api_wrapper_deserializer Similar to api_wrapper_deserializer in + * v8::Context::New() but applies to the context specified by the + * context_snapshot_index. */ static MaybeLocal FromSnapshot( Isolate* isolate, size_t context_snapshot_index, - DeserializeInternalFieldsCallback embedder_fields_deserializer = + DeserializeInternalFieldsCallback internal_fields_deserializer = DeserializeInternalFieldsCallback(), ExtensionConfiguration* extensions = nullptr, MaybeLocal global_object = MaybeLocal(), - MicrotaskQueue* microtask_queue = nullptr); + MicrotaskQueue* microtask_queue = nullptr, + DeserializeContextDataCallback context_data_deserializer = + DeserializeContextDataCallback(), + DeserializeAPIWrapperCallback api_wrapper_deserializer = + DeserializeAPIWrapperCallback()); /** * Returns an global object that isn't backed by an actual context. @@ -163,6 +219,42 @@ class V8_EXPORT Context : public Data { */ void Exit(); + /** + * Delegate to help with Deep freezing embedder-specific objects (such as + * JSApiObjects) that can not be frozen natively. + */ + class DeepFreezeDelegate { + public: + /** + * Performs embedder-specific operations to freeze the provided embedder + * object. The provided object *will* be frozen by DeepFreeze after this + * function returns, so only embedder-specific objects need to be frozen. + * This function *may not* create new JS objects or perform JS allocations. + * Any v8 objects reachable from the provided embedder object that should + * also be considered for freezing should be added to the children_out + * parameter. 
Returns true if the operation completed successfully. + */ + virtual bool FreezeEmbedderObjectAndGetChildren( + Local obj, LocalVector& children_out) = 0; + }; + + /** + * Attempts to recursively freeze all objects reachable from this context. + * Some objects (generators, iterators, non-const closures) can not be frozen + * and will cause this method to throw an error. An optional delegate can be + * provided to help freeze embedder-specific objects. + * + * Freezing occurs in two steps: + * 1. "Marking" where we iterate through all objects reachable by this + * context, accumulating a list of objects that need to be frozen and + * looking for objects that can't be frozen. This step is separated because + * it is more efficient when we can assume there is no garbage collection. + * 2. "Freezing" where we go through the list of objects and freeze them. + * This effectively requires copying them so it may trigger garbage + * collection. + */ + Maybe DeepFreeze(DeepFreezeDelegate* delegate = nullptr); + /** Returns the isolate associated with a current context. */ Isolate* GetIsolate(); /** @@ -210,6 +302,8 @@ class V8_EXPORT Context : public Data { * SetAlignedPointerInEmbedderData with the same index. Note that index 0 * currently has a special meaning for Chrome's debugger. */ + V8_INLINE void* GetAlignedPointerFromEmbedderData(Isolate* isolate, + int index); V8_INLINE void* GetAlignedPointerFromEmbedderData(int index); /** @@ -226,7 +320,7 @@ class V8_EXPORT Context : public Data { * 'Function' constructor are used an exception will be thrown. * * If code generation from strings is not allowed the - * V8::AllowCodeGenerationFromStrings callback will be invoked if + * V8::ModifyCodeGenerationFromStringsCallback callback will be invoked if * set before blocking the call to 'eval' or the 'Function' * constructor. If that callback returns true, the call will be * allowed, otherwise an exception will be thrown. 
If no callback is @@ -270,18 +364,6 @@ class V8_EXPORT Context : public Data { Local context); void SetAbortScriptExecution(AbortScriptExecutionCallback callback); - /** - * Returns the value that was set or restored by - * SetContinuationPreservedEmbedderData(), if any. - */ - Local GetContinuationPreservedEmbedderData() const; - - /** - * Sets a value that will be stored on continuations and reset while the - * continuation runs. - */ - void SetContinuationPreservedEmbedderData(Local context); - /** * Set or clear hooks to be invoked for promise lifecycle operations. * To clear a hook, set it to an empty v8::Function. Each function will @@ -356,7 +438,7 @@ Local Context::GetEmbedderData(int index) { #ifndef V8_ENABLE_CHECKS using A = internal::Address; using I = internal::Internals; - A ctx = *reinterpret_cast(this); + A ctx = internal::ValueHelper::ValueAsAddress(this); A embedder_data = I::ReadTaggedPointerField(ctx, I::kNativeContextEmbedderDataOffset); int value_offset = @@ -365,23 +447,40 @@ Local Context::GetEmbedderData(int index) { #ifdef V8_COMPRESS_POINTERS // We read the full pointer value and then decompress it in order to avoid // dealing with potential endiannes issues. 
- value = - I::DecompressTaggedAnyField(embedder_data, static_cast(value)); + value = I::DecompressTaggedField(embedder_data, static_cast(value)); #endif - internal::Isolate* isolate = internal::IsolateFromNeverReadOnlySpaceObject( - *reinterpret_cast(this)); - A* result = HandleScope::CreateHandle(isolate, value); - return Local(reinterpret_cast(result)); + + auto isolate = reinterpret_cast( + internal::IsolateFromNeverReadOnlySpaceObject(ctx)); + return Local::New(isolate, value); #else return SlowGetEmbedderData(index); #endif } +void* Context::GetAlignedPointerFromEmbedderData(Isolate* isolate, int index) { +#if !defined(V8_ENABLE_CHECKS) + using A = internal::Address; + using I = internal::Internals; + A ctx = internal::ValueHelper::ValueAsAddress(this); + A embedder_data = + I::ReadTaggedPointerField(ctx, I::kNativeContextEmbedderDataOffset); + int value_offset = I::kEmbedderDataArrayHeaderSize + + (I::kEmbedderDataSlotSize * index) + + I::kEmbedderDataSlotExternalPointerOffset; + return reinterpret_cast( + I::ReadExternalPointerField( + isolate, embedder_data, value_offset)); +#else + return SlowGetAlignedPointerFromEmbedderData(index); +#endif +} + void* Context::GetAlignedPointerFromEmbedderData(int index) { #if !defined(V8_ENABLE_CHECKS) using A = internal::Address; using I = internal::Internals; - A ctx = *reinterpret_cast(this); + A ctx = internal::ValueHelper::ValueAsAddress(this); A embedder_data = I::ReadTaggedPointerField(ctx, I::kNativeContextEmbedderDataOffset); int value_offset = I::kEmbedderDataArrayHeaderSize + @@ -398,9 +497,12 @@ void* Context::GetAlignedPointerFromEmbedderData(int index) { template MaybeLocal Context::GetDataFromSnapshotOnce(size_t index) { - T* data = reinterpret_cast(GetDataFromSnapshotOnce(index)); - if (data) internal::PerformCastCheck(data); - return Local(data); + if (auto slot = GetDataFromSnapshotOnce(index); slot) { + internal::PerformCastCheck( + internal::ValueHelper::SlotAsValue(slot)); + return 
Local::FromSlot(slot); + } + return {}; } Context* Context::Cast(v8::Data* data) { diff --git a/deps/include/v8-cppgc.h b/deps/include/v8-cppgc.h index 4a457027..6ebae86c 100644 --- a/deps/include/v8-cppgc.h +++ b/deps/include/v8-cppgc.h @@ -32,62 +32,15 @@ class CppHeap; class CustomSpaceStatisticsReceiver; -/** - * Describes how V8 wrapper objects maintain references to garbage-collected C++ - * objects. - */ -struct WrapperDescriptor final { - /** - * The index used on `v8::Ojbect::SetAlignedPointerFromInternalField()` and - * related APIs to add additional data to an object which is used to identify - * JS->C++ references. - */ - using InternalFieldIndex = int; - - /** - * Unknown embedder id. The value is reserved for internal usages and must not - * be used with `CppHeap`. - */ - static constexpr uint16_t kUnknownEmbedderId = UINT16_MAX; - - constexpr WrapperDescriptor(InternalFieldIndex wrappable_type_index, - InternalFieldIndex wrappable_instance_index, - uint16_t embedder_id_for_garbage_collected) - : wrappable_type_index(wrappable_type_index), - wrappable_instance_index(wrappable_instance_index), - embedder_id_for_garbage_collected(embedder_id_for_garbage_collected) {} - - /** - * Index of the wrappable type. - */ - InternalFieldIndex wrappable_type_index; - - /** - * Index of the wrappable instance. - */ - InternalFieldIndex wrappable_instance_index; - - /** - * Embedder id identifying instances of garbage-collected objects. It is - * expected that the first field of the wrappable type is a uint16_t holding - * the id. Only references to instances of wrappables types with an id of - * `embedder_id_for_garbage_collected` will be considered by CppHeap. 
- */ - uint16_t embedder_id_for_garbage_collected; -}; - struct V8_EXPORT CppHeapCreateParams { - CppHeapCreateParams( - std::vector> custom_spaces, - WrapperDescriptor wrapper_descriptor) - : custom_spaces(std::move(custom_spaces)), - wrapper_descriptor(wrapper_descriptor) {} + explicit CppHeapCreateParams( + std::vector> custom_spaces) + : custom_spaces(std::move(custom_spaces)) {} CppHeapCreateParams(const CppHeapCreateParams&) = delete; CppHeapCreateParams& operator=(const CppHeapCreateParams&) = delete; std::vector> custom_spaces; - WrapperDescriptor wrapper_descriptor; /** * Specifies which kind of marking are supported by the heap. The type may be * further reduced via runtime flags when attaching the heap to an Isolate. diff --git a/deps/include/v8-date.h b/deps/include/v8-date.h index 8d82ccc9..5c3cbd91 100644 --- a/deps/include/v8-date.h +++ b/deps/include/v8-date.h @@ -21,6 +21,10 @@ class V8_EXPORT Date : public Object { static V8_WARN_UNUSED_RESULT MaybeLocal New(Local context, double time); + static V8_WARN_UNUSED_RESULT MaybeLocal Parse( + Local context, + Local date_string); + /** * A specialization of Value::NumberValue that is more efficient * because we know the structure of this object. @@ -32,6 +36,11 @@ class V8_EXPORT Date : public Object { */ v8::Local ToISOString() const; + /** + * Generates UTC string representation. + */ + v8::Local ToUTCString() const; + V8_INLINE static Date* Cast(Value* value) { #ifdef V8_ENABLE_CHECKS CheckCast(value); diff --git a/deps/include/v8-debug.h b/deps/include/v8-debug.h index 52255f37..55ee9381 100644 --- a/deps/include/v8-debug.h +++ b/deps/include/v8-debug.h @@ -130,6 +130,11 @@ class V8_EXPORT StackTrace { kDetailed = kOverview | kIsEval | kIsConstructor | kScriptNameOrSourceURL }; + /** + * Returns the (unique) ID of this stack trace. + */ + int GetID() const; + /** * Returns a StackFrame at a particular index. 
*/ diff --git a/deps/include/v8-embedder-heap.h b/deps/include/v8-embedder-heap.h index 9e2e3ef5..184dfa79 100644 --- a/deps/include/v8-embedder-heap.h +++ b/deps/include/v8-embedder-heap.h @@ -9,6 +9,9 @@ #include "v8config.h" // NOLINT(build/include_directory) namespace v8 { +namespace internal { +class TracedHandles; +} // namespace internal class Isolate; class Value; @@ -18,8 +21,19 @@ class Value; */ class V8_EXPORT EmbedderRootsHandler { public: + enum class RootHandling { + kQueryEmbedderForNonDroppableReferences, + kDontQueryEmbedderForAnyReference, + }; + virtual ~EmbedderRootsHandler() = default; + EmbedderRootsHandler() = default; + + V8_DEPRECATED("Use the default constructor instead.") + explicit EmbedderRootsHandler(RootHandling default_traced_reference_handling) + : default_traced_reference_handling_(default_traced_reference_handling) {} + /** * Returns true if the |TracedReference| handle should be considered as root * for the currently running non-tracing garbage collection and false @@ -31,11 +45,14 @@ class V8_EXPORT EmbedderRootsHandler { * |TracedReference|. * * Note that the `handle` is different from the handle that the embedder holds - * for retaining the object. The embedder may use |WrapperClassId()| to - * distinguish cases where it wants handles to be treated as roots from not - * being treated as roots. + * for retaining the object. + * + * The concrete implementations must be thread-safe. */ - virtual bool IsRoot(const v8::TracedReference& handle) = 0; + V8_DEPRECATED("Use TracedReferenceHandling::kDroppable instead.") + virtual bool IsRoot(const v8::TracedReference& handle) { + return true; + } /** * Used in combination with |IsRoot|. Called by V8 when an @@ -47,6 +64,22 @@ class V8_EXPORT EmbedderRootsHandler { * handle via the object or class id. */ virtual void ResetRoot(const v8::TracedReference& handle) = 0; + + /** + * Similar to |ResetRoot()|, but opportunistic. 
The function is called in + * parallel for different handles and as such must be thread-safe. In case, + * |false| is returned, |ResetRoot()| will be recalled for the same handle. + */ + virtual bool TryResetRoot(const v8::TracedReference& handle) { + ResetRoot(handle); + return true; + } + + private: + const RootHandling default_traced_reference_handling_ = + RootHandling::kDontQueryEmbedderForAnyReference; + + friend class internal::TracedHandles; }; } // namespace v8 diff --git a/deps/include/v8-embedder-state-scope.h b/deps/include/v8-embedder-state-scope.h index d8a3b08d..ec8da457 100644 --- a/deps/include/v8-embedder-state-scope.h +++ b/deps/include/v8-embedder-state-scope.h @@ -7,12 +7,13 @@ #include -#include "v8-context.h" // NOLINT(build/include_directory) #include "v8-internal.h" // NOLINT(build/include_directory) #include "v8-local-handle.h" // NOLINT(build/include_directory) namespace v8 { +class Context; + namespace internal { class EmbedderState; } // namespace internal diff --git a/deps/include/v8-exception.h b/deps/include/v8-exception.h index bc058e3f..a7f49b6c 100644 --- a/deps/include/v8-exception.h +++ b/deps/include/v8-exception.h @@ -8,6 +8,7 @@ #include #include "v8-local-handle.h" // NOLINT(build/include_directory) +#include "v8-object.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory) namespace v8 { @@ -30,14 +31,21 @@ class ThreadLocalTop; */ class V8_EXPORT Exception { public: - static Local RangeError(Local message); - static Local ReferenceError(Local message); - static Local SyntaxError(Local message); - static Local TypeError(Local message); - static Local WasmCompileError(Local message); - static Local WasmLinkError(Local message); - static Local WasmRuntimeError(Local message); - static Local Error(Local message); + static Local RangeError(Local message, + Local options = {}); + static Local ReferenceError(Local message, + Local options = {}); + static Local SyntaxError(Local message, + 
Local options = {}); + static Local TypeError(Local message, + Local options = {}); + static Local WasmCompileError(Local message, + Local options = {}); + static Local WasmLinkError(Local message, + Local options = {}); + static Local WasmRuntimeError(Local message, + Local options = {}); + static Local Error(Local message, Local options = {}); /** * Creates an error message for the given exception. @@ -51,8 +59,77 @@ class V8_EXPORT Exception { * of a given exception, or an empty handle if not available. */ static Local GetStackTrace(Local exception); + + /** + * Captures the current stack trace and attaches it to the given object in the + * form of `stack` property. + */ + static Maybe CaptureStackTrace(Local context, + Local object); }; +/** + * This is a part of experimental Api and might be changed without further + * notice. + * Do not use it. + */ +enum class ExceptionContext : uint32_t { + kUnknown, + kConstructor, + kOperation, + kAttributeGet, + kAttributeSet, + kIndexedQuery, + kIndexedGetter, + kIndexedDescriptor, + kIndexedSetter, + kIndexedDefiner, + kIndexedDeleter, + kNamedQuery, + kNamedGetter, + kNamedDescriptor, + kNamedSetter, + kNamedDefiner, + kNamedDeleter, + kNamedEnumerator +}; + +/** + * This is a part of experimental Api and might be changed without further + * notice. + * Do not use it. 
+ */ +class ExceptionPropagationMessage { + public: + ExceptionPropagationMessage(v8::Isolate* isolate, Local exception, + Local interface_name, + Local property_name, + ExceptionContext exception_context) + : isolate_(isolate), + exception_(exception), + interface_name_(interface_name), + property_name_(property_name), + exception_context_(exception_context) {} + + V8_INLINE Isolate* GetIsolate() const { return isolate_; } + V8_INLINE Local GetException() const { return exception_; } + V8_INLINE Local GetInterfaceName() const { return interface_name_; } + V8_INLINE Local GetPropertyName() const { return property_name_; } + V8_INLINE ExceptionContext GetExceptionContext() const { + return exception_context_; + } + + private: + Isolate* isolate_; + Local exception_; + Local interface_name_; + Local property_name_; + ExceptionContext exception_context_; +}; + +using ExceptionPropagationCallback = + void (*)(ExceptionPropagationMessage message); + /** * An external exception handler. */ @@ -206,7 +283,6 @@ class V8_EXPORT TryCatch { bool can_continue_ : 1; bool capture_message_ : 1; bool rethrow_ : 1; - bool has_terminated_ : 1; friend class internal::Isolate; friend class internal::ThreadLocalTop; diff --git a/deps/include/v8-fast-api-calls.h b/deps/include/v8-fast-api-calls.h index 0fe7cd24..18466688 100644 --- a/deps/include/v8-fast-api-calls.h +++ b/deps/include/v8-fast-api-calls.h @@ -254,8 +254,8 @@ class CTypeInfo { // migrated from v8::ApiObject to v8::Local. kAny, // This is added to enable untyped representation of fast // call arguments for test purposes. It can represent any of - // the other types stored in the same memory as a union (see - // the AnyCType struct declared below). This allows for + // the other types stored in the same memory as a union + // (see AnyCType declared below). This allows for // uniform passing of arguments w.r.t. their location // (in a register or on the stack), independent of their // actual type. 
It's currently used by the arm64 simulator @@ -337,14 +337,21 @@ struct FastApiTypedArrayBase { }; template -struct FastApiTypedArray : public FastApiTypedArrayBase { +struct V8_DEPRECATE_SOON( + "When an API function expects a TypedArray as a parameter, the type in the " + "signature should be `v8::Local` instead of " + "FastApiTypedArray<>. The API function then has to type-check the " + "parameter and convert it to a `v8::Local(data_) + index, sizeof(T)); + memcpy(&tmp, static_cast(reinterpret_cast(data_) + index), + sizeof(T)); return tmp; } @@ -388,13 +395,19 @@ struct FastOneByteString { class V8_EXPORT CFunctionInfo { public: + enum class Int64Representation : uint8_t { + kNumber = 0, // Use numbers to represent 64 bit integers. + kBigInt = 1, // Use BigInts to represent 64 bit integers. + }; + // Construct a struct to hold a CFunction's type information. // |return_info| describes the function's return type. // |arg_info| is an array of |arg_count| CTypeInfos describing the // arguments. Only the last argument may be of the special type // CTypeInfo::kCallbackOptionsType. CFunctionInfo(const CTypeInfo& return_info, unsigned int arg_count, - const CTypeInfo* arg_info); + const CTypeInfo* arg_info, + Int64Representation repr = Int64Representation::kNumber); const CTypeInfo& ReturnInfo() const { return return_info_; } @@ -404,6 +417,8 @@ class V8_EXPORT CFunctionInfo { return HasOptions() ? arg_count_ - 1 : arg_count_; } + Int64Representation GetInt64Representation() const { return repr_; } + // |index| must be less than ArgumentCount(). 
// Note: if the last argument passed on construction of CFunctionInfo // has type CTypeInfo::kCallbackOptionsType, it is not included in @@ -418,6 +433,7 @@ class V8_EXPORT CFunctionInfo { private: const CTypeInfo return_info_; + const Int64Representation repr_; const unsigned int arg_count_; const CTypeInfo* arg_info_; }; @@ -425,35 +441,36 @@ class V8_EXPORT CFunctionInfo { struct FastApiCallbackOptions; // Provided for testing. -struct AnyCType { +union V8_TRIVIAL_ABI AnyCType { AnyCType() : int64_value(0) {} - union { - bool bool_value; - int32_t int32_value; - uint32_t uint32_value; - int64_t int64_value; - uint64_t uint64_value; - float float_value; - double double_value; - void* pointer_value; - Local object_value; - Local sequence_value; - const FastApiTypedArray* uint8_ta_value; - const FastApiTypedArray* int32_ta_value; - const FastApiTypedArray* uint32_ta_value; - const FastApiTypedArray* int64_ta_value; - const FastApiTypedArray* uint64_ta_value; - const FastApiTypedArray* float_ta_value; - const FastApiTypedArray* double_ta_value; - const FastOneByteString* string_value; - FastApiCallbackOptions* options_value; - }; +#if defined(V8_ENABLE_LOCAL_OFF_STACK_CHECK) && V8_HAS_ATTRIBUTE_TRIVIAL_ABI + // In this case, Local is not trivially copyable and the implicit + // copy constructor and copy assignment for the union are deleted. 
+ AnyCType(const AnyCType& other) : int64_value(other.int64_value) {} + AnyCType& operator=(const AnyCType& other) { + int64_value = other.int64_value; + return *this; + } +#endif + + bool bool_value; + int32_t int32_value; + uint32_t uint32_value; + int64_t int64_value; + uint64_t uint64_value; + float float_value; + double double_value; + void* pointer_value; + Local object_value; + Local sequence_value; + const FastOneByteString* string_value; + FastApiCallbackOptions* options_value; }; static_assert( sizeof(AnyCType) == 8, - "The AnyCType struct should have size == 64 bits, as this is assumed " + "The union AnyCType should have size == 64 bits, as this is assumed " "by EffectControlLinearizer."); class V8_EXPORT CFunction { @@ -469,6 +486,9 @@ class V8_EXPORT CFunction { unsigned int ArgumentCount() const { return type_info_->ArgumentCount(); } const void* GetAddress() const { return address_; } + CFunctionInfo::Int64Representation GetInt64Representation() const { + return type_info_->GetInt64Representation(); + } const CFunctionInfo* GetTypeInfo() const { return type_info_; } enum class OverloadResolution { kImpossible, kAtRuntime, kAtCompileTime }; @@ -508,16 +528,22 @@ class V8_EXPORT CFunction { } template - static CFunction Make(F* func) { - return ArgUnwrap::Make(func); + static CFunction Make(F* func, + CFunctionInfo::Int64Representation int64_rep = + CFunctionInfo::Int64Representation::kNumber) { + CFunction result = ArgUnwrap::Make(func, int64_rep); + result.GetInt64Representation(); + return result; } // Provided for testing purposes. 
template static CFunction Make(R (*func)(Args...), - R_Patch (*patching_func)(Args_Patch...)) { - CFunction c_func = ArgUnwrap::Make(func); + R_Patch (*patching_func)(Args_Patch...), + CFunctionInfo::Int64Representation int64_rep = + CFunctionInfo::Int64Representation::kNumber) { + CFunction c_func = ArgUnwrap::Make(func, int64_rep); static_assert( sizeof...(Args_Patch) == sizeof...(Args), "The patching function must have the same number of arguments."); @@ -540,7 +566,9 @@ class V8_EXPORT CFunction { template class ArgUnwrap { public: - static CFunction Make(R (*func)(Args...)); + static CFunction Make(R (*func)(Args...), + CFunctionInfo::Int64Representation int64_rep = + CFunctionInfo::Int64Representation::kNumber); }; }; @@ -556,35 +584,15 @@ struct FastApiCallbackOptions { * returned instance may be filled with mock data. */ static FastApiCallbackOptions CreateForTesting(Isolate* isolate) { - return {false, {0}, nullptr}; + return {}; } - /** - * If the callback wants to signal an error condition or to perform an - * allocation, it must set options.fallback to true and do an early return - * from the fast method. Then V8 checks the value of options.fallback and if - * it's true, falls back to executing the SlowCallback, which is capable of - * reporting the error (either by throwing a JS exception or logging to the - * console) or doing the allocation. It's the embedder's responsibility to - * ensure that the fast callback is idempotent up to the point where error and - * fallback conditions are checked, because otherwise executing the slow - * callback might produce visible side-effects twice. - */ - bool fallback; + v8::Isolate* isolate = nullptr; /** * The `data` passed to the FunctionTemplate constructor, or `undefined`. - * `data_ptr` allows for default constructing FastApiCallbackOptions. */ - union { - uintptr_t data_ptr; - v8::Local data; - }; - - /** - * When called from WebAssembly, a view of the calling module's memory. 
- */ - FastApiTypedArray* const wasm_memory; + v8::Local data; }; namespace internal { @@ -598,7 +606,8 @@ struct count template struct count : count {}; -template +template class CFunctionInfoImpl : public CFunctionInfo { static constexpr int kOptionsArgCount = count(); @@ -613,18 +622,20 @@ class CFunctionInfoImpl : public CFunctionInfo { public: constexpr CFunctionInfoImpl() : CFunctionInfo(RetBuilder::Build(), sizeof...(ArgBuilders), - arg_info_storage_), + arg_info_storage_, Representation), arg_info_storage_{ArgBuilders::Build()...} { constexpr CTypeInfo::Type kReturnType = RetBuilder::Build().GetType(); static_assert(kReturnType == CTypeInfo::Type::kVoid || kReturnType == CTypeInfo::Type::kBool || kReturnType == CTypeInfo::Type::kInt32 || kReturnType == CTypeInfo::Type::kUint32 || + kReturnType == CTypeInfo::Type::kInt64 || + kReturnType == CTypeInfo::Type::kUint64 || kReturnType == CTypeInfo::Type::kFloat32 || kReturnType == CTypeInfo::Type::kFloat64 || kReturnType == CTypeInfo::Type::kPointer || kReturnType == CTypeInfo::Type::kAny, - "64-bit int, string and api object values are not currently " + "String and api object values are not currently " "supported return types."); } @@ -845,8 +856,11 @@ class CFunctionBuilderWithFunction { return *this; } + template auto Build() { - static CFunctionInfoImpl instance; + static CFunctionInfoImpl + instance; return CFunction(fn_, &instance); } @@ -902,8 +916,14 @@ class CFunctionBuilder { // static template -CFunction CFunction::ArgUnwrap::Make(R (*func)(Args...)) { - return internal::CFunctionBuilder().Fn(func).Build(); +CFunction CFunction::ArgUnwrap::Make( + R (*func)(Args...), CFunctionInfo::Int64Representation int64_rep) { + if (int64_rep == CFunctionInfo::Int64Representation::kNumber) { + return internal::CFunctionBuilder().Fn(func).Build(); + } + return internal::CFunctionBuilder() + .Fn(func) + .template Build(); } using CFunctionBuilder = internal::CFunctionBuilder; diff --git a/deps/include/v8-forward.h 
b/deps/include/v8-forward.h index db3a2017..435fe856 100644 --- a/deps/include/v8-forward.h +++ b/deps/include/v8-forward.h @@ -27,6 +27,7 @@ class Context; class DataView; class Data; class Date; +class DictionaryTemplate; class Extension; class External; class FixedArray; diff --git a/deps/include/v8-function-callback.h b/deps/include/v8-function-callback.h index 2adff99b..9ff4c595 100644 --- a/deps/include/v8-function-callback.h +++ b/deps/include/v8-function-callback.h @@ -5,6 +5,10 @@ #ifndef INCLUDE_V8_FUNCTION_CALLBACK_H_ #define INCLUDE_V8_FUNCTION_CALLBACK_H_ +#include +#include + +#include "v8-internal.h" // NOLINT(build/include_directory) #include "v8-local-handle.h" // NOLINT(build/include_directory) #include "v8-primitive.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory) @@ -21,12 +25,18 @@ class Value; namespace internal { class FunctionCallbackArguments; class PropertyCallbackArguments; +class Builtins; } // namespace internal namespace debug { class ConsoleCallArguments; } // namespace debug +namespace api_internal { +V8_EXPORT v8::Local GetFunctionTemplateData( + v8::Isolate* isolate, v8::Local raw_target); +} // namespace api_internal + template class ReturnValue { public: @@ -34,21 +44,32 @@ class ReturnValue { V8_INLINE ReturnValue(const ReturnValue& that) : value_(that.value_) { static_assert(std::is_base_of::value, "type check"); } - // Local setters + // Handle-based setters. template V8_INLINE void Set(const Global& handle); template + V8_INLINE void SetNonEmpty(const Global& handle); + template V8_INLINE void Set(const BasicTracedReference& handle); template + V8_INLINE void SetNonEmpty(const BasicTracedReference& handle); + template V8_INLINE void Set(const Local handle); - // Fast primitive setters + template + V8_INLINE void SetNonEmpty(const Local handle); + // Fast primitive number setters. 
V8_INLINE void Set(bool value); V8_INLINE void Set(double i); + V8_INLINE void Set(int16_t i); V8_INLINE void Set(int32_t i); + V8_INLINE void Set(int64_t i); + V8_INLINE void Set(uint16_t i); V8_INLINE void Set(uint32_t i); + V8_INLINE void Set(uint64_t i); // Fast JS primitive setters V8_INLINE void SetNull(); V8_INLINE void SetUndefined(); + V8_INLINE void SetFalse(); V8_INLINE void SetEmptyString(); // Convenience getter for Isolate V8_INLINE Isolate* GetIsolate() const; @@ -71,9 +92,19 @@ class ReturnValue { friend class PropertyCallbackInfo; template friend class PersistentValueMapBase; - V8_INLINE void SetInternal(internal::Address value) { *value_ = value; } - V8_INLINE internal::Address GetDefaultValue(); + V8_INLINE void SetInternal(internal::Address value); + // Default value depends on : + // - -> true_value, + // - -> true_value, + // - -> 0, + // - -> undefined_value, + // - -> undefined_value. + V8_INLINE void SetDefaultValue(); V8_INLINE explicit ReturnValue(internal::Address* slot); + + // See FunctionCallbackInfo. + static constexpr int kIsolateValueIndex = -2; + internal::Address* value_; }; @@ -105,6 +136,12 @@ class FunctionCallbackInfo { * referencing this callback was found (which in V8 internally is often * referred to as holder [sic]). */ + V8_DEPRECATED( + "V8 will stop providing access to hidden prototype (i.e. " + "JSGlobalObject). Use This() instead. \n" + "DO NOT try to workaround this by accessing JSGlobalObject via " + "v8::Object::GetPrototype() - it'll be deprecated soon too. \n" + "See http://crbug.com/333672197. ") V8_INLINE Local Holder() const; /** For construct calls, this returns the "new.target" value. */ V8_INLINE Local NewTarget() const; @@ -116,25 +153,49 @@ class FunctionCallbackInfo { V8_INLINE Isolate* GetIsolate() const; /** The ReturnValue for the call. */ V8_INLINE ReturnValue GetReturnValue() const; - // This shouldn't be public, but the arm compiler needs it. 
- static const int kArgsLength = 6; - protected: + // This is a temporary replacement for Holder() added just for the purpose + // of testing the deprecated Holder() machinery until it's removed for real. + // DO NOT use it. + V8_INLINE Local HolderSoonToBeDeprecated() const; + + private: friend class internal::FunctionCallbackArguments; friend class internal::CustomArguments; friend class debug::ConsoleCallArguments; - static const int kHolderIndex = 0; - static const int kIsolateIndex = 1; - static const int kReturnValueDefaultValueIndex = 2; - static const int kReturnValueIndex = 3; - static const int kDataIndex = 4; - static const int kNewTargetIndex = 5; + friend void internal::PrintFunctionCallbackInfo(void*); + + static constexpr int kHolderIndex = 0; + static constexpr int kIsolateIndex = 1; + static constexpr int kContextIndex = 2; + static constexpr int kReturnValueIndex = 3; + static constexpr int kTargetIndex = 4; + static constexpr int kNewTargetIndex = 5; + static constexpr int kArgsLength = 6; + + static constexpr int kArgsLengthWithReceiver = kArgsLength + 1; + + // Codegen constants: + static constexpr int kSize = 3 * internal::kApiSystemPointerSize; + static constexpr int kImplicitArgsOffset = 0; + static constexpr int kValuesOffset = + kImplicitArgsOffset + internal::kApiSystemPointerSize; + static constexpr int kLengthOffset = + kValuesOffset + internal::kApiSystemPointerSize; + + static constexpr int kThisValuesIndex = -1; + static_assert(ReturnValue::kIsolateValueIndex == + kIsolateIndex - kReturnValueIndex); V8_INLINE FunctionCallbackInfo(internal::Address* implicit_args, internal::Address* values, int length); + + // TODO(https://crbug.com/326505377): flatten the v8::FunctionCallbackInfo + // object to avoid indirect loads through values_ and implicit_args_ and + // reduce the number of instructions in the CallApiCallback builtin. 
internal::Address* implicit_args_; internal::Address* values_; - int length_; + internal::Address length_; }; /** @@ -208,8 +269,23 @@ class PropertyCallbackInfo { * * \note For security reasons, do not pass the object back into the runtime. */ + V8_DEPRECATE_SOON( + "V8 will stop providing access to hidden prototype (i.e. " + "JSGlobalObject). Use HolderV2() instead. \n" + "DO NOT try to workaround this by accessing JSGlobalObject via " + "v8::Object::GetPrototype() - it'll be deprecated soon too. \n" + "See http://crbug.com/333672197. ") V8_INLINE Local Holder() const; + /** + * \return The object in the prototype chain of the receiver that has the + * interceptor. Suppose you have `x` and its prototype is `y`, and `y` + * has an interceptor. Then `info.This()` is `x` and `info.Holder()` is `y`. + * In case the property is installed on the global object the Holder() + * would return the global proxy. + */ + V8_INLINE Local HolderV2() const; + /** * \return The return value of the callback. * Can be changed by calling Set(). @@ -229,23 +305,29 @@ class PropertyCallbackInfo { */ V8_INLINE bool ShouldThrowOnError() const; - // This shouldn't be public, but the arm compiler needs it. 
- static const int kArgsLength = 7; - - protected: + private: + template + friend class PropertyCallbackInfo; friend class MacroAssembler; friend class internal::PropertyCallbackArguments; friend class internal::CustomArguments; - static const int kShouldThrowOnErrorIndex = 0; - static const int kHolderIndex = 1; - static const int kIsolateIndex = 2; - static const int kReturnValueDefaultValueIndex = 3; - static const int kReturnValueIndex = 4; - static const int kDataIndex = 5; - static const int kThisIndex = 6; - - V8_INLINE PropertyCallbackInfo(internal::Address* args) : args_(args) {} - internal::Address* args_; + friend void internal::PrintPropertyCallbackInfo(void*); + + static constexpr int kPropertyKeyIndex = 0; + static constexpr int kShouldThrowOnErrorIndex = 1; + static constexpr int kHolderIndex = 2; + static constexpr int kIsolateIndex = 3; + static constexpr int kHolderV2Index = 4; + static constexpr int kReturnValueIndex = 5; + static constexpr int kDataIndex = 6; + static constexpr int kThisIndex = 7; + static constexpr int kArgsLength = 8; + + static constexpr int kSize = kArgsLength * internal::kApiSystemPointerSize; + + PropertyCallbackInfo() = default; + + mutable internal::Address args_[kArgsLength]; }; using FunctionCallback = void (*)(const FunctionCallbackInfo& info); @@ -255,115 +337,291 @@ using FunctionCallback = void (*)(const FunctionCallbackInfo& info); template ReturnValue::ReturnValue(internal::Address* slot) : value_(slot) {} +template +void ReturnValue::SetInternal(internal::Address value) { +#if V8_STATIC_ROOTS_BOOL + using I = internal::Internals; + // Ensure that the upper 32-bits are not modified. Compiler should be + // able to optimize this to a store of a lower 32-bits of the value. + // This is fine since the callback can return only JavaScript values which + // are either Smis or heap objects allocated in the main cage. 
+ *value_ = I::DecompressTaggedField(*value_, I::CompressTagged(value)); +#else + *value_ = value; +#endif // V8_STATIC_ROOTS_BOOL +} + template template void ReturnValue::Set(const Global& handle) { static_assert(std::is_base_of::value, "type check"); if (V8_UNLIKELY(handle.IsEmpty())) { - *value_ = GetDefaultValue(); + SetDefaultValue(); } else { - *value_ = *reinterpret_cast(*handle); + SetInternal(handle.ptr()); } } +template +template +void ReturnValue::SetNonEmpty(const Global& handle) { + static_assert(std::is_base_of::value, "type check"); +#ifdef V8_ENABLE_CHECKS + internal::VerifyHandleIsNonEmpty(handle.IsEmpty()); +#endif // V8_ENABLE_CHECKS + SetInternal(handle.ptr()); +} + template template void ReturnValue::Set(const BasicTracedReference& handle) { static_assert(std::is_base_of::value, "type check"); if (V8_UNLIKELY(handle.IsEmpty())) { - *value_ = GetDefaultValue(); + SetDefaultValue(); } else { - *value_ = *reinterpret_cast(handle.val_); + SetInternal(handle.ptr()); } } +template +template +void ReturnValue::SetNonEmpty(const BasicTracedReference& handle) { + static_assert(std::is_base_of::value, "type check"); +#ifdef V8_ENABLE_CHECKS + internal::VerifyHandleIsNonEmpty(handle.IsEmpty()); +#endif // V8_ENABLE_CHECKS + SetInternal(handle.ptr()); +} + template template void ReturnValue::Set(const Local handle) { - static_assert(std::is_void::value || std::is_base_of::value, - "type check"); + // "V8_DEPRECATE_SOON" this method if |T| is |void|. +#ifdef V8_IMMINENT_DEPRECATION_WARNINGS + static constexpr bool is_allowed_void = false; + static_assert(!std::is_void::value, + "ReturnValue::Set(const Local) is deprecated. " + "Do nothing to indicate that the operation succeeded or use " + "SetFalse() to indicate that the operation failed (don't " + "forget to handle info.ShouldThrowOnError()). 
" + "See http://crbug.com/348660658 for details."); +#else + static constexpr bool is_allowed_void = std::is_void::value; +#endif // V8_IMMINENT_DEPRECATION_WARNINGS + static_assert(is_allowed_void || std::is_base_of::value, "type check"); if (V8_UNLIKELY(handle.IsEmpty())) { - *value_ = GetDefaultValue(); + SetDefaultValue(); + } else if constexpr (is_allowed_void) { + // Simulate old behaviour for "v8::AccessorSetterCallback" for which + // it was possible to set the return value even for ReturnValue. + Set(handle->BooleanValue(GetIsolate())); } else { - *value_ = *reinterpret_cast(*handle); + SetInternal(handle.ptr()); + } +} + +template +template +void ReturnValue::SetNonEmpty(const Local handle) { + // "V8_DEPRECATE_SOON" this method if |T| is |void|. +#ifdef V8_IMMINENT_DEPRECATION_WARNINGS + static constexpr bool is_allowed_void = false; + static_assert(!std::is_void::value, + "ReturnValue::SetNonEmpty(const Local) is deprecated. " + "Do nothing to indicate that the operation succeeded or use " + "SetFalse() to indicate that the operation failed (don't " + "forget to handle info.ShouldThrowOnError()). " + "See http://crbug.com/348660658 for details."); +#else + static constexpr bool is_allowed_void = std::is_void::value; +#endif // V8_IMMINENT_DEPRECATION_WARNINGS + static_assert(is_allowed_void || std::is_base_of::value, "type check"); +#ifdef V8_ENABLE_CHECKS + internal::VerifyHandleIsNonEmpty(handle.IsEmpty()); +#endif // V8_ENABLE_CHECKS + if constexpr (is_allowed_void) { + // Simulate old behaviour for "v8::AccessorSetterCallback" for which + // it was possible to set the return value even for ReturnValue. 
+ Set(handle->BooleanValue(GetIsolate())); + } else { + SetInternal(handle.ptr()); } } template void ReturnValue::Set(double i) { static_assert(std::is_base_of::value, "type check"); - Set(Number::New(GetIsolate(), i)); + SetNonEmpty(Number::New(GetIsolate(), i)); } template -void ReturnValue::Set(int32_t i) { +void ReturnValue::Set(int16_t i) { static_assert(std::is_base_of::value, "type check"); using I = internal::Internals; - if (V8_LIKELY(I::IsValidSmi(i))) { - *value_ = I::IntToSmi(i); + static_assert(I::IsValidSmi(std::numeric_limits::min())); + static_assert(I::IsValidSmi(std::numeric_limits::max())); + SetInternal(I::IntegralToSmi(i)); +} + +template +void ReturnValue::Set(int32_t i) { + static_assert(std::is_base_of::value, "type check"); + if (const auto result = internal::Internals::TryIntegralToSmi(i)) { + SetInternal(*result); + return; + } + SetNonEmpty(Integer::New(GetIsolate(), i)); +} + +template +void ReturnValue::Set(int64_t i) { + static_assert(std::is_base_of::value, "type check"); + if (const auto result = internal::Internals::TryIntegralToSmi(i)) { + SetInternal(*result); return; } - Set(Integer::New(GetIsolate(), i)); + SetNonEmpty(Number::New(GetIsolate(), static_cast(i))); +} + +template +void ReturnValue::Set(uint16_t i) { + static_assert(std::is_base_of::value, "type check"); + using I = internal::Internals; + static_assert(I::IsValidSmi(std::numeric_limits::min())); + static_assert(I::IsValidSmi(std::numeric_limits::max())); + SetInternal(I::IntegralToSmi(i)); } template void ReturnValue::Set(uint32_t i) { static_assert(std::is_base_of::value, "type check"); - // Can't simply use INT32_MAX here for whatever reason. 
- bool fits_into_int32_t = (i & (1U << 31)) == 0; - if (V8_LIKELY(fits_into_int32_t)) { - Set(static_cast(i)); + if (const auto result = internal::Internals::TryIntegralToSmi(i)) { + SetInternal(*result); return; } - Set(Integer::NewFromUnsigned(GetIsolate(), i)); + SetNonEmpty(Integer::NewFromUnsigned(GetIsolate(), i)); +} + +template +void ReturnValue::Set(uint64_t i) { + static_assert(std::is_base_of::value, "type check"); + if (const auto result = internal::Internals::TryIntegralToSmi(i)) { + SetInternal(*result); + return; + } + SetNonEmpty(Number::New(GetIsolate(), static_cast(i))); } template void ReturnValue::Set(bool value) { - static_assert(std::is_base_of::value, "type check"); + static_assert(std::is_void::value || std::is_base_of::value, + "type check"); using I = internal::Internals; +#if V8_STATIC_ROOTS_BOOL +#ifdef V8_ENABLE_CHECKS + internal::PerformCastCheck( + internal::ValueHelper::SlotAsValue(value_)); +#endif // V8_ENABLE_CHECKS + SetInternal(value ? I::StaticReadOnlyRoot::kTrueValue + : I::StaticReadOnlyRoot::kFalseValue); +#else int root_index; if (value) { root_index = I::kTrueValueRootIndex; } else { root_index = I::kFalseValueRootIndex; } - *value_ = *I::GetRoot(GetIsolate(), root_index); + *value_ = I::GetRoot(GetIsolate(), root_index); +#endif // V8_STATIC_ROOTS_BOOL +} + +template +void ReturnValue::SetDefaultValue() { + using I = internal::Internals; + if constexpr (std::is_same_v || std::is_same_v) { + Set(true); + } else if constexpr (std::is_same_v) { + SetInternal(I::IntegralToSmi(0)); + } else { + static_assert(std::is_same_v || std::is_same_v); +#if V8_STATIC_ROOTS_BOOL + SetInternal(I::StaticReadOnlyRoot::kUndefinedValue); +#else + *value_ = I::GetRoot(GetIsolate(), I::kUndefinedValueRootIndex); +#endif // V8_STATIC_ROOTS_BOOL + } } template void ReturnValue::SetNull() { static_assert(std::is_base_of::value, "type check"); using I = internal::Internals; - *value_ = *I::GetRoot(GetIsolate(), I::kNullValueRootIndex); +#if 
V8_STATIC_ROOTS_BOOL +#ifdef V8_ENABLE_CHECKS + internal::PerformCastCheck( + internal::ValueHelper::SlotAsValue(value_)); +#endif // V8_ENABLE_CHECKS + SetInternal(I::StaticReadOnlyRoot::kNullValue); +#else + *value_ = I::GetRoot(GetIsolate(), I::kNullValueRootIndex); +#endif // V8_STATIC_ROOTS_BOOL } template void ReturnValue::SetUndefined() { static_assert(std::is_base_of::value, "type check"); using I = internal::Internals; - *value_ = *I::GetRoot(GetIsolate(), I::kUndefinedValueRootIndex); +#if V8_STATIC_ROOTS_BOOL +#ifdef V8_ENABLE_CHECKS + internal::PerformCastCheck( + internal::ValueHelper::SlotAsValue(value_)); +#endif // V8_ENABLE_CHECKS + SetInternal(I::StaticReadOnlyRoot::kUndefinedValue); +#else + *value_ = I::GetRoot(GetIsolate(), I::kUndefinedValueRootIndex); +#endif // V8_STATIC_ROOTS_BOOL +} + +template +void ReturnValue::SetFalse() { + static_assert(std::is_void::value || std::is_base_of::value, + "type check"); + using I = internal::Internals; +#if V8_STATIC_ROOTS_BOOL +#ifdef V8_ENABLE_CHECKS + internal::PerformCastCheck( + internal::ValueHelper::SlotAsValue(value_)); +#endif // V8_ENABLE_CHECKS + SetInternal(I::StaticReadOnlyRoot::kFalseValue); +#else + *value_ = I::GetRoot(GetIsolate(), I::kFalseValueRootIndex); +#endif // V8_STATIC_ROOTS_BOOL } template void ReturnValue::SetEmptyString() { static_assert(std::is_base_of::value, "type check"); using I = internal::Internals; - *value_ = *I::GetRoot(GetIsolate(), I::kEmptyStringRootIndex); +#if V8_STATIC_ROOTS_BOOL +#ifdef V8_ENABLE_CHECKS + internal::PerformCastCheck( + internal::ValueHelper::SlotAsValue(value_)); +#endif // V8_ENABLE_CHECKS + SetInternal(I::StaticReadOnlyRoot::kEmptyString); +#else + *value_ = I::GetRoot(GetIsolate(), I::kEmptyStringRootIndex); +#endif // V8_STATIC_ROOTS_BOOL } template Isolate* ReturnValue::GetIsolate() const { - // Isolate is always the pointer below the default value on the stack. 
- return *reinterpret_cast(&value_[-2]); + return *reinterpret_cast(&value_[kIsolateValueIndex]); } template Local ReturnValue::Get() const { - using I = internal::Internals; - if (*value_ == *I::GetRoot(GetIsolate(), I::kTheHoleValueRootIndex)) - return Local(*Undefined(GetIsolate())); - return Local::New(GetIsolate(), reinterpret_cast(value_)); + return Local::New(GetIsolate(), + internal::ValueHelper::SlotAsValue(value_)); } template @@ -372,12 +630,6 @@ void ReturnValue::Set(S* whatever) { static_assert(sizeof(S) < 0, "incompilable to prevent inadvertent misuse"); } -template -internal::Address ReturnValue::GetDefaultValue() { - // Default value is always the pointer below value_ on the stack. - return value_[-1]; -} - template FunctionCallbackInfo::FunctionCallbackInfo(internal::Address* implicit_args, internal::Address* values, @@ -387,31 +639,35 @@ FunctionCallbackInfo::FunctionCallbackInfo(internal::Address* implicit_args, template Local FunctionCallbackInfo::operator[](int i) const { // values_ points to the first argument (not the receiver). - if (i < 0 || length_ <= i) return Local(*Undefined(GetIsolate())); - return Local(reinterpret_cast(values_ + i)); + if (i < 0 || Length() <= i) return Undefined(GetIsolate()); + return Local::FromSlot(values_ + i); } template Local FunctionCallbackInfo::This() const { // values_ points to the first argument (not the receiver). 
- return Local(reinterpret_cast(values_ - 1)); + return Local::FromSlot(values_ + kThisValuesIndex); +} + +template +Local FunctionCallbackInfo::HolderSoonToBeDeprecated() const { + return Local::FromSlot(&implicit_args_[kHolderIndex]); } template Local FunctionCallbackInfo::Holder() const { - return Local( - reinterpret_cast(&implicit_args_[kHolderIndex])); + return HolderSoonToBeDeprecated(); } template Local FunctionCallbackInfo::NewTarget() const { - return Local( - reinterpret_cast(&implicit_args_[kNewTargetIndex])); + return Local::FromSlot(&implicit_args_[kNewTargetIndex]); } template Local FunctionCallbackInfo::Data() const { - return Local(reinterpret_cast(&implicit_args_[kDataIndex])); + auto target = Local::FromSlot(&implicit_args_[kTargetIndex]); + return api_internal::GetFunctionTemplateData(GetIsolate(), target); } template @@ -431,7 +687,7 @@ bool FunctionCallbackInfo::IsConstructCall() const { template int FunctionCallbackInfo::Length() const { - return length_; + return static_cast(length_); } template @@ -441,17 +697,34 @@ Isolate* PropertyCallbackInfo::GetIsolate() const { template Local PropertyCallbackInfo::Data() const { - return Local(reinterpret_cast(&args_[kDataIndex])); + return Local::FromSlot(&args_[kDataIndex]); } template Local PropertyCallbackInfo::This() const { - return Local(reinterpret_cast(&args_[kThisIndex])); + return Local::FromSlot(&args_[kThisIndex]); } template Local PropertyCallbackInfo::Holder() const { - return Local(reinterpret_cast(&args_[kHolderIndex])); + return Local::FromSlot(&args_[kHolderIndex]); +} + +namespace api_internal { +// Returns JSGlobalProxy if holder is JSGlobalObject or unmodified holder +// otherwise. 
+V8_EXPORT internal::Address ConvertToJSGlobalProxyIfNecessary( + internal::Address holder); +} // namespace api_internal + +template +Local PropertyCallbackInfo::HolderV2() const { + using I = internal::Internals; + if (!I::HasHeapObjectTag(args_[kHolderV2Index])) { + args_[kHolderV2Index] = + api_internal::ConvertToJSGlobalProxyIfNecessary(args_[kHolderIndex]); + } + return Local::FromSlot(&args_[kHolderV2Index]); } template @@ -463,8 +736,8 @@ template bool PropertyCallbackInfo::ShouldThrowOnError() const { using I = internal::Internals; if (args_[kShouldThrowOnErrorIndex] != - I::IntToSmi(I::kInferShouldThrowMode)) { - return args_[kShouldThrowOnErrorIndex] != I::IntToSmi(I::kDontThrow); + I::IntegralToSmi(I::kInferShouldThrowMode)) { + return args_[kShouldThrowOnErrorIndex] != I::IntegralToSmi(I::kDontThrow); } return v8::internal::ShouldThrowOnError( reinterpret_cast(GetIsolate())); diff --git a/deps/include/v8-function.h b/deps/include/v8-function.h index 2dc7e722..d28d4b50 100644 --- a/deps/include/v8-function.h +++ b/deps/include/v8-function.h @@ -52,6 +52,10 @@ class V8_EXPORT Function : public Object { Local context, int argc, Local argv[], SideEffectType side_effect_type = SideEffectType::kHasSideEffect) const; + V8_WARN_UNUSED_RESULT MaybeLocal Call(v8::Isolate* isolate, + Local context, + Local recv, int argc, + Local argv[]); V8_WARN_UNUSED_RESULT MaybeLocal Call(Local context, Local recv, int argc, Local argv[]); @@ -59,8 +63,6 @@ class V8_EXPORT Function : public Object { void SetName(Local name); Local GetName() const; - MaybeLocal GetUnboundScript() const; - /** * Name inferred from variable or property assignment of this function. * Used to facilitate debugging and profiling of JavaScript code written @@ -86,6 +88,12 @@ class V8_EXPORT Function : public Object { */ int GetScriptColumnNumber() const; + /** + * Returns zero based start position (character offset) of function body and + * kLineOffsetNotFound if no information available. 
+ */ + int GetScriptStartPosition() const; + /** * Returns scriptId. */ diff --git a/deps/include/v8-handle-base.h b/deps/include/v8-handle-base.h new file mode 100644 index 00000000..3075441e --- /dev/null +++ b/deps/include/v8-handle-base.h @@ -0,0 +1,137 @@ +// Copyright 2023 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef INCLUDE_V8_HANDLE_BASE_H_ +#define INCLUDE_V8_HANDLE_BASE_H_ + +#include "v8-internal.h" // NOLINT(build/include_directory) + +namespace v8::api_internal { + +template +class StackAllocated { + public: + V8_INLINE StackAllocated() = default; + + protected: + struct no_checking_tag {}; + static constexpr no_checking_tag do_not_check{}; + + V8_INLINE explicit StackAllocated(no_checking_tag) {} + V8_INLINE explicit StackAllocated(const StackAllocated& other, + no_checking_tag) {} + + V8_INLINE void VerifyOnStack() const {} +}; + +template <> +class V8_TRIVIAL_ABI StackAllocated : public StackAllocated { + public: + V8_INLINE StackAllocated() { VerifyOnStack(); } + +#if V8_HAS_ATTRIBUTE_TRIVIAL_ABI + // In this case, StackAllocated becomes not trivially copyable. + V8_INLINE StackAllocated(const StackAllocated& other) { VerifyOnStack(); } + StackAllocated& operator=(const StackAllocated&) = default; +#endif + + protected: + V8_INLINE explicit StackAllocated(no_checking_tag tag) + : StackAllocated(tag) {} + V8_INLINE explicit StackAllocated(const StackAllocated& other, + no_checking_tag tag) + : StackAllocated(other, tag) {} + + V8_EXPORT void VerifyOnStack() const; +}; + +/** + * A base class for abstract handles containing indirect pointers. + * These are useful regardless of whether direct local support is enabled. + */ +class IndirectHandleBase { + public: + // Returns true if the handle is empty. + V8_INLINE bool IsEmpty() const { return location_ == nullptr; } + + // Sets the handle to be empty. IsEmpty() will then return true. 
+ V8_INLINE void Clear() { location_ = nullptr; } + + protected: + friend class internal::ValueHelper; + friend class internal::HandleHelper; + + V8_INLINE IndirectHandleBase() = default; + V8_INLINE IndirectHandleBase(const IndirectHandleBase& other) = default; + V8_INLINE IndirectHandleBase& operator=(const IndirectHandleBase& that) = + default; + + V8_INLINE explicit IndirectHandleBase(internal::Address* location) + : location_(location) {} + + // Returns the address of the actual heap object (tagged). + // This method must be called only if the handle is not empty, otherwise it + // will crash. + V8_INLINE internal::Address ptr() const { return *location_; } + + // Returns a reference to the slot (indirect pointer). + V8_INLINE internal::Address* const& slot() const { return location_; } + V8_INLINE internal::Address*& slot() { return location_; } + + // Returns the handler's "value" (direct or indirect pointer, depending on + // whether direct local support is enabled). + template + V8_INLINE T* value() const { + return internal::ValueHelper::SlotAsValue(slot()); + } + + private: + internal::Address* location_ = nullptr; +}; + +#ifdef V8_ENABLE_DIRECT_HANDLE + +/** + * A base class for abstract handles containing direct pointers. + * These are only possible when conservative stack scanning is enabled. + */ +class DirectHandleBase { + public: + // Returns true if the handle is empty. + V8_INLINE bool IsEmpty() const { + return ptr_ == internal::ValueHelper::kEmpty; + } + + // Sets the handle to be empty. IsEmpty() will then return true. 
+ V8_INLINE void Clear() { ptr_ = internal::ValueHelper::kEmpty; } + + protected: + friend class internal::ValueHelper; + friend class internal::HandleHelper; + + V8_INLINE DirectHandleBase() = default; + V8_INLINE DirectHandleBase(const DirectHandleBase& other) = default; + V8_INLINE DirectHandleBase& operator=(const DirectHandleBase& that) = default; + + V8_INLINE explicit DirectHandleBase(internal::Address ptr) : ptr_(ptr) {} + + // Returns the address of the referenced object. + V8_INLINE internal::Address ptr() const { return ptr_; } + + // Returns the handler's "value" (direct pointer, as direct local support + // is guaranteed to be enabled here). + template + V8_INLINE T* value() const { + return reinterpret_cast(ptr_); + } + + private: + internal::Address ptr_ = internal::ValueHelper::kEmpty; +}; + +#endif // V8_ENABLE_DIRECT_HANDLE + +} // namespace v8::api_internal + +#endif // INCLUDE_V8_HANDLE_BASE_H_ diff --git a/deps/include/v8-initialization.h b/deps/include/v8-initialization.h index d3e35d6e..46a21a02 100644 --- a/deps/include/v8-initialization.h +++ b/deps/include/v8-initialization.h @@ -52,6 +52,9 @@ using ReturnAddressLocationResolver = using DcheckErrorCallback = void (*)(const char* file, int line, const char* message); +using V8FatalErrorCallback = void (*)(const char* file, int line, + const char* message); + /** * Container class for static utility functions. */ @@ -77,6 +80,12 @@ class V8_EXPORT V8 { /** Set the callback to invoke in case of Dcheck failures. */ static void SetDcheckErrorHandler(DcheckErrorCallback that); + /** Set the callback to invoke in the case of CHECK failures or fatal + * errors. This is distinct from Isolate::SetFatalErrorHandler, which + * is invoked in response to API usage failures. + * */ + static void SetFatalErrorHandler(V8FatalErrorCallback that); + /** * Sets V8 flags from a string. */ @@ -97,10 +106,24 @@ class V8_EXPORT V8 { * is created. It always returns true. 
*/ V8_INLINE static bool Initialize() { +#ifdef V8_TARGET_OS_ANDROID + const bool kV8TargetOsIsAndroid = true; +#else + const bool kV8TargetOsIsAndroid = false; +#endif + +#ifdef V8_ENABLE_CHECKS + const bool kV8EnableChecks = true; +#else + const bool kV8EnableChecks = false; +#endif + const int kBuildConfiguration = (internal::PointerCompressionIsEnabled() ? kPointerCompression : 0) | (internal::SmiValuesAre31Bits() ? k31BitSmis : 0) | - (internal::SandboxIsEnabled() ? kSandbox : 0); + (internal::SandboxIsEnabled() ? kSandbox : 0) | + (kV8TargetOsIsAndroid ? kTargetOsIsAndroid : 0) | + (kV8EnableChecks ? kEnableChecks : 0); return Initialize(kBuildConfiguration); } @@ -271,6 +294,8 @@ class V8_EXPORT V8 { kPointerCompression = 1 << 0, k31BitSmis = 1 << 1, kSandbox = 1 << 2, + kTargetOsIsAndroid = 1 << 3, + kEnableChecks = 1 << 4, }; /** diff --git a/deps/include/v8-inspector.h b/deps/include/v8-inspector.h index 563ad196..61eb837c 100644 --- a/deps/include/v8-inspector.h +++ b/deps/include/v8-inspector.h @@ -172,10 +172,6 @@ class V8_EXPORT V8InspectorSession { virtual v8::Local get(v8::Local) = 0; virtual ~Inspectable() = default; }; - class V8_EXPORT CommandLineAPIScope { - public: - virtual ~CommandLineAPIScope() = default; - }; virtual void addInspectedObject(std::unique_ptr) = 0; // Dispatching protocol messages. @@ -185,9 +181,6 @@ class V8_EXPORT V8InspectorSession { virtual std::vector> supportedDomains() = 0; - virtual std::unique_ptr - initializeCommandLineAPIScope(int executionContextId) = 0; - // Debugger actions. 
virtual void schedulePauseOnNextStatement(StringView breakReason, StringView breakDetails) = 0; @@ -213,19 +206,47 @@ class V8_EXPORT V8InspectorSession { virtual void releaseObjectGroup(StringView) = 0; virtual void triggerPreciseCoverageDeltaUpdate(StringView occasion) = 0; + struct V8_EXPORT EvaluateResult { + enum class ResultType { + kNotRun, + kSuccess, + kException, + }; + + ResultType type; + v8::Local value; + }; + // Evaluate 'expression' in the provided context. Does the same as + // Runtime#evaluate under-the-hood but exposed on the C++ side. + virtual EvaluateResult evaluate(v8::Local context, + StringView expression, + bool includeCommandLineAPI = false) = 0; + + // Prepare for shutdown (disables debugger pausing, etc.). + virtual void stop() = 0; }; -class V8_EXPORT WebDriverValue { - public: - explicit WebDriverValue(std::unique_ptr type, - v8::MaybeLocal value = {}) +struct V8_EXPORT DeepSerializedValue { + explicit DeepSerializedValue(std::unique_ptr type, + v8::MaybeLocal value = {}) : type(std::move(type)), value(value) {} std::unique_ptr type; v8::MaybeLocal value; }; +struct V8_EXPORT DeepSerializationResult { + explicit DeepSerializationResult( + std::unique_ptr serializedValue) + : serializedValue(std::move(serializedValue)), isSuccess(true) {} + explicit DeepSerializationResult(std::unique_ptr errorMessage) + : errorMessage(std::move(errorMessage)), isSuccess(false) {} + + // Use std::variant when available. 
+ std::unique_ptr serializedValue; + std::unique_ptr errorMessage; + bool isSuccess; +}; + class V8_EXPORT V8InspectorClient { public: virtual ~V8InspectorClient() = default; @@ -243,8 +264,9 @@ class V8_EXPORT V8InspectorClient { virtual void beginUserGesture() {} virtual void endUserGesture() {} - virtual std::unique_ptr serializeToWebDriverValue( - v8::Local v8_value, int max_depth) { + virtual std::unique_ptr deepSerialize( + v8::Local v8Value, int maxDepth, + v8::Local additionalParameters) { return nullptr; } virtual std::unique_ptr valueSubtype(v8::Local) { @@ -275,9 +297,12 @@ class V8_EXPORT V8InspectorClient { return v8::MaybeLocal(); } - virtual void consoleTime(const StringView& title) {} - virtual void consoleTimeEnd(const StringView& title) {} - virtual void consoleTimeStamp(const StringView& title) {} + virtual void consoleTime(v8::Isolate* isolate, v8::Local label) {} + virtual void consoleTimeEnd(v8::Isolate* isolate, + v8::Local label) {} + virtual void consoleTimeStamp(v8::Isolate* isolate, + v8::Local label) {} + virtual void consoleClear(int contextGroupId) {} virtual double currentTimeMS() { return 0; } typedef void (*TimerCallback)(void*); diff --git a/deps/include/v8-internal.h b/deps/include/v8-internal.h index 53837aa5..bc96f82a 100644 --- a/deps/include/v8-internal.h +++ b/deps/include/v8-internal.h @@ -10,10 +10,29 @@ #include #include +#include +#include +#include +#include #include -#include "v8-version.h" // NOLINT(build/include_directory) -#include "v8config.h" // NOLINT(build/include_directory) +#include "v8config.h" // NOLINT(build/include_directory) + +// TODO(pkasting): Use /spaceship unconditionally after dropping +// support for old libstdc++ versions. 
+#if __has_include() +#include +#endif +#if defined(__cpp_lib_three_way_comparison) && \ + __cpp_lib_three_way_comparison >= 201711L && \ + defined(__cpp_lib_concepts) && __cpp_lib_concepts >= 202002L +#include +#include + +#define V8_HAVE_SPACESHIP_OPERATOR 1 +#else +#define V8_HAVE_SPACESHIP_OPERATOR 0 +#endif namespace v8 { @@ -21,15 +40,14 @@ class Array; class Context; class Data; class Isolate; -template -class Local; namespace internal { +class Heap; class Isolate; typedef uintptr_t Address; -static const Address kNullAddress = 0; +static constexpr Address kNullAddress = 0; constexpr int KB = 1024; constexpr int MB = KB * 1024; @@ -82,12 +100,15 @@ struct SmiTagging<4> { static_cast(kUintptrAllBitsSet << (kSmiValueSize - 1)); static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1); - V8_INLINE static int SmiToInt(const internal::Address value) { + V8_INLINE static constexpr int SmiToInt(Address value) { int shift_bits = kSmiTagSize + kSmiShiftSize; // Truncate and shift down (requires >> to be sign extending). return static_cast(static_cast(value)) >> shift_bits; } - V8_INLINE static constexpr bool IsValidSmi(intptr_t value) { + + template && + std::is_signed_v>* = nullptr> + V8_INLINE static constexpr bool IsValidSmi(T value) { // Is value in range [kSmiMinValue, kSmiMaxValue]. // Use unsigned operations in order to avoid undefined behaviour in case of // signed integer overflow. @@ -96,6 +117,28 @@ struct SmiTagging<4> { (static_cast(kSmiMaxValue) - static_cast(kSmiMinValue)); } + + template && + std::is_unsigned_v>* = nullptr> + V8_INLINE static constexpr bool IsValidSmi(T value) { + static_assert(kSmiMaxValue <= std::numeric_limits::max()); + return value <= static_cast(kSmiMaxValue); + } + + // Same as the `intptr_t` version but works with int64_t on 32-bit builds + // without slowing down anything else. 
+ V8_INLINE static constexpr bool IsValidSmi(int64_t value) { + return (static_cast(value) - + static_cast(kSmiMinValue)) <= + (static_cast(kSmiMaxValue) - + static_cast(kSmiMinValue)); + } + + V8_INLINE static constexpr bool IsValidSmi(uint64_t value) { + static_assert(kSmiMaxValue <= std::numeric_limits::max()); + return value <= static_cast(kSmiMaxValue); + } }; // Smi constants for systems where tagged pointer is a 64-bit value. @@ -107,14 +150,25 @@ struct SmiTagging<8> { static_cast(kUintptrAllBitsSet << (kSmiValueSize - 1)); static constexpr intptr_t kSmiMaxValue = -(kSmiMinValue + 1); - V8_INLINE static int SmiToInt(const internal::Address value) { + V8_INLINE static constexpr int SmiToInt(Address value) { int shift_bits = kSmiTagSize + kSmiShiftSize; // Shift down and throw away top 32 bits. return static_cast(static_cast(value) >> shift_bits); } - V8_INLINE static constexpr bool IsValidSmi(intptr_t value) { + + template && + std::is_signed_v>* = nullptr> + V8_INLINE static constexpr bool IsValidSmi(T value) { // To be representable as a long smi, the value must be a 32-bit integer. - return (value == static_cast(value)); + return std::numeric_limits::min() <= value && + value <= std::numeric_limits::max(); + } + + template && + std::is_unsigned_v>* = nullptr> + V8_INLINE static constexpr bool IsValidSmi(T value) { + return value <= std::numeric_limits::max(); } }; @@ -150,8 +204,9 @@ const int kSmiMinValue = static_cast(PlatformSmiTagging::kSmiMinValue); const int kSmiMaxValue = static_cast(PlatformSmiTagging::kSmiMaxValue); constexpr bool SmiValuesAre31Bits() { return kSmiValueSize == 31; } constexpr bool SmiValuesAre32Bits() { return kSmiValueSize == 32; } +constexpr bool Is64() { return kApiSystemPointerSize == sizeof(int64_t); } -V8_INLINE static constexpr internal::Address IntToSmi(int value) { +V8_INLINE static constexpr Address IntToSmi(int value) { return (static_cast
(value) << (kSmiTagSize + kSmiShiftSize)) | kSmiTag; } @@ -174,11 +229,15 @@ using SandboxedPointer_t = Address; #ifdef V8_ENABLE_SANDBOX // Size of the sandbox, excluding the guard regions surrounding it. -#ifdef V8_TARGET_OS_ANDROID +#if defined(V8_TARGET_OS_ANDROID) // On Android, most 64-bit devices seem to be configured with only 39 bits of // virtual address space for userspace. As such, limit the sandbox to 128GB (a // quarter of the total available address space). constexpr size_t kSandboxSizeLog2 = 37; // 128 GB +#elif defined(V8_TARGET_ARCH_LOONG64) +// Some Linux distros on LoongArch64 configured with only 40 bits of virtual +// address space for userspace. Limit the sandbox to 256GB here. +constexpr size_t kSandboxSizeLog2 = 38; // 256 GB #else // Everywhere else use a 1TB sandbox. constexpr size_t kSandboxSizeLog2 = 40; // 1 TB @@ -248,20 +307,23 @@ static_assert(1ULL << (64 - kBoundedSizeShift) == // size allows omitting bounds checks on table accesses if the indices are // guaranteed (e.g. through shifting) to be below the maximum index. This // value must be a power of two. -static const size_t kExternalPointerTableReservationSize = 512 * MB; +constexpr size_t kExternalPointerTableReservationSize = 256 * MB; // The external pointer table indices stored in HeapObjects as external // pointers are shifted to the left by this amount to guarantee that they are -// smaller than the maximum table size. -static const uint32_t kExternalPointerIndexShift = 6; +// smaller than the maximum table size even after the C++ compiler multiplies +// them by 8 to be used as indexes into a table of 64 bit pointers. 
+constexpr uint32_t kExternalPointerIndexShift = 7; #else -static const size_t kExternalPointerTableReservationSize = 1024 * MB; -static const uint32_t kExternalPointerIndexShift = 5; +constexpr size_t kExternalPointerTableReservationSize = 512 * MB; +constexpr uint32_t kExternalPointerIndexShift = 6; #endif // V8_TARGET_OS_ANDROID // The maximum number of entries in an external pointer table. -static const size_t kMaxExternalPointers = - kExternalPointerTableReservationSize / kApiSystemPointerSize; +constexpr int kExternalPointerTableEntrySize = 8; +constexpr int kExternalPointerTableEntrySizeLog2 = 3; +constexpr size_t kMaxExternalPointers = + kExternalPointerTableReservationSize / kExternalPointerTableEntrySize; static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers, "kExternalPointerTableReservationSize and " "kExternalPointerIndexShift don't match"); @@ -269,7 +331,7 @@ static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers, #else // !V8_COMPRESS_POINTERS // Needed for the V8.SandboxedExternalPointersCount histogram. -static const size_t kMaxExternalPointers = 0; +constexpr size_t kMaxExternalPointers = 0; #endif // V8_COMPRESS_POINTERS @@ -282,15 +344,110 @@ static const size_t kMaxExternalPointers = 0; // that it is smaller than the size of the table. using ExternalPointerHandle = uint32_t; -// ExternalPointers point to objects located outside the sandbox. When -// sandboxed external pointers are enabled, these are stored on heap as -// ExternalPointerHandles, otherwise they are simply raw pointers. +// ExternalPointers point to objects located outside the sandbox. When the V8 +// sandbox is enabled, these are stored on heap as ExternalPointerHandles, +// otherwise they are simply raw pointers. 
#ifdef V8_ENABLE_SANDBOX using ExternalPointer_t = ExternalPointerHandle; #else using ExternalPointer_t = Address; #endif +constexpr ExternalPointer_t kNullExternalPointer = 0; +constexpr ExternalPointerHandle kNullExternalPointerHandle = 0; + +// See `ExternalPointerHandle` for the main documentation. The difference to +// `ExternalPointerHandle` is that the handle does not represent an arbitrary +// external pointer but always refers to an object managed by `CppHeap`. The +// handles are using in combination with a dedicated table for `CppHeap` +// references. +using CppHeapPointerHandle = uint32_t; + +// The actual pointer to objects located on the `CppHeap`. When pointer +// compression is enabled these pointers are stored as `CppHeapPointerHandle`. +// In non-compressed configurations the pointers are simply stored as raw +// pointers. +#ifdef V8_COMPRESS_POINTERS +using CppHeapPointer_t = CppHeapPointerHandle; +#else +using CppHeapPointer_t = Address; +#endif + +constexpr CppHeapPointer_t kNullCppHeapPointer = 0; +constexpr CppHeapPointerHandle kNullCppHeapPointerHandle = 0; + +constexpr uint64_t kCppHeapPointerMarkBit = 1ULL; +constexpr uint64_t kCppHeapPointerTagShift = 1; +constexpr uint64_t kCppHeapPointerPayloadShift = 16; + +#ifdef V8_COMPRESS_POINTERS +// CppHeapPointers use a dedicated pointer table. These constants control the +// size and layout of the table. See the corresponding constants for the +// external pointer table for further details. 
+constexpr size_t kCppHeapPointerTableReservationSize = + kExternalPointerTableReservationSize; +constexpr uint32_t kCppHeapPointerIndexShift = kExternalPointerIndexShift; + +constexpr int kCppHeapPointerTableEntrySize = 8; +constexpr int kCppHeapPointerTableEntrySizeLog2 = 3; +constexpr size_t kMaxCppHeapPointers = + kCppHeapPointerTableReservationSize / kCppHeapPointerTableEntrySize; +static_assert((1 << (32 - kCppHeapPointerIndexShift)) == kMaxCppHeapPointers, + "kCppHeapPointerTableReservationSize and " + "kCppHeapPointerIndexShift don't match"); + +#else // !V8_COMPRESS_POINTERS + +// Needed for the V8.SandboxedCppHeapPointersCount histogram. +constexpr size_t kMaxCppHeapPointers = 0; + +#endif // V8_COMPRESS_POINTERS + +// See `ExternalPointerHandle` for the main documentation. The difference to +// `ExternalPointerHandle` is that the handle always refers to a +// (external pointer, size) tuple. The handles are used in combination with a +// dedicated external buffer table (EBT). +using ExternalBufferHandle = uint32_t; + +// ExternalBuffer point to buffer located outside the sandbox. When the V8 +// sandbox is enabled, these are stored on heap as ExternalBufferHandles, +// otherwise they are simply raw pointers. +#ifdef V8_ENABLE_SANDBOX +using ExternalBuffer_t = ExternalBufferHandle; +#else +using ExternalBuffer_t = Address; +#endif + +#ifdef V8_TARGET_OS_ANDROID +// The size of the virtual memory reservation for the external buffer table. +// As with the external pointer table, a maximum table size in combination with +// shifted indices allows omitting bounds checks. +constexpr size_t kExternalBufferTableReservationSize = 64 * MB; + +// The external buffer handles are stores shifted to the left by this amount +// to guarantee that they are smaller than the maximum table size. 
+constexpr uint32_t kExternalBufferHandleShift = 10; +#else +constexpr size_t kExternalBufferTableReservationSize = 128 * MB; +constexpr uint32_t kExternalBufferHandleShift = 9; +#endif // V8_TARGET_OS_ANDROID + +// A null handle always references an entry that contains nullptr. +constexpr ExternalBufferHandle kNullExternalBufferHandle = 0; + +// The maximum number of entries in an external buffer table. +constexpr int kExternalBufferTableEntrySize = 16; +constexpr int kExternalBufferTableEntrySizeLog2 = 4; +constexpr size_t kMaxExternalBufferPointers = + kExternalBufferTableReservationSize / kExternalBufferTableEntrySize; +static_assert((1 << (32 - kExternalBufferHandleShift)) == + kMaxExternalBufferPointers, + "kExternalBufferTableReservationSize and " + "kExternalBufferHandleShift don't match"); + +// +// External Pointers. +// // When the sandbox is enabled, external pointers are stored in an external // pointer table and are referenced from HeapObjects through an index (a // "handle"). When stored in the table, the pointers are tagged with per-type @@ -352,7 +509,7 @@ using ExternalPointer_t = Address; // extension (MTE) which would use bits [56, 60). // // External pointer tables are also available even when the sandbox is off but -// pointer compression is on. In that case, the mechanism can be used to easy +// pointer compression is on. In that case, the mechanism can be used to ease // alignment requirements as it turns unaligned 64-bit raw pointers into // aligned 32-bit indices. To "opt-in" to the external pointer table mechanism // for this purpose, instead of using the ExternalPointer accessors one needs to @@ -360,13 +517,14 @@ using ExternalPointer_t = Address; // ExternalPointerTable. 
constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62; constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000; +constexpr uint64_t kExternalPointerTagMaskWithoutMarkBit = 0xff000000000000; constexpr uint64_t kExternalPointerTagShift = 48; // All possible 8-bit type tags. // These are sorted so that tags can be grouped together and it can efficiently // be checked if a tag belongs to a given group. See for example the // IsSharedExternalPointerType routine. -constexpr uint64_t kAllExternalPointerTypeTags[] = { +constexpr uint64_t kAllTagsForAndBasedTypeChecking[] = { 0b00001111, 0b00010111, 0b00011011, 0b00011101, 0b00011110, 0b00100111, 0b00101011, 0b00101101, 0b00101110, 0b00110011, 0b00110101, 0b00110110, 0b00111001, 0b00111010, 0b00111100, 0b01000111, 0b01001011, 0b01001101, @@ -380,8 +538,8 @@ constexpr uint64_t kAllExternalPointerTypeTags[] = { 0b11001100, 0b11010001, 0b11010010, 0b11010100, 0b11011000, 0b11100001, 0b11100010, 0b11100100, 0b11101000, 0b11110000}; -#define TAG(i) \ - ((kAllExternalPointerTypeTags[i] << kExternalPointerTagShift) | \ +#define TAG(i) \ + ((kAllTagsForAndBasedTypeChecking[i] << kExternalPointerTagShift) | \ kExternalPointerMarkBit) // clang-format off @@ -400,25 +558,73 @@ constexpr uint64_t kAllExternalPointerTypeTags[] = { V(kExternalStringResourceTag, TAG(1)) \ V(kExternalStringResourceDataTag, TAG(2)) \ V(kLastSharedTag, TAG(2)) + // Leave some space in the tag range here for future shared tags. // External pointers using these tags are kept in a per-Isolate external // pointer table and can only be accessed when this Isolate is active. 
#define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V) \ - V(kForeignForeignAddressTag, TAG(10)) \ - V(kNativeContextMicrotaskQueueTag, TAG(11)) \ - V(kEmbedderDataSlotPayloadTag, TAG(12)) \ + V(kNativeContextMicrotaskQueueTag, TAG(5)) \ + V(kEmbedderDataSlotPayloadTag, TAG(6)) \ /* This tag essentially stands for a `void*` pointer in the V8 API, and */ \ /* it is the Embedder's responsibility to ensure type safety (against */ \ /* substitution) and lifetime validity of these objects. */ \ - V(kExternalObjectValueTag, TAG(13)) \ - V(kCallHandlerInfoCallbackTag, TAG(14)) \ - V(kAccessorInfoGetterTag, TAG(15)) \ - V(kAccessorInfoSetterTag, TAG(16)) \ - V(kWasmInternalFunctionCallTargetTag, TAG(17)) \ - V(kWasmTypeInfoNativeTypeTag, TAG(18)) \ - V(kWasmExportedFunctionDataSignatureTag, TAG(19)) \ - V(kWasmContinuationJmpbufTag, TAG(20)) \ - V(kArrayBufferExtensionTag, TAG(21)) + V(kExternalObjectValueTag, TAG(7)) \ + V(kFunctionTemplateInfoCallbackTag, TAG(8)) \ + V(kAccessorInfoGetterTag, TAG(9)) \ + V(kAccessorInfoSetterTag, TAG(10)) \ + V(kWasmInternalFunctionCallTargetTag, TAG(11)) \ + V(kWasmTypeInfoNativeTypeTag, TAG(12)) \ + V(kWasmExportedFunctionDataSignatureTag, TAG(13)) \ + V(kWasmContinuationJmpbufTag, TAG(14)) \ + V(kWasmStackMemoryTag, TAG(15)) \ + V(kWasmIndirectFunctionTargetTag, TAG(16)) \ + /* Foreigns */ \ + V(kGenericForeignTag, TAG(20)) \ + V(kApiNamedPropertyQueryCallbackTag, TAG(21)) \ + V(kApiNamedPropertyGetterCallbackTag, TAG(22)) \ + V(kApiNamedPropertySetterCallbackTag, TAG(23)) \ + V(kApiNamedPropertyDescriptorCallbackTag, TAG(24)) \ + V(kApiNamedPropertyDefinerCallbackTag, TAG(25)) \ + V(kApiNamedPropertyDeleterCallbackTag, TAG(26)) \ + V(kApiIndexedPropertyQueryCallbackTag, TAG(27)) \ + V(kApiIndexedPropertyGetterCallbackTag, TAG(28)) \ + V(kApiIndexedPropertySetterCallbackTag, TAG(29)) \ + V(kApiIndexedPropertyDescriptorCallbackTag, TAG(30)) \ + V(kApiIndexedPropertyDefinerCallbackTag, TAG(31)) \ + V(kApiIndexedPropertyDeleterCallbackTag, 
TAG(32)) \ + V(kApiIndexedPropertyEnumeratorCallbackTag, TAG(33)) \ + V(kApiAccessCheckCallbackTag, TAG(34)) \ + V(kApiAbortScriptExecutionCallbackTag, TAG(35)) \ + V(kSyntheticModuleTag, TAG(36)) \ + V(kMicrotaskCallbackTag, TAG(37)) \ + V(kMicrotaskCallbackDataTag, TAG(38)) \ + V(kCFunctionTag, TAG(39)) \ + V(kCFunctionInfoTag, TAG(40)) \ + V(kMessageListenerTag, TAG(41)) \ + V(kWaiterQueueForeignTag, TAG(42)) \ + /* Managed */ \ + V(kFirstManagedResourceTag, TAG(50)) \ + V(kGenericManagedTag, TAG(50)) \ + V(kWasmWasmStreamingTag, TAG(51)) \ + V(kWasmFuncDataTag, TAG(52)) \ + V(kWasmManagedDataTag, TAG(53)) \ + V(kWasmNativeModuleTag, TAG(54)) \ + V(kIcuBreakIteratorTag, TAG(55)) \ + V(kIcuUnicodeStringTag, TAG(56)) \ + V(kIcuListFormatterTag, TAG(57)) \ + V(kIcuLocaleTag, TAG(58)) \ + V(kIcuSimpleDateFormatTag, TAG(59)) \ + V(kIcuDateIntervalFormatTag, TAG(60)) \ + V(kIcuRelativeDateTimeFormatterTag, TAG(61)) \ + V(kIcuLocalizedNumberFormatterTag, TAG(62)) \ + V(kIcuPluralRulesTag, TAG(63)) \ + V(kIcuCollatorTag, TAG(64)) \ + V(kDisplayNamesInternalTag, TAG(65)) \ + /* External resources whose lifetime is tied to */ \ + /* their entry in the external pointer table but */ \ + /* which are not referenced via a Managed */ \ + V(kArrayBufferExtensionTag, TAG(66)) \ + V(kLastManagedResourceTag, TAG(66)) \ // All external pointer tags. #define ALL_EXTERNAL_POINTER_TAGS(V) \ @@ -431,15 +637,21 @@ constexpr uint64_t kAllExternalPointerTypeTags[] = { (HasMarkBit ? kExternalPointerMarkBit : 0)) enum ExternalPointerTag : uint64_t { // Empty tag value. Mostly used as placeholder. - kExternalPointerNullTag = MAKE_TAG(0, 0b00000000), + kExternalPointerNullTag = MAKE_TAG(1, 0b00000000), // External pointer tag that will match any external pointer. Use with care! kAnyExternalPointerTag = MAKE_TAG(1, 0b11111111), + // External pointer tag that will match any external pointer in a Foreign. + // Use with care! If desired, this could be made more fine-granular. 
+ kAnyForeignTag = kAnyExternalPointerTag, // The free entry tag has all type bits set so every type check with a // different type fails. It also doesn't have the mark bit set as free // entries are (by definition) not alive. kExternalPointerFreeEntryTag = MAKE_TAG(0, 0b11111111), // Evacuation entries are used during external pointer table compaction. - kExternalPointerEvacuationEntryTag = MAKE_TAG(1, 0b11100111), + kExternalPointerEvacuationEntryTag = MAKE_TAG(1, 0b11111110), + // Tag for zapped/invalidated entries. Those are considered to no longer be + // in use and so have the marking bit cleared. + kExternalPointerZappedEntryTag = MAKE_TAG(0, 0b11111101), ALL_EXTERNAL_POINTER_TAGS(EXTERNAL_POINTER_TAG_ENUM) }; @@ -457,6 +669,24 @@ V8_INLINE static constexpr bool IsSharedExternalPointerType( return tag >= kFirstSharedTag && tag <= kLastSharedTag; } +// True if the external pointer may live in a read-only object, in which case +// the table entry will be in the shared read-only segment of the external +// pointer table. +V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType( + ExternalPointerTag tag) { + return tag == kAccessorInfoGetterTag || tag == kAccessorInfoSetterTag || + tag == kFunctionTemplateInfoCallbackTag; +} + +// True if the external pointer references an external object whose lifetime is +// tied to the entry in the external pointer table. +// In this case, the entry in the ExternalPointerTable always points to an +// object derived from ExternalPointerTable::ManagedResource. +V8_INLINE static constexpr bool IsManagedExternalPointerType( + ExternalPointerTag tag) { + return tag >= kFirstManagedResourceTag && tag <= kLastManagedResourceTag; +} + // Sanity checks. #define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) 
\ static_assert(IsSharedExternalPointerType(Tag)); @@ -472,6 +702,149 @@ PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS) #undef SHARED_EXTERNAL_POINTER_TAGS #undef EXTERNAL_POINTER_TAGS +// +// Indirect Pointers. +// +// When the sandbox is enabled, indirect pointers are used to reference +// HeapObjects that live outside of the sandbox (but are still managed by V8's +// garbage collector). When object A references an object B through an indirect +// pointer, object A will contain a IndirectPointerHandle, i.e. a shifted +// 32-bit index, which identifies an entry in a pointer table (either the +// trusted pointer table for TrustedObjects, or the code pointer table if it is +// a Code object). This table entry then contains the actual pointer to object +// B. Further, object B owns this pointer table entry, and it is responsible +// for updating the "self-pointer" in the entry when it is relocated in memory. +// This way, in contrast to "normal" pointers, indirect pointers never need to +// be tracked by the GC (i.e. there is no remembered set for them). +// These pointers do not exist when the sandbox is disabled. + +// An IndirectPointerHandle represents a 32-bit index into a pointer table. +using IndirectPointerHandle = uint32_t; + +// A null handle always references an entry that contains nullptr. +constexpr IndirectPointerHandle kNullIndirectPointerHandle = 0; + +// When the sandbox is enabled, indirect pointers are used to implement: +// - TrustedPointers: an indirect pointer using the trusted pointer table (TPT) +// and referencing a TrustedObject in one of the trusted heap spaces. +// - CodePointers, an indirect pointer using the code pointer table (CPT) and +// referencing a Code object together with its instruction stream. + +// +// Trusted Pointers. +// +// A pointer to a TrustedObject. +// When the sandbox is enabled, these are indirect pointers using the trusted +// pointer table (TPT). 
They are used to reference trusted objects (located in +// one of V8's trusted heap spaces, outside of the sandbox) from inside the +// sandbox in a memory-safe way. When the sandbox is disabled, these are +// regular tagged pointers. +using TrustedPointerHandle = IndirectPointerHandle; + +// The size of the virtual memory reservation for the trusted pointer table. +// As with the external pointer table, a maximum table size in combination with +// shifted indices allows omitting bounds checks. +constexpr size_t kTrustedPointerTableReservationSize = 64 * MB; + +// The trusted pointer handles are stores shifted to the left by this amount +// to guarantee that they are smaller than the maximum table size. +constexpr uint32_t kTrustedPointerHandleShift = 9; + +// A null handle always references an entry that contains nullptr. +constexpr TrustedPointerHandle kNullTrustedPointerHandle = + kNullIndirectPointerHandle; + +// The maximum number of entries in an trusted pointer table. +constexpr int kTrustedPointerTableEntrySize = 8; +constexpr int kTrustedPointerTableEntrySizeLog2 = 3; +constexpr size_t kMaxTrustedPointers = + kTrustedPointerTableReservationSize / kTrustedPointerTableEntrySize; +static_assert((1 << (32 - kTrustedPointerHandleShift)) == kMaxTrustedPointers, + "kTrustedPointerTableReservationSize and " + "kTrustedPointerHandleShift don't match"); + +// +// Code Pointers. +// +// A pointer to a Code object. +// Essentially a specialized version of a trusted pointer that (when the +// sandbox is enabled) uses the code pointer table (CPT) instead of the TPT. +// Each entry in the CPT contains both a pointer to a Code object as well as a +// pointer to the Code's entrypoint. This allows calling/jumping into Code with +// one fewer memory access (compared to the case where the entrypoint pointer +// first needs to be loaded from the Code object). 
As such, a CodePointerHandle +// can be used both to obtain the referenced Code object and to directly load +// its entrypoint. +// +// When the sandbox is disabled, these are regular tagged pointers. +using CodePointerHandle = IndirectPointerHandle; + +// The size of the virtual memory reservation for the code pointer table. +// As with the other tables, a maximum table size in combination with shifted +// indices allows omitting bounds checks. +constexpr size_t kCodePointerTableReservationSize = 128 * MB; + +// Code pointer handles are shifted by a different amount than indirect pointer +// handles as the tables have a different maximum size. +constexpr uint32_t kCodePointerHandleShift = 9; + +// A null handle always references an entry that contains nullptr. +constexpr CodePointerHandle kNullCodePointerHandle = kNullIndirectPointerHandle; + +// It can sometimes be necessary to distinguish a code pointer handle from a +// trusted pointer handle. A typical example would be a union trusted pointer +// field that can refer to both Code objects and other trusted objects. To +// support these use-cases, we use a simple marking scheme where some of the +// low bits of a code pointer handle are set, while they will be unset on a +// trusted pointer handle. This way, the correct table to resolve the handle +// can be determined even in the absence of a type tag. +constexpr uint32_t kCodePointerHandleMarker = 0x1; +static_assert(kCodePointerHandleShift > 0); +static_assert(kTrustedPointerHandleShift > 0); + +// The maximum number of entries in a code pointer table. 
+constexpr int kCodePointerTableEntrySize = 16; +constexpr int kCodePointerTableEntrySizeLog2 = 4; +constexpr size_t kMaxCodePointers = + kCodePointerTableReservationSize / kCodePointerTableEntrySize; +static_assert( + (1 << (32 - kCodePointerHandleShift)) == kMaxCodePointers, + "kCodePointerTableReservationSize and kCodePointerHandleShift don't match"); + +constexpr int kCodePointerTableEntryEntrypointOffset = 0; +constexpr int kCodePointerTableEntryCodeObjectOffset = 8; + +// Constants that can be used to mark places that should be modified once +// certain types of objects are moved out of the sandbox and into trusted space. +constexpr bool kRuntimeGeneratedCodeObjectsLiveInTrustedSpace = true; +constexpr bool kBuiltinCodeObjectsLiveInTrustedSpace = false; +constexpr bool kAllCodeObjectsLiveInTrustedSpace = + kRuntimeGeneratedCodeObjectsLiveInTrustedSpace && + kBuiltinCodeObjectsLiveInTrustedSpace; + +// +// JavaScript Dispatch Table +// +// A JSDispatchHandle represents a 32-bit index into a JSDispatchTable. +using JSDispatchHandle = uint32_t; + +constexpr JSDispatchHandle kNullJSDispatchHandle = 0; + +// The size of the virtual memory reservation for the JSDispatchTable. +// As with the other tables, a maximum table size in combination with shifted +// indices allows omitting bounds checks. +constexpr size_t kJSDispatchTableReservationSize = 128 * MB; +constexpr uint32_t kJSDispatchHandleShift = 9; + +// The maximum number of entries in a JSDispatchTable. +constexpr int kJSDispatchTableEntrySize = 16; +constexpr int kJSDispatchTableEntrySizeLog2 = 4; +constexpr size_t kMaxJSDispatchEntries = + kJSDispatchTableReservationSize / kJSDispatchTableEntrySize; +static_assert((1 << (32 - kJSDispatchHandleShift)) == kMaxJSDispatchEntries, + "kJSDispatchTableReservationSize and kJSDispatchEntryHandleShift " + "don't match"); + // {obj} must be the raw tagged pointer representation of a HeapObject // that's guaranteed to never be in ReadOnlySpace. 
V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj); @@ -479,7 +852,7 @@ V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj); // Returns if we need to throw when an error occurs. This infers the language // mode based on the current context and the closure. This returns true if the // language mode is strict. -V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate); +V8_EXPORT bool ShouldThrowOnError(internal::Isolate* isolate); /** * This class exports constants and functionality from within v8 that * is necessary to implement inline functions in the v8 api. Don't @@ -487,8 +860,7 @@ V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate); */ class Internals { #ifdef V8_MAP_PACKING - V8_INLINE static constexpr internal::Address UnpackMapWord( - internal::Address mapword) { + V8_INLINE static constexpr Address UnpackMapWord(Address mapword) { // TODO(wenyuzhao): Clear header metadata. return mapword ^ kMapWordXorMask; } @@ -504,6 +876,13 @@ class Internals { static const int kOddballKindOffset = 4 * kApiTaggedSize + kApiDoubleSize; static const int kJSObjectHeaderSize = 3 * kApiTaggedSize; +#ifdef V8_COMPRESS_POINTERS + static const int kJSAPIObjectWithEmbedderSlotsHeaderSize = + kJSObjectHeaderSize + kApiInt32Size; +#else // !V8_COMPRESS_POINTERS + static const int kJSAPIObjectWithEmbedderSlotsHeaderSize = + kJSObjectHeaderSize + kApiTaggedSize; +#endif // !V8_COMPRESS_POINTERS static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize; static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize; static const int kEmbedderDataSlotSize = kApiSystemPointerSize; @@ -519,13 +898,23 @@ class Internals { static const int kExternalOneByteRepresentationTag = 0x0a; static const uint32_t kNumIsolateDataSlots = 4; - static const int kStackGuardSize = 7 * kApiSystemPointerSize; + static const int kStackGuardSize = 8 * kApiSystemPointerSize; + static const int kNumberOfBooleanFlags = 6; + static 
const int kErrorMessageParamSize = 1; + static const int kTablesAlignmentPaddingSize = 1; static const int kBuiltinTier0EntryTableSize = 7 * kApiSystemPointerSize; static const int kBuiltinTier0TableSize = 7 * kApiSystemPointerSize; - - // ExternalPointerTable layout guarantees. - static const int kExternalPointerTableBufferOffset = 0; - static const int kExternalPointerTableSize = 4 * kApiSystemPointerSize; + static const int kLinearAllocationAreaSize = 3 * kApiSystemPointerSize; + static const int kThreadLocalTopSize = 30 * kApiSystemPointerSize; + static const int kHandleScopeDataSize = + 2 * kApiSystemPointerSize + 2 * kApiInt32Size; + + // ExternalPointerTable and TrustedPointerTable layout guarantees. + static const int kExternalPointerTableBasePointerOffset = 0; + static const int kExternalPointerTableSize = 2 * kApiSystemPointerSize; + static const int kExternalBufferTableSize = 2 * kApiSystemPointerSize; + static const int kTrustedPointerTableSize = 2 * kApiSystemPointerSize; + static const int kTrustedPointerTableBasePointerOffset = 0; // IsolateData layout guarantees. 
static const int kIsolateCageBaseOffset = 0; @@ -533,31 +922,96 @@ class Internals { kIsolateCageBaseOffset + kApiSystemPointerSize; static const int kVariousBooleanFlagsOffset = kIsolateStackGuardOffset + kStackGuardSize; - static const int kBuiltinTier0EntryTableOffset = - kVariousBooleanFlagsOffset + 8; + static const int kErrorMessageParamOffset = + kVariousBooleanFlagsOffset + kNumberOfBooleanFlags; + static const int kBuiltinTier0EntryTableOffset = kErrorMessageParamOffset + + kErrorMessageParamSize + + kTablesAlignmentPaddingSize; static const int kBuiltinTier0TableOffset = kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize; - static const int kIsolateEmbedderDataOffset = + static const int kNewAllocationInfoOffset = kBuiltinTier0TableOffset + kBuiltinTier0TableSize; + static const int kOldAllocationInfoOffset = + kNewAllocationInfoOffset + kLinearAllocationAreaSize; + + static const int kFastCCallAlignmentPaddingSize = + kApiSystemPointerSize == 8 ? 0 : kApiSystemPointerSize; static const int kIsolateFastCCallCallerFpOffset = - kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize; + kOldAllocationInfoOffset + kLinearAllocationAreaSize + + kFastCCallAlignmentPaddingSize; static const int kIsolateFastCCallCallerPcOffset = kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize; static const int kIsolateFastApiCallTargetOffset = kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize; static const int kIsolateLongTaskStatsCounterOffset = kIsolateFastApiCallTargetOffset + kApiSystemPointerSize; + static const int kIsolateThreadLocalTopOffset = + kIsolateLongTaskStatsCounterOffset + kApiSizetSize; + static const int kIsolateHandleScopeDataOffset = + kIsolateThreadLocalTopOffset + kThreadLocalTopSize; + static const int kIsolateEmbedderDataOffset = + kIsolateHandleScopeDataOffset + kHandleScopeDataSize; #ifdef V8_COMPRESS_POINTERS static const int kIsolateExternalPointerTableOffset = - kIsolateLongTaskStatsCounterOffset + 
kApiSizetSize; + kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize; static const int kIsolateSharedExternalPointerTableAddressOffset = kIsolateExternalPointerTableOffset + kExternalPointerTableSize; - static const int kIsolateRootsOffset = + static const int kIsolateCppHeapPointerTableOffset = kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize; +#ifdef V8_ENABLE_SANDBOX + static const int kIsolateTrustedCageBaseOffset = + kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize; + static const int kIsolateTrustedPointerTableOffset = + kIsolateTrustedCageBaseOffset + kApiSystemPointerSize; + static const int kIsolateSharedTrustedPointerTableAddressOffset = + kIsolateTrustedPointerTableOffset + kTrustedPointerTableSize; + static const int kIsolateApiCallbackThunkArgumentOffset = + kIsolateSharedTrustedPointerTableAddressOffset + kApiSystemPointerSize; +#else + static const int kIsolateApiCallbackThunkArgumentOffset = + kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize; +#endif // V8_ENABLE_SANDBOX #else + static const int kIsolateApiCallbackThunkArgumentOffset = + kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize; +#endif // V8_COMPRESS_POINTERS + static const int kContinuationPreservedEmbedderDataOffset = + kIsolateApiCallbackThunkArgumentOffset + kApiSystemPointerSize; static const int kIsolateRootsOffset = - kIsolateLongTaskStatsCounterOffset + kApiSizetSize; -#endif + kContinuationPreservedEmbedderDataOffset + kApiSystemPointerSize; + + // Assert scopes + static const int kDisallowGarbageCollectionAlign = alignof(uint32_t); + static const int kDisallowGarbageCollectionSize = sizeof(uint32_t); + +#if V8_STATIC_ROOTS_BOOL + +// These constants are copied from static-roots.h and guarded by static asserts. 
+#define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \ + V(UndefinedValue, 0x69) \ + V(NullValue, 0x85) \ + V(TrueValue, 0xc9) \ + V(FalseValue, 0xad) \ + V(EmptyString, 0xa1) \ + V(TheHoleValue, 0x791) + + using Tagged_t = uint32_t; + struct StaticReadOnlyRoot { +#define DEF_ROOT(name, value) static constexpr Tagged_t k##name = value; + EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT) +#undef DEF_ROOT + + // Use 0 for kStringMapLowerBound since string maps are the first maps. + static constexpr Tagged_t kStringMapLowerBound = 0; + static constexpr Tagged_t kStringMapUpperBound = 0x47d; + +#define PLUSONE(...) +1 + static constexpr size_t kNumberOfExportedStaticRoots = + 2 + EXPORTED_STATIC_ROOTS_PTR_LIST(PLUSONE); +#undef PLUSONE + }; + +#endif // V8_STATIC_ROOTS_BOOL static const int kUndefinedValueRootIndex = 4; static const int kTheHoleValueRootIndex = 5; @@ -571,8 +1025,6 @@ class Internals { static const int kNodeStateMask = 0x3; static const int kNodeStateIsWeakValue = 2; - static const int kTracedNodeClassIdOffset = kApiSystemPointerSize; - static const int kFirstNonstringType = 0x80; static const int kOddballType = 0x83; static const int kForeignType = 0xcc; @@ -580,19 +1032,24 @@ class Internals { static const int kJSObjectType = 0x421; static const int kFirstJSApiObjectType = 0x422; static const int kLastJSApiObjectType = 0x80A; + // Defines a range [kFirstEmbedderJSApiObjectType, kJSApiObjectTypesCount] + // of JSApiObject instance type values that an embedder can use. + static const int kFirstEmbedderJSApiObjectType = 0; + static const int kLastEmbedderJSApiObjectType = + kLastJSApiObjectType - kFirstJSApiObjectType; - static const int kUndefinedOddballKind = 5; + static const int kUndefinedOddballKind = 4; static const int kNullOddballKind = 3; // Constants used by PropertyCallbackInfo to check if we should throw when an // error occurs. 
- static const int kThrowOnError = 0; - static const int kDontThrow = 1; + static const int kDontThrow = 0; + static const int kThrowOnError = 1; static const int kInferShouldThrowMode = 2; // Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an // incremental GC once the external memory reaches this limit. - static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024; + static constexpr size_t kExternalAllocationSoftLimit = 64 * 1024 * 1024; #ifdef V8_MAP_PACKING static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48; @@ -612,32 +1069,77 @@ class Internals { #endif } - V8_INLINE static bool HasHeapObjectTag(const internal::Address value) { + V8_INLINE static constexpr bool HasHeapObjectTag(Address value) { return (value & kHeapObjectTagMask) == static_cast
(kHeapObjectTag); } - V8_INLINE static int SmiValue(const internal::Address value) { + V8_INLINE static constexpr int SmiValue(Address value) { return PlatformSmiTagging::SmiToInt(value); } - V8_INLINE static constexpr internal::Address IntToSmi(int value) { - return internal::IntToSmi(value); + V8_INLINE static constexpr Address AddressToSmi(Address value) { + return (value << (kSmiTagSize + PlatformSmiTagging::kSmiShiftSize)) | + kSmiTag; + } + + V8_INLINE static constexpr Address IntToSmi(int value) { + return AddressToSmi(static_cast
(value)); + } + + template >* = nullptr> + V8_INLINE static constexpr Address IntegralToSmi(T value) { + return AddressToSmi(static_cast
(value)); } - V8_INLINE static constexpr bool IsValidSmi(intptr_t value) { + template >* = nullptr> + V8_INLINE static constexpr bool IsValidSmi(T value) { return PlatformSmiTagging::IsValidSmi(value); } - V8_INLINE static int GetInstanceType(const internal::Address obj) { - typedef internal::Address A; - A map = ReadTaggedPointerField(obj, kHeapObjectMapOffset); + template >* = nullptr> + static constexpr std::optional
TryIntegralToSmi(T value) { + if (V8_LIKELY(PlatformSmiTagging::IsValidSmi(value))) { + return {AddressToSmi(static_cast
(value))}; + } + return {}; + } + +#if V8_STATIC_ROOTS_BOOL + V8_INLINE static bool is_identical(Address obj, Tagged_t constant) { + return static_cast(obj) == constant; + } + + V8_INLINE static bool CheckInstanceMapRange(Address obj, Tagged_t first_map, + Tagged_t last_map) { + auto map = ReadRawField(obj, kHeapObjectMapOffset); +#ifdef V8_MAP_PACKING + map = UnpackMapWord(map); +#endif + return map >= first_map && map <= last_map; + } +#endif + + V8_INLINE static int GetInstanceType(Address obj) { + Address map = ReadTaggedPointerField(obj, kHeapObjectMapOffset); #ifdef V8_MAP_PACKING map = UnpackMapWord(map); #endif return ReadRawField(map, kMapInstanceTypeOffset); } - V8_INLINE static int GetOddballKind(const internal::Address obj) { + V8_INLINE static Address LoadMap(Address obj) { + if (!HasHeapObjectTag(obj)) return kNullAddress; + Address map = ReadTaggedPointerField(obj, kHeapObjectMapOffset); +#ifdef V8_MAP_PACKING + map = UnpackMapWord(map); +#endif + return map; + } + + V8_INLINE static int GetOddballKind(Address obj) { return SmiValue(ReadTaggedSignedField(obj, kOddballKindOffset)); } @@ -658,80 +1160,92 @@ class Internals { static_cast(kLastJSApiObjectType - kJSObjectType)); } - V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) { + V8_INLINE static uint8_t GetNodeFlag(Address* obj, int shift) { uint8_t* addr = reinterpret_cast(obj) + kNodeFlagsOffset; return *addr & static_cast(1U << shift); } - V8_INLINE static void UpdateNodeFlag(internal::Address* obj, bool value, - int shift) { + V8_INLINE static void UpdateNodeFlag(Address* obj, bool value, int shift) { uint8_t* addr = reinterpret_cast(obj) + kNodeFlagsOffset; uint8_t mask = static_cast(1U << shift); *addr = static_cast((*addr & ~mask) | (value << shift)); } - V8_INLINE static uint8_t GetNodeState(internal::Address* obj) { + V8_INLINE static uint8_t GetNodeState(Address* obj) { uint8_t* addr = reinterpret_cast(obj) + kNodeFlagsOffset; return *addr & kNodeStateMask; } - 
V8_INLINE static void UpdateNodeState(internal::Address* obj, uint8_t value) { + V8_INLINE static void UpdateNodeState(Address* obj, uint8_t value) { uint8_t* addr = reinterpret_cast(obj) + kNodeFlagsOffset; *addr = static_cast((*addr & ~kNodeStateMask) | value); } V8_INLINE static void SetEmbedderData(v8::Isolate* isolate, uint32_t slot, void* data) { - internal::Address addr = reinterpret_cast(isolate) + - kIsolateEmbedderDataOffset + - slot * kApiSystemPointerSize; + Address addr = reinterpret_cast
(isolate) + + kIsolateEmbedderDataOffset + slot * kApiSystemPointerSize; *reinterpret_cast(addr) = data; } V8_INLINE static void* GetEmbedderData(const v8::Isolate* isolate, uint32_t slot) { - internal::Address addr = reinterpret_cast(isolate) + - kIsolateEmbedderDataOffset + - slot * kApiSystemPointerSize; + Address addr = reinterpret_cast
(isolate) + + kIsolateEmbedderDataOffset + slot * kApiSystemPointerSize; return *reinterpret_cast(addr); } V8_INLINE static void IncrementLongTasksStatsCounter(v8::Isolate* isolate) { - internal::Address addr = reinterpret_cast(isolate) + - kIsolateLongTaskStatsCounterOffset; + Address addr = + reinterpret_cast
(isolate) + kIsolateLongTaskStatsCounterOffset; ++(*reinterpret_cast(addr)); } - V8_INLINE static internal::Address* GetRoot(v8::Isolate* isolate, int index) { - internal::Address addr = reinterpret_cast(isolate) + - kIsolateRootsOffset + - index * kApiSystemPointerSize; - return reinterpret_cast(addr); + V8_INLINE static Address* GetRootSlot(v8::Isolate* isolate, int index) { + Address addr = reinterpret_cast
(isolate) + kIsolateRootsOffset + + index * kApiSystemPointerSize; + return reinterpret_cast(addr); + } + + V8_INLINE static Address GetRoot(v8::Isolate* isolate, int index) { +#if V8_STATIC_ROOTS_BOOL + Address base = *reinterpret_cast( + reinterpret_cast(isolate) + kIsolateCageBaseOffset); + switch (index) { +#define DECOMPRESS_ROOT(name, ...) \ + case k##name##RootIndex: \ + return base + StaticReadOnlyRoot::k##name; + EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT) +#undef DECOMPRESS_ROOT +#undef EXPORTED_STATIC_ROOTS_PTR_LIST + default: + break; + } +#endif // V8_STATIC_ROOTS_BOOL + return *GetRootSlot(isolate, index); } #ifdef V8_ENABLE_SANDBOX - V8_INLINE static internal::Address* GetExternalPointerTableBase( - v8::Isolate* isolate) { - internal::Address addr = reinterpret_cast(isolate) + - kIsolateExternalPointerTableOffset + - kExternalPointerTableBufferOffset; - return *reinterpret_cast(addr); + V8_INLINE static Address* GetExternalPointerTableBase(v8::Isolate* isolate) { + Address addr = reinterpret_cast
(isolate) + + kIsolateExternalPointerTableOffset + + kExternalPointerTableBasePointerOffset; + return *reinterpret_cast(addr); } - V8_INLINE static internal::Address* GetSharedExternalPointerTableBase( + V8_INLINE static Address* GetSharedExternalPointerTableBase( v8::Isolate* isolate) { - internal::Address addr = reinterpret_cast(isolate) + - kIsolateSharedExternalPointerTableAddressOffset; - addr = *reinterpret_cast(addr); - addr += kExternalPointerTableBufferOffset; - return *reinterpret_cast(addr); + Address addr = reinterpret_cast
(isolate) + + kIsolateSharedExternalPointerTableAddressOffset; + addr = *reinterpret_cast(addr); + addr += kExternalPointerTableBasePointerOffset; + return *reinterpret_cast(addr); } #endif template - V8_INLINE static T ReadRawField(internal::Address heap_object_ptr, - int offset) { - internal::Address addr = heap_object_ptr + offset - kHeapObjectTag; + V8_INLINE static T ReadRawField(Address heap_object_ptr, int offset) { + Address addr = heap_object_ptr + offset - kHeapObjectTag; #ifdef V8_COMPRESS_POINTERS if (sizeof(T) > kApiTaggedSize) { // TODO(ishell, v8:8875): When pointer compression is enabled 8-byte size @@ -746,29 +1260,28 @@ class Internals { return *reinterpret_cast(addr); } - V8_INLINE static internal::Address ReadTaggedPointerField( - internal::Address heap_object_ptr, int offset) { + V8_INLINE static Address ReadTaggedPointerField(Address heap_object_ptr, + int offset) { #ifdef V8_COMPRESS_POINTERS uint32_t value = ReadRawField(heap_object_ptr, offset); - internal::Address base = - GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr); - return base + static_cast(static_cast(value)); + Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr); + return base + static_cast
(static_cast(value)); #else - return ReadRawField(heap_object_ptr, offset); + return ReadRawField
(heap_object_ptr, offset); #endif } - V8_INLINE static internal::Address ReadTaggedSignedField( - internal::Address heap_object_ptr, int offset) { + V8_INLINE static Address ReadTaggedSignedField(Address heap_object_ptr, + int offset) { #ifdef V8_COMPRESS_POINTERS uint32_t value = ReadRawField(heap_object_ptr, offset); - return static_cast(static_cast(value)); + return static_cast
(static_cast(value)); #else - return ReadRawField(heap_object_ptr, offset); + return ReadRawField
(heap_object_ptr, offset); #endif } - V8_INLINE static v8::Isolate* GetIsolateForSandbox(internal::Address obj) { + V8_INLINE static v8::Isolate* GetIsolateForSandbox(Address obj) { #ifdef V8_ENABLE_SANDBOX return reinterpret_cast( internal::IsolateFromNeverReadOnlySpaceObject(obj)); @@ -779,22 +1292,22 @@ class Internals { } template - V8_INLINE static internal::Address ReadExternalPointerField( - v8::Isolate* isolate, internal::Address heap_object_ptr, int offset) { + V8_INLINE static Address ReadExternalPointerField(v8::Isolate* isolate, + Address heap_object_ptr, + int offset) { #ifdef V8_ENABLE_SANDBOX static_assert(tag != kExternalPointerNullTag); // See src/sandbox/external-pointer-table-inl.h. Logic duplicated here so // it can be inlined and doesn't require an additional call. - internal::Address* table = IsSharedExternalPointerType(tag) - ? GetSharedExternalPointerTableBase(isolate) - : GetExternalPointerTableBase(isolate); + Address* table = IsSharedExternalPointerType(tag) + ? GetSharedExternalPointerTableBase(isolate) + : GetExternalPointerTableBase(isolate); internal::ExternalPointerHandle handle = ReadRawField(heap_object_ptr, offset); uint32_t index = handle >> kExternalPointerIndexShift; - std::atomic* ptr = - reinterpret_cast*>(&table[index]); - internal::Address entry = - std::atomic_load_explicit(ptr, std::memory_order_relaxed); + std::atomic
* ptr = + reinterpret_cast*>(&table[index]); + Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed); return entry & ~tag; #else return ReadRawField
(heap_object_ptr, offset); @@ -802,16 +1315,18 @@ class Internals { } #ifdef V8_COMPRESS_POINTERS - V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress( - internal::Address addr) { + V8_INLINE static Address GetPtrComprCageBaseFromOnHeapAddress(Address addr) { return addr & -static_cast(kPtrComprCageBaseAlignment); } - V8_INLINE static internal::Address DecompressTaggedAnyField( - internal::Address heap_object_ptr, uint32_t value) { - internal::Address base = - GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr); - return base + static_cast(static_cast(value)); + V8_INLINE static uint32_t CompressTagged(Address value) { + return static_cast(value); + } + + V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr, + uint32_t value) { + Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr); + return base + static_cast
(static_cast(value)); } #endif // V8_COMPRESS_POINTERS @@ -849,8 +1364,351 @@ class BackingStoreBase {}; // This is needed for histograms sampling garbage collection reasons. constexpr int kGarbageCollectionReasonMaxValue = 27; -} // namespace internal +// Base class for the address block allocator compatible with standard +// containers, which registers its allocated range as strong roots. +class V8_EXPORT StrongRootAllocatorBase { + public: + Heap* heap() const { return heap_; } + + friend bool operator==(const StrongRootAllocatorBase& a, + const StrongRootAllocatorBase& b) { + // TODO(pkasting): Replace this body with `= default` after dropping support + // for old gcc versions. + return a.heap_ == b.heap_; + } + + protected: + explicit StrongRootAllocatorBase(Heap* heap) : heap_(heap) {} + explicit StrongRootAllocatorBase(Isolate* isolate); + + // Allocate/deallocate a range of n elements of type internal::Address. + Address* allocate_impl(size_t n); + void deallocate_impl(Address* p, size_t n) noexcept; + + private: + Heap* heap_; +}; + +// The general version of this template behaves just as std::allocator, with +// the exception that the constructor takes the isolate as parameter. Only +// specialized versions, e.g., internal::StrongRootAllocator +// and internal::StrongRootAllocator> register the allocated range +// as strong roots. +template +class StrongRootAllocator : private std::allocator { + public: + using value_type = T; + + explicit StrongRootAllocator(Heap* heap) {} + explicit StrongRootAllocator(Isolate* isolate) {} + explicit StrongRootAllocator(v8::Isolate* isolate) {} + template + StrongRootAllocator(const StrongRootAllocator& other) noexcept {} + + using std::allocator::allocate; + using std::allocator::deallocate; +}; + +// TODO(pkasting): Replace with `requires` clauses after dropping support for +// old gcc versions. 
+template +inline constexpr bool kHaveIteratorConcept = false; +template +inline constexpr bool kHaveIteratorConcept< + Iterator, std::void_t> = true; + +template +inline constexpr bool kHaveIteratorCategory = false; +template +inline constexpr bool kHaveIteratorCategory< + Iterator, std::void_t> = true; + +// Helper struct that contains an `iterator_concept` type alias only when either +// `Iterator` or `std::iterator_traits` do. +// Default: no alias. +template +struct MaybeDefineIteratorConcept {}; +// Use `Iterator::iterator_concept` if available. +template +struct MaybeDefineIteratorConcept< + Iterator, std::enable_if_t>> { + using iterator_concept = Iterator::iterator_concept; +}; +// Otherwise fall back to `std::iterator_traits` if possible. +template +struct MaybeDefineIteratorConcept< + Iterator, std::enable_if_t && + !kHaveIteratorConcept>> { + // There seems to be no feature-test macro covering this, so use the + // presence of `` as a crude proxy, since it was added to the + // standard as part of the Ranges papers. + // TODO(pkasting): Add this unconditionally after dropping support for old + // libstdc++ versions. +#if __has_include() + using iterator_concept = std::iterator_traits::iterator_concept; +#endif +}; + +// A class of iterators that wrap some different iterator type. +// If specified, ElementType is the type of element accessed by the wrapper +// iterator; in this case, the actual reference and pointer types of Iterator +// must be convertible to ElementType& and ElementType*, respectively. 
+template +class WrappedIterator : public MaybeDefineIteratorConcept { + public: + static_assert( + std::is_void_v || + (std::is_convertible_v::pointer, + std::add_pointer_t> && + std::is_convertible_v::reference, + std::add_lvalue_reference_t>)); + + using difference_type = + typename std::iterator_traits::difference_type; + using value_type = + std::conditional_t, + typename std::iterator_traits::value_type, + ElementType>; + using pointer = + std::conditional_t, + typename std::iterator_traits::pointer, + std::add_pointer_t>; + using reference = + std::conditional_t, + typename std::iterator_traits::reference, + std::add_lvalue_reference_t>; + using iterator_category = + typename std::iterator_traits::iterator_category; + + constexpr WrappedIterator() noexcept = default; + constexpr explicit WrappedIterator(Iterator it) noexcept : it_(it) {} + + // TODO(pkasting): Switch to `requires` and concepts after dropping support + // for old gcc and libstdc++ versions. + template >> + constexpr WrappedIterator( + const WrappedIterator& other) noexcept + : it_(other.base()) {} + + [[nodiscard]] constexpr reference operator*() const noexcept { return *it_; } + [[nodiscard]] constexpr pointer operator->() const noexcept { + return it_.operator->(); + } + + template + [[nodiscard]] constexpr bool operator==( + const WrappedIterator& other) + const noexcept { + return it_ == other.base(); + } +#if V8_HAVE_SPACESHIP_OPERATOR + template + [[nodiscard]] constexpr auto operator<=>( + const WrappedIterator& other) + const noexcept { + if constexpr (std::three_way_comparable_with) { + return it_ <=> other.base(); + } else if constexpr (std::totally_ordered_with) { + if (it_ < other.base()) { + return std::strong_ordering::less; + } + return (it_ > other.base()) ? 
std::strong_ordering::greater + : std::strong_ordering::equal; + } else { + if (it_ < other.base()) { + return std::partial_ordering::less; + } + if (other.base() < it_) { + return std::partial_ordering::greater; + } + return (it_ == other.base()) ? std::partial_ordering::equivalent + : std::partial_ordering::unordered; + } + } +#else + // Assume that if spaceship isn't present, operator rewriting might not be + // either. + template + [[nodiscard]] constexpr bool operator!=( + const WrappedIterator& other) + const noexcept { + return it_ != other.base(); + } + + template + [[nodiscard]] constexpr bool operator<( + const WrappedIterator& other) + const noexcept { + return it_ < other.base(); + } + template + [[nodiscard]] constexpr bool operator<=( + const WrappedIterator& other) + const noexcept { + return it_ <= other.base(); + } + template + [[nodiscard]] constexpr bool operator>( + const WrappedIterator& other) + const noexcept { + return it_ > other.base(); + } + template + [[nodiscard]] constexpr bool operator>=( + const WrappedIterator& other) + const noexcept { + return it_ >= other.base(); + } +#endif + + constexpr WrappedIterator& operator++() noexcept { + ++it_; + return *this; + } + constexpr WrappedIterator operator++(int) noexcept { + WrappedIterator result(*this); + ++(*this); + return result; + } + + constexpr WrappedIterator& operator--() noexcept { + --it_; + return *this; + } + constexpr WrappedIterator operator--(int) noexcept { + WrappedIterator result(*this); + --(*this); + return result; + } + [[nodiscard]] constexpr WrappedIterator operator+( + difference_type n) const noexcept { + WrappedIterator result(*this); + result += n; + return result; + } + [[nodiscard]] friend constexpr WrappedIterator operator+( + difference_type n, const WrappedIterator& x) noexcept { + return x + n; + } + constexpr WrappedIterator& operator+=(difference_type n) noexcept { + it_ += n; + return *this; + } + [[nodiscard]] constexpr WrappedIterator operator-( + 
difference_type n) const noexcept { + return *this + -n; + } + constexpr WrappedIterator& operator-=(difference_type n) noexcept { + return *this += -n; + } + template + [[nodiscard]] constexpr auto operator-( + const WrappedIterator& other) + const noexcept { + return it_ - other.base(); + } + [[nodiscard]] constexpr reference operator[]( + difference_type n) const noexcept { + return it_[n]; + } + + [[nodiscard]] constexpr const Iterator& base() const noexcept { return it_; } + + private: + Iterator it_; +}; + +// Helper functions about values contained in handles. +// A value is either an indirect pointer or a direct pointer, depending on +// whether direct local support is enabled. +class ValueHelper final { + public: +#ifdef V8_ENABLE_DIRECT_HANDLE + static constexpr Address kTaggedNullAddress = 1; + static constexpr Address kEmpty = kTaggedNullAddress; +#else + static constexpr Address kEmpty = kNullAddress; +#endif // V8_ENABLE_DIRECT_HANDLE + + template + V8_INLINE static bool IsEmpty(T* value) { + return reinterpret_cast
(value) == kEmpty; + } + + // Returns a handle's "value" for all kinds of abstract handles. For Local, + // it is equivalent to `*handle`. The variadic parameters support handle + // types with extra type parameters, like `Persistent`. + template