Update V8 version to 11.6.189.19 (#52)
Update V8 version to 11.6.189.19
MeirShpilraien authored Aug 30, 2023
1 parent f3f45da commit fc6f616
Showing 18 changed files with 322 additions and 147 deletions.
1 change: 1 addition & 0 deletions Cargo.toml
@@ -16,6 +16,7 @@ lazy_static = "1"
[dev-dependencies]
v8_rs_derive = { path = "./v8-rs-derive/"}
lazy_static = "1"
ctor = "0.2.4"

[lib]
name = "v8_rs"
2 changes: 1 addition & 1 deletion build.rs
@@ -24,7 +24,7 @@ lazy_static::lazy_static! {

static ref PROFILE: String = env::var("PROFILE").expect("PROFILE env var was not given");

static ref V8_DEFAULT_VERSION: &'static str = "11.5.150.22";
static ref V8_DEFAULT_VERSION: &'static str = "11.6.189.19";
static ref V8_VERSION: String = env::var("V8_VERSION").map(|v| if v == "default" {V8_DEFAULT_VERSION.to_string()} else {v}).unwrap_or(V8_DEFAULT_VERSION.to_string());
static ref V8_HEADERS_PATH: String = env::var("V8_HEADERS_PATH").unwrap_or("v8_c_api/libv8.include.zip".into());
static ref V8_HEADERS_URL: String = env::var("V8_HEADERS_URL").unwrap_or(format!("http://redismodules.s3.amazonaws.com/redisgears/dependencies/libv8.{}.include.zip", *V8_VERSION));
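
The version bump keeps the existing selection logic: V8_VERSION=default (or an unset variable) resolves to the pinned default, and any other value overrides it. A small standalone sketch of that resolution, mirroring the lazy_static above:

    use std::env;

    const V8_DEFAULT_VERSION: &str = "11.6.189.19";

    fn resolve_v8_version() -> String {
        env::var("V8_VERSION")
            .map(|v| if v == "default" { V8_DEFAULT_VERSION.to_string() } else { v })
            .unwrap_or_else(|_| V8_DEFAULT_VERSION.to_string())
    }

    fn main() {
        // With V8_VERSION unset or set to "default", this resolves to 11.6.189.19.
        println!("building against V8 {}", resolve_v8_version());
    }
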
10 changes: 9 additions & 1 deletion src/lib.rs
@@ -78,10 +78,18 @@ mod json_path_tests {
static ref IS_INITIALIZED: Mutex<bool> = Mutex::new(false);
}

#[ctor::ctor]
fn initialize_platform() {
/* V8 makes use of protection keys (https://man7.org/linux/man-pages/man7/pkeys.7.html),
 * so to make sure all threads inherit the pkey, we must initialise the platform
 * on the main thread. Currently there is no way to tell Rust tests to perform an
 * initialisation step on the main thread, so we use ctor to achieve that. */
v8_init_platform(1, Some("--expose-gc")).unwrap();
}

fn initialize() {
let mut is_initialized = IS_INITIALIZED.lock().unwrap();
if !*is_initialized {
v8_init_platform(1, Some("--expose-gc")).unwrap();
v8_init().unwrap();
*is_initialized = true;
}
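
For context, a minimal sketch of the pattern this hunk introduces: the ctor crate runs the annotated function before main, and therefore before the test harness spawns any worker threads, so process-wide setup (here the V8 platform and its protection key) happens on the main thread and is inherited by every later thread. The names below (PLATFORM_READY, the sample test) are illustrative only:

    use std::sync::atomic::{AtomicBool, Ordering};

    // Flipped before `main` runs; every later thread observes it as already set.
    static PLATFORM_READY: AtomicBool = AtomicBool::new(false);

    #[ctor::ctor]
    fn init_before_main() {
        // Runs once on the main thread before any #[test] thread exists, which is
        // where thread-inherited state such as pkeys must be established.
        PLATFORM_READY.store(true, Ordering::SeqCst);
    }

    #[test]
    fn platform_is_ready_before_tests_run() {
        assert!(PLATFORM_READY.load(Ordering::SeqCst));
    }
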
29 changes: 20 additions & 9 deletions v8_c_api/src/v8include/cppgc/internal/api-constants.h
@@ -40,15 +40,6 @@ constexpr size_t kGuardPageSize = 4096;

static constexpr size_t kLargeObjectSizeThreshold = kPageSize / 2;

#if defined(CPPGC_CAGED_HEAP)
#if defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(2) * kGB;
#else // !defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB;
#endif // !defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize;
#endif // defined(CPPGC_CAGED_HEAP)

#if defined(CPPGC_POINTER_COMPRESSION)
#if defined(CPPGC_ENABLE_LARGER_CAGE)
constexpr unsigned kPointerCompressionShift = 3;
@@ -57,6 +48,26 @@ constexpr unsigned kPointerCompressionShift = 1;
#endif // !defined(CPPGC_ENABLE_LARGER_CAGE)
#endif // !defined(CPPGC_POINTER_COMPRESSION)

#if defined(CPPGC_CAGED_HEAP)
#if defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapDefaultReservationSize =
static_cast<size_t>(2) * kGB;
constexpr size_t kCagedHeapMaxReservationSize =
kCagedHeapDefaultReservationSize;
#else // !defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapDefaultReservationSize =
static_cast<size_t>(4) * kGB;
#if defined(CPPGC_POINTER_COMPRESSION)
constexpr size_t kCagedHeapMaxReservationSize =
size_t{1} << (31 + kPointerCompressionShift);
#else // !defined(CPPGC_POINTER_COMPRESSION)
constexpr size_t kCagedHeapMaxReservationSize =
kCagedHeapDefaultReservationSize;
#endif // !defined(CPPGC_POINTER_COMPRESSION)
#endif // !defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapReservationAlignment = kCagedHeapMaxReservationSize;
#endif // defined(CPPGC_CAGED_HEAP)

static constexpr size_t kDefaultAlignment = sizeof(void*);

// Maximum support alignment for a type as in `alignof(T)`.
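
With pointer compression, the maximum caged-heap reservation is now derived from the compression shift (size_t{1} << (31 + kPointerCompressionShift)) rather than being pinned to the default. A quick Rust scratch (assuming a 64-bit build) checking the sizes for the two shift values shown above, 1 by default and 3 under CPPGC_ENABLE_LARGER_CAGE:

    const GB: usize = 1 << 30;

    // Mirrors kCagedHeapMaxReservationSize = size_t{1} << (31 + kPointerCompressionShift).
    fn max_reservation(pointer_compression_shift: u32) -> usize {
        1usize << (31 + pointer_compression_shift)
    }

    fn main() {
        assert_eq!(max_reservation(1), 4 * GB);  // default shift -> matches the 4 GB default
        assert_eq!(max_reservation(3), 16 * GB); // CPPGC_ENABLE_LARGER_CAGE -> 16 GB cage
    }
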
22 changes: 16 additions & 6 deletions v8_c_api/src/v8include/cppgc/internal/caged-heap-local-data.h
@@ -46,7 +46,11 @@ class V8_EXPORT AgeTable final {
enum class AdjacentCardsPolicy : uint8_t { kConsider, kIgnore };

static constexpr size_t kCardSizeInBytes =
api_constants::kCagedHeapReservationSize / kRequiredSize;
api_constants::kCagedHeapDefaultReservationSize / kRequiredSize;

static constexpr size_t CalculateAgeTableSizeForHeapSize(size_t heap_size) {
return heap_size / kCardSizeInBytes;
}

void SetAge(uintptr_t cage_offset, Age age) {
table_[card(cage_offset)] = age;
@@ -81,23 +85,29 @@ class V8_EXPORT AgeTable final {
#endif // !V8_HAS_BUILTIN_CTZ
static_assert((1 << kGranularityBits) == kCardSizeInBytes);
const size_t entry = offset >> kGranularityBits;
CPPGC_DCHECK(table_.size() > entry);
CPPGC_DCHECK(CagedHeapBase::GetAgeTableSize() > entry);
return entry;
}

std::array<Age, kRequiredSize> table_;
#if defined(V8_CC_GNU)
// gcc disallows flexible arrays in otherwise empty classes.
Age table_[0];
#else // !defined(V8_CC_GNU)
Age table_[];
#endif // !defined(V8_CC_GNU)
};

static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
"Size of AgeTable is 1MB");

#endif // CPPGC_YOUNG_GENERATION

struct CagedHeapLocalData final {
V8_INLINE static CagedHeapLocalData& Get() {
return *reinterpret_cast<CagedHeapLocalData*>(CagedHeapBase::GetBase());
}

static constexpr size_t CalculateLocalDataSizeForHeapSize(size_t heap_size) {
return AgeTable::CalculateAgeTableSizeForHeapSize(heap_size);
}

#if defined(CPPGC_YOUNG_GENERATION)
AgeTable age_table;
#endif
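
The age table is no longer a fixed std::array; its length is computed from the actual cage size via CalculateAgeTableSizeForHeapSize. The card size is not visible in this excerpt, so the 4 KiB value below is an assumption back-derived from the removed static_assert (a 1 MiB table of one-byte entries covering the 4 GiB default reservation):

    const KB: usize = 1 << 10;
    const GB: usize = 1 << 30;

    // Assumed: 4 GiB default reservation / 1 Mi one-byte entries => 4 KiB per card.
    const CARD_SIZE_IN_BYTES: usize = 4 * KB;

    // Mirrors CalculateAgeTableSizeForHeapSize(heap_size) = heap_size / kCardSizeInBytes.
    fn age_table_size_for_heap(heap_size: usize) -> usize {
        heap_size / CARD_SIZE_IN_BYTES
    }

    fn main() {
        assert_eq!(age_table_size_for_heap(4 * GB), 1 << 20); // the old fixed 1 MiB table
        assert_eq!(age_table_size_for_heap(2 * GB), 1 << 19); // smaller cage, smaller table
    }
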
17 changes: 12 additions & 5 deletions v8_c_api/src/v8include/cppgc/internal/caged-heap.h
@@ -33,24 +33,31 @@ class V8_EXPORT CagedHeapBase {

V8_INLINE static bool AreWithinCage(const void* addr1, const void* addr2) {
#if defined(CPPGC_2GB_CAGE)
static constexpr size_t kHalfWordShift = sizeof(uint32_t) * CHAR_BIT - 1;
static constexpr size_t kHeapBaseShift = sizeof(uint32_t) * CHAR_BIT - 1;
#else //! defined(CPPGC_2GB_CAGE)
static constexpr size_t kHalfWordShift = sizeof(uint32_t) * CHAR_BIT;
#if defined(CPPGC_POINTER_COMPRESSION)
static constexpr size_t kHeapBaseShift =
31 + api_constants::kPointerCompressionShift;
#else // !defined(CPPGC_POINTER_COMPRESSION)
static constexpr size_t kHeapBaseShift = sizeof(uint32_t) * CHAR_BIT;
#endif // !defined(CPPGC_POINTER_COMPRESSION)
#endif //! defined(CPPGC_2GB_CAGE)
static_assert((static_cast<size_t>(1) << kHalfWordShift) ==
api_constants::kCagedHeapReservationSize);
static_assert((static_cast<size_t>(1) << kHeapBaseShift) ==
api_constants::kCagedHeapMaxReservationSize);
CPPGC_DCHECK(g_heap_base_);
return !(((reinterpret_cast<uintptr_t>(addr1) ^ g_heap_base_) |
(reinterpret_cast<uintptr_t>(addr2) ^ g_heap_base_)) >>
kHalfWordShift);
kHeapBaseShift);
}

V8_INLINE static uintptr_t GetBase() { return g_heap_base_; }
V8_INLINE static size_t GetAgeTableSize() { return g_age_table_size_; }

private:
friend class CagedHeap;

static uintptr_t g_heap_base_;
static size_t g_age_table_size_;
};

} // namespace internal
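
The AreWithinCage fast path is a bit trick: because the cage base is aligned to the maximum reservation size, an address lies inside the cage exactly when XOR-ing it with g_heap_base_ leaves no bits at or above kHeapBaseShift, and OR-ing the two XOR results before shifting checks both addresses at once. A rough Rust translation, assuming a 64-bit build and the 4 GB case (shift of 32):

    const HEAP_BASE_SHIFT: u32 = 32; // assumed: 4 GB cage, i.e. kHeapBaseShift == 32

    fn are_within_cage(heap_base: usize, addr1: usize, addr2: usize) -> bool {
        // Any stray high bit after XOR-ing with the (cage-aligned) base means the
        // address falls outside the cage; the OR folds both checks into one shift.
        (((addr1 ^ heap_base) | (addr2 ^ heap_base)) >> HEAP_BASE_SHIFT) == 0
    }

    fn main() {
        let base = 0x4_0000_0000usize; // cage base, aligned to the 4 GB cage size
        assert!(are_within_cage(base, base + 0x10, base + 0xFFFF_0000));
        assert!(!are_within_cage(base, base + 0x10, base + 0x1_0000_0000));
    }
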
173 changes: 82 additions & 91 deletions v8_c_api/src/v8include/cppgc/internal/gc-info.h
@@ -24,89 +24,90 @@ struct V8_EXPORT EnsureGCInfoIndexTrait final {
// Acquires a new GC info object and updates `registered_index` with the index
// that identifies that new info accordingly.
template <typename T>
V8_INLINE static void EnsureIndex(
V8_INLINE static GCInfoIndex EnsureIndex(
std::atomic<GCInfoIndex>& registered_index) {
EnsureGCInfoIndexTraitDispatch<T>{}(registered_index);
return EnsureGCInfoIndexTraitDispatch<T>{}(registered_index);
}

private:
template <typename T, bool = std::is_polymorphic<T>::value,
bool = FinalizerTrait<T>::HasFinalizer(),
template <typename T, bool = FinalizerTrait<T>::HasFinalizer(),
bool = NameTrait<T>::HasNonHiddenName()>
struct EnsureGCInfoIndexTraitDispatch;

static void V8_PRESERVE_MOST
EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback,
FinalizationCallback, NameCallback);
static void V8_PRESERVE_MOST EnsureGCInfoIndexPolymorphic(
static GCInfoIndex V8_PRESERVE_MOST
EnsureGCInfoIndex(std::atomic<GCInfoIndex>&, TraceCallback,
FinalizationCallback, NameCallback);
static GCInfoIndex V8_PRESERVE_MOST EnsureGCInfoIndex(
std::atomic<GCInfoIndex>&, TraceCallback, FinalizationCallback);
static void V8_PRESERVE_MOST EnsureGCInfoIndexPolymorphic(
std::atomic<GCInfoIndex>&, TraceCallback, NameCallback);
static void V8_PRESERVE_MOST
EnsureGCInfoIndexPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback);
static void V8_PRESERVE_MOST
EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback,
FinalizationCallback, NameCallback);
static void V8_PRESERVE_MOST EnsureGCInfoIndexNonPolymorphic(
std::atomic<GCInfoIndex>&, TraceCallback, FinalizationCallback);
static void V8_PRESERVE_MOST EnsureGCInfoIndexNonPolymorphic(
std::atomic<GCInfoIndex>&, TraceCallback, NameCallback);
static void V8_PRESERVE_MOST
EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&, TraceCallback);
static GCInfoIndex V8_PRESERVE_MOST
EnsureGCInfoIndex(std::atomic<GCInfoIndex>&, TraceCallback, NameCallback);
static GCInfoIndex V8_PRESERVE_MOST
EnsureGCInfoIndex(std::atomic<GCInfoIndex>&, TraceCallback);
};

#define DISPATCH(is_polymorphic, has_finalizer, has_non_hidden_name, function) \
template <typename T> \
struct EnsureGCInfoIndexTrait::EnsureGCInfoIndexTraitDispatch< \
T, is_polymorphic, has_finalizer, has_non_hidden_name> { \
V8_INLINE void operator()(std::atomic<GCInfoIndex>& registered_index) { \
function; \
} \
#define DISPATCH(has_finalizer, has_non_hidden_name, function) \
template <typename T> \
struct EnsureGCInfoIndexTrait::EnsureGCInfoIndexTraitDispatch< \
T, has_finalizer, has_non_hidden_name> { \
V8_INLINE GCInfoIndex \
operator()(std::atomic<GCInfoIndex>& registered_index) { \
return function; \
} \
};

// --------------------------------------------------------------------- //
// DISPATCH(is_polymorphic, has_finalizer, has_non_hidden_name, function)
// --------------------------------------------------------------------- //
DISPATCH(true, true, true, //
EnsureGCInfoIndexPolymorphic(registered_index, //
TraceTrait<T>::Trace, //
FinalizerTrait<T>::kCallback, //
NameTrait<T>::GetName)) //
DISPATCH(true, true, false, //
EnsureGCInfoIndexPolymorphic(registered_index, //
TraceTrait<T>::Trace, //
FinalizerTrait<T>::kCallback)) //
DISPATCH(true, false, true, //
EnsureGCInfoIndexPolymorphic(registered_index, //
TraceTrait<T>::Trace, //
NameTrait<T>::GetName)) //
DISPATCH(true, false, false, //
EnsureGCInfoIndexPolymorphic(registered_index, //
TraceTrait<T>::Trace)) //
DISPATCH(false, true, true, //
EnsureGCInfoIndexNonPolymorphic(registered_index, //
TraceTrait<T>::Trace, //
FinalizerTrait<T>::kCallback, //
NameTrait<T>::GetName)) //
DISPATCH(false, true, false, //
EnsureGCInfoIndexNonPolymorphic(registered_index, //
TraceTrait<T>::Trace, //
FinalizerTrait<T>::kCallback)) //
DISPATCH(false, false, true, //
EnsureGCInfoIndexNonPolymorphic(registered_index, //
TraceTrait<T>::Trace, //
NameTrait<T>::GetName)) //
DISPATCH(false, false, false, //
EnsureGCInfoIndexNonPolymorphic(registered_index, //
TraceTrait<T>::Trace)) //
// ------------------------------------------------------- //
// DISPATCH(has_finalizer, has_non_hidden_name, function) //
// ------------------------------------------------------- //
DISPATCH(true, true, //
EnsureGCInfoIndex(registered_index, //
TraceTrait<T>::Trace, //
FinalizerTrait<T>::kCallback, //
NameTrait<T>::GetName)) //
DISPATCH(true, false, //
EnsureGCInfoIndex(registered_index, //
TraceTrait<T>::Trace, //
FinalizerTrait<T>::kCallback)) //
DISPATCH(false, true, //
EnsureGCInfoIndex(registered_index, //
TraceTrait<T>::Trace, //
NameTrait<T>::GetName)) //
DISPATCH(false, false, //
EnsureGCInfoIndex(registered_index, //
TraceTrait<T>::Trace)) //

#undef DISPATCH

// Trait determines how the garbage collector treats objects wrt. to traversing,
// finalization, and naming.
template <typename T>
struct GCInfoTrait final {
V8_INLINE static GCInfoIndex Index() {
static_assert(sizeof(T), "T must be fully defined");
static std::atomic<GCInfoIndex>
registered_index; // Uses zero initialization.
GCInfoIndex index = registered_index.load(std::memory_order_acquire);
if (V8_UNLIKELY(!index)) {
index = EnsureGCInfoIndexTrait::EnsureIndex<T>(registered_index);
CPPGC_DCHECK(index != 0);
CPPGC_DCHECK(index == registered_index.load(std::memory_order_acquire));
}
return index;
}

static constexpr bool CheckCallbacksAreDefined() {
// No USE() macro available.
(void)static_cast<TraceCallback>(TraceTrait<T>::Trace);
(void)static_cast<FinalizationCallback>(FinalizerTrait<T>::kCallback);
(void)static_cast<NameCallback>(NameTrait<T>::GetName);
return true;
}
};

// Fold types based on finalizer behavior. Note that finalizer characteristics
// align with trace behavior, i.e., destructors are virtual when trace methods
// are and vice versa.
template <typename T, typename ParentMostGarbageCollectedType>
struct GCInfoFolding {
struct GCInfoFolding final {
static constexpr bool kHasVirtualDestructorAtBase =
std::has_virtual_destructor<ParentMostGarbageCollectedType>::value;
static constexpr bool kBothTypesAreTriviallyDestructible =
@@ -121,34 +122,24 @@ struct GCInfoFolding {
static constexpr bool kWantsDetailedObjectNames = false;
#endif // !CPPGC_SUPPORTS_OBJECT_NAMES

// Folding would regresses name resolution when deriving names from C++
// class names as it would just folds a name to the base class name.
using ResultType = std::conditional_t<(kHasVirtualDestructorAtBase ||
kBothTypesAreTriviallyDestructible ||
kHasCustomFinalizerDispatchAtBase) &&
!kWantsDetailedObjectNames,
ParentMostGarbageCollectedType, T>;
};
// Always true. Forces the compiler to resolve callbacks which ensures that
// both modes don't break without requiring compiling a separate
// configuration. Only a single GCInfo (for `ResultType` below) will actually
// be instantiated but existence (and well-formedness) of all callbacks is
// checked.
static constexpr bool kCheckTypeGuardAlwaysTrue =
GCInfoTrait<T>::CheckCallbacksAreDefined() &&
GCInfoTrait<ParentMostGarbageCollectedType>::CheckCallbacksAreDefined();

// Trait determines how the garbage collector treats objects wrt. to traversing,
// finalization, and naming.
template <typename T>
struct GCInfoTrait final {
V8_INLINE static GCInfoIndex Index() {
static_assert(sizeof(T), "T must be fully defined");
static std::atomic<GCInfoIndex>
registered_index; // Uses zero initialization.
GCInfoIndex index = registered_index.load(std::memory_order_acquire);
if (V8_UNLIKELY(!index)) {
EnsureGCInfoIndexTrait::EnsureIndex<T>(registered_index);
// Slow path call uses V8_PRESERVE_MOST which does not support return
// values (also preserves RAX). Avoid out parameter by just reloading the
// value here which at this point is guaranteed to be set.
index = registered_index.load(std::memory_order_acquire);
CPPGC_DCHECK(index != 0);
}
return index;
}
// Folding would regress name resolution when deriving names from C++
// class names as it would just fold a name to the base class name.
using ResultType =
std::conditional_t<kCheckTypeGuardAlwaysTrue &&
(kHasVirtualDestructorAtBase ||
kBothTypesAreTriviallyDestructible ||
kHasCustomFinalizerDispatchAtBase) &&
!kWantsDetailedObjectNames,
ParentMostGarbageCollectedType, T>;
};

} // namespace internal
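
The main behavioural change in this file is that the V8_PRESERVE_MOST slow path now returns the freshly registered GCInfoIndex, so GCInfoTrait::Index no longer needs to reload the atomic after registration. A loose Rust analogue of that fast-path/slow-path shape (the registry below is a stand-in; the real slow path also synchronises concurrent registrations):

    use std::sync::atomic::{AtomicU16, Ordering};

    static REGISTERED_INDEX: AtomicU16 = AtomicU16::new(0); // zero = "not registered yet"

    // Stand-in for EnsureGCInfoIndexTrait::EnsureIndex: registers and returns the index.
    fn ensure_index(slot: &AtomicU16) -> u16 {
        static NEXT: AtomicU16 = AtomicU16::new(1);
        let new_index = NEXT.fetch_add(1, Ordering::Relaxed);
        slot.store(new_index, Ordering::Release);
        new_index
    }

    fn index() -> u16 {
        let index = REGISTERED_INDEX.load(Ordering::Acquire);
        if index != 0 {
            return index; // fast path: already registered
        }
        // The slow path now hands the index back directly; previously the caller
        // had to reload the atomic because the preserve-most call returned nothing.
        ensure_index(&REGISTERED_INDEX)
    }

    fn main() {
        let first = index();
        assert_eq!(first, index()); // later calls take the fast path and agree
    }
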
