Mirror of https://github.com/denoland/rusty_v8.git (synced 2025-03-09 13:38:51 -04:00)
feat: add more stats apis
This commit is contained in: parent 3e08a6d76f, commit bbb4fb5923
7 changed files with 168 additions and 176 deletions
build.rs | 2

@@ -159,6 +159,8 @@ fn build_binding() {
     .generate_cstr(true)
     .rustified_enum(".*UseCounterFeature")
     .rustified_enum(".*ModuleImportPhase")
+    .bitfield_enum(".*GCType")
+    .bitfield_enum(".*GCCallbackFlags")
     .allowlist_item("v8__.*")
     .allowlist_item("cppgc__.*")
     .allowlist_item("RustObj")
@@ -307,12 +307,34 @@ void v8__Isolate__RemoveGCPrologueCallback(
   isolate->RemoveGCPrologueCallback(callback, data);
 }
 
+void v8__Isolate__AddGCEpilogueCallback(
+    v8::Isolate* isolate, v8::Isolate::GCCallbackWithData callback, void* data,
+    v8::GCType gc_type_filter) {
+  isolate->AddGCEpilogueCallback(callback, data, gc_type_filter);
+}
+
+void v8__Isolate__RemoveGCEpilogueCallback(
+    v8::Isolate* isolate, v8::Isolate::GCCallbackWithData callback,
+    void* data) {
+  isolate->RemoveGCEpilogueCallback(callback, data);
+}
+
 void v8__Isolate__AddNearHeapLimitCallback(v8::Isolate* isolate,
                                            v8::NearHeapLimitCallback callback,
                                            void* data) {
   isolate->AddNearHeapLimitCallback(callback, data);
 }
 
+size_t v8__Isolate__NumberOfHeapSpaces(v8::Isolate* isolate) {
+  return isolate->NumberOfHeapSpaces();
+}
+
+bool v8__Isolate__GetHeapSpaceStatistics(
+    v8::Isolate* isolate, v8::HeapSpaceStatistics* space_statistics,
+    size_t index) {
+  return isolate->GetHeapSpaceStatistics(space_statistics, index);
+}
+
 void v8__Isolate__RemoveNearHeapLimitCallback(
     v8::Isolate* isolate, v8::NearHeapLimitCallback callback,
     size_t heap_limit) {
@@ -3395,36 +3417,6 @@ void v8__HeapProfiler__TakeHeapSnapshot(v8::Isolate* isolate,
   const_cast<v8::HeapSnapshot*>(snapshot)->Delete();
 }
 
-void v8__HeapStatistics__CONSTRUCT(uninit_t<v8::HeapStatistics>* buf) {
-  // Should be <= than its counterpart in src/isolate.rs
-  static_assert(sizeof(v8::HeapStatistics) <= sizeof(uintptr_t[16]),
-                "HeapStatistics mismatch");
-  construct_in_place<v8::HeapStatistics>(buf);
-}
-
-// The const_cast doesn't violate const correctness, the methods
-// are simple getters that don't mutate the object or global state.
-#define V(name)                                                    \
-  size_t v8__HeapStatistics__##name(const v8::HeapStatistics* s) { \
-    return const_cast<v8::HeapStatistics*>(s)->name();             \
-  }
-
-V(total_heap_size)
-V(total_heap_size_executable)
-V(total_physical_size)
-V(total_available_size)
-V(total_global_handles_size)
-V(used_global_handles_size)
-V(used_heap_size)
-V(heap_size_limit)
-V(malloced_memory)
-V(external_memory)
-V(peak_malloced_memory)
-V(number_of_native_contexts)
-V(number_of_detached_contexts)
-V(does_zap_garbage)  // Returns size_t, not bool like you'd expect.
-
-#undef V
 }  // extern "C"
 
 // v8::ValueSerializer::Delegate
@@ -40,6 +40,10 @@ using v8__FastOneByteString = v8::FastOneByteString;
 using v8__Isolate__UseCounterFeature = v8::Isolate::UseCounterFeature;
 using v8__String__WriteFlags = v8::String::WriteFlags;
 using v8__ModuleImportPhase = v8::ModuleImportPhase;
+using v8__HeapStatistics = v8::HeapStatistics;
+using v8__HeapSpaceStatistics = v8::HeapSpaceStatistics;
+using v8__GCType = v8::GCType;
+using v8__GCCallbackFlags = v8::GCCallbackFlags;
 
 static uint32_t v8__MAJOR_VERSION = V8_MAJOR_VERSION;
 static uint32_t v8__MINOR_VERSION = V8_MINOR_VERSION;
src/gc.rs | 74

@@ -1,72 +1,2 @@
-/// Applications can register callback functions which will be called before and
-/// after certain garbage collection operations. Allocations are not allowed in
-/// the callback functions, you therefore cannot manipulate objects (set or
-/// delete properties for example) since it is possible such operations will
-/// result in the allocation of objects.
-#[repr(C)]
-#[derive(Debug, Eq, PartialEq, Clone, Copy)]
-pub struct GCType(u32);
-
-impl GCType {
-  pub const SCAVENGE: Self = Self(1 << 0);
-
-  pub const MINOR_MARK_COMPACT: Self = Self(1 << 1);
-
-  pub const MARK_SWEEP_COMPACT: Self = Self(1 << 2);
-
-  pub const INCREMENTAL_MARKING: Self = Self(1 << 3);
-
-  pub const PROCESS_WEAK_CALLBACKS: Self = Self(1 << 4);
-
-  pub const ALL: Self = Self(31);
-}
-
-impl std::ops::BitOr for GCType {
-  type Output = Self;
-
-  fn bitor(self, Self(rhs): Self) -> Self {
-    let Self(lhs) = self;
-    Self(lhs | rhs)
-  }
-}
-
-/// GCCallbackFlags is used to notify additional information about the GC
-/// callback.
-/// - GCCallbackFlagConstructRetainedObjectInfos: The GC callback is for
-///   constructing retained object infos.
-/// - GCCallbackFlagForced: The GC callback is for a forced GC for testing.
-/// - GCCallbackFlagSynchronousPhantomCallbackProcessing: The GC callback
-///   is called synchronously without getting posted to an idle task.
-/// - GCCallbackFlagCollectAllAvailableGarbage: The GC callback is called
-///   in a phase where V8 is trying to collect all available garbage
-///   (e.g., handling a low memory notification).
-/// - GCCallbackScheduleIdleGarbageCollection: The GC callback is called to
-///   trigger an idle garbage collection.
-#[repr(C)]
-#[derive(Debug, Eq, PartialEq, Clone, Copy)]
-pub struct GCCallbackFlags(u32);
-
-impl GCCallbackFlags {
-  pub const NONE: Self = Self(0);
-
-  pub const CONSTRUCT_RETAINED_OBJECT_INFOS: Self = Self(1 << 1);
-
-  pub const FORCED: Self = Self(1 << 2);
-
-  pub const SYNCHRONOUS_PHANTOM_CALLBACK_PROCESSING: Self = Self(1 << 3);
-
-  pub const COLLECT_ALL_AVAILABLE_GARBAGE: Self = Self(1 << 4);
-
-  pub const COLLECT_ALL_EXTERNAL_MEMORY: Self = Self(1 << 5);
-
-  pub const SCHEDULE_IDLE_GARBAGE_COLLECTION: Self = Self(1 << 6);
-}
-
-impl std::ops::BitOr for GCCallbackFlags {
-  type Output = Self;
-
-  fn bitor(self, Self(rhs): Self) -> Self {
-    let Self(lhs) = self;
-    Self(lhs | rhs)
-  }
-}
+pub use crate::binding::v8__GCCallbackFlags as GCCallbackFlags;
+pub use crate::binding::v8__GCType as GCType;
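For orientation, the bindgen output that `GCType` now re-exports (driven by the new `.bitfield_enum(".*GCType")` rule in build.rs) has roughly the following shape. This is a hand-written sketch, not the actual generated code; the constant names follow the C++ enumerators used in the test changes below, and the flag values follow the hand-written constants removed above:

#![allow(non_upper_case_globals)]

#[repr(transparent)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct GCType(pub u32);

impl GCType {
  pub const kGCTypeScavenge: GCType = GCType(1 << 0);
  pub const kGCTypeMarkSweepCompact: GCType = GCType(1 << 2);
  pub const kGCTypeIncrementalMarking: GCType = GCType(1 << 3);
  pub const kGCTypeProcessWeakCallbacks: GCType = GCType(1 << 4);
  pub const kGCTypeAll: GCType = GCType(31);
}

impl std::ops::BitOr for GCType {
  type Output = Self;
  fn bitor(self, rhs: Self) -> Self {
    // Flags combine bitwise, e.g. kGCTypeIncrementalMarking | kGCTypeProcessWeakCallbacks.
    GCType(self.0 | rhs.0)
  }
}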
src/isolate.rs | 194
@@ -17,6 +17,8 @@ use crate::PromiseResolver;
 use crate::StartupData;
 use crate::String;
 use crate::Value;
+use crate::binding::v8__HeapSpaceStatistics;
+use crate::binding::v8__HeapStatistics;
 use crate::binding::v8__Isolate__UseCounterFeature;
 pub use crate::binding::v8__ModuleImportPhase as ModuleImportPhase;
 use crate::cppgc::Heap;
@@ -39,8 +41,10 @@ use crate::support::UniqueRef;
 use crate::support::UnitType;
 use crate::support::char;
 use crate::support::int;
+use crate::support::size_t;
 use crate::wasm::WasmStreaming;
 use crate::wasm::trampoline;
+use std::ffi::CStr;
 
 use std::any::Any;
 use std::any::TypeId;
@@ -550,15 +554,6 @@ pub struct OomDetails {
 pub type OomErrorCallback =
   unsafe extern "C" fn(location: *const char, details: &OomDetails);
 
-/// Collection of V8 heap information.
-///
-/// Instances of this class can be passed to v8::Isolate::GetHeapStatistics to
-/// get heap statistics from V8.
-// Must be >= sizeof(v8::HeapStatistics), see v8__HeapStatistics__CONSTRUCT().
-#[repr(C)]
-#[derive(Debug)]
-pub struct HeapStatistics([usize; 16]);
-
 // Windows x64 ABI: MaybeLocal<Value> returned on the stack.
 #[cfg(target_os = "windows")]
 pub type PrepareStackTraceCallback<'s> =
@@ -603,7 +598,10 @@ unsafe extern "C" {
   fn v8__Isolate__MemoryPressureNotification(this: *mut Isolate, level: u8);
   fn v8__Isolate__ClearKeptObjects(isolate: *mut Isolate);
   fn v8__Isolate__LowMemoryNotification(isolate: *mut Isolate);
-  fn v8__Isolate__GetHeapStatistics(this: *mut Isolate, s: *mut HeapStatistics);
+  fn v8__Isolate__GetHeapStatistics(
+    this: *mut Isolate,
+    s: *mut v8__HeapStatistics,
+  );
   fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
     this: *mut Isolate,
     capture: bool,
@@ -629,6 +627,23 @@ unsafe extern "C" {
     callback: GcCallbackWithData,
     data: *mut c_void,
   );
+  fn v8__Isolate__AddGCEpilogueCallback(
+    isolate: *mut Isolate,
+    callback: GcCallbackWithData,
+    data: *mut c_void,
+    gc_type_filter: GCType,
+  );
+  fn v8__Isolate__RemoveGCEpilogueCallback(
+    isolate: *mut Isolate,
+    callback: GcCallbackWithData,
+    data: *mut c_void,
+  );
+  fn v8__Isolate__NumberOfHeapSpaces(isolate: *mut Isolate) -> size_t;
+  fn v8__Isolate__GetHeapSpaceStatistics(
+    isolate: *mut Isolate,
+    space_statistics: *mut v8__HeapSpaceStatistics,
+    index: size_t,
+  ) -> bool;
   fn v8__Isolate__AddNearHeapLimitCallback(
     isolate: *mut Isolate,
     callback: NearHeapLimitCallback,
@@ -738,37 +753,6 @@ unsafe extern "C" {
     callback: unsafe extern "C" fn(*mut c_void, *const u8, usize) -> bool,
     arg: *mut c_void,
   );
-
-  fn v8__HeapStatistics__CONSTRUCT(s: *mut MaybeUninit<HeapStatistics>);
-  fn v8__HeapStatistics__total_heap_size(s: *const HeapStatistics) -> usize;
-  fn v8__HeapStatistics__total_heap_size_executable(
-    s: *const HeapStatistics,
-  ) -> usize;
-  fn v8__HeapStatistics__total_physical_size(s: *const HeapStatistics)
-  -> usize;
-  fn v8__HeapStatistics__total_available_size(
-    s: *const HeapStatistics,
-  ) -> usize;
-  fn v8__HeapStatistics__total_global_handles_size(
-    s: *const HeapStatistics,
-  ) -> usize;
-  fn v8__HeapStatistics__used_global_handles_size(
-    s: *const HeapStatistics,
-  ) -> usize;
-  fn v8__HeapStatistics__used_heap_size(s: *const HeapStatistics) -> usize;
-  fn v8__HeapStatistics__heap_size_limit(s: *const HeapStatistics) -> usize;
-  fn v8__HeapStatistics__malloced_memory(s: *const HeapStatistics) -> usize;
-  fn v8__HeapStatistics__external_memory(s: *const HeapStatistics) -> usize;
-  fn v8__HeapStatistics__peak_malloced_memory(
-    s: *const HeapStatistics,
-  ) -> usize;
-  fn v8__HeapStatistics__number_of_native_contexts(
-    s: *const HeapStatistics,
-  ) -> usize;
-  fn v8__HeapStatistics__number_of_detached_contexts(
-    s: *const HeapStatistics,
-  ) -> usize;
-  fn v8__HeapStatistics__does_zap_garbage(s: *const HeapStatistics) -> usize;
 }
 
 /// Isolate represents an isolated instance of the V8 engine. V8 isolates have
@@ -1122,8 +1106,41 @@ impl Isolate {
 
   /// Get statistics about the heap memory usage.
   #[inline(always)]
-  pub fn get_heap_statistics(&mut self, s: &mut HeapStatistics) {
-    unsafe { v8__Isolate__GetHeapStatistics(self, s) }
+  pub fn get_heap_statistics(&mut self) -> HeapStatistics {
+    let inner = unsafe {
+      let mut s = MaybeUninit::zeroed();
+      v8__Isolate__GetHeapStatistics(self, s.as_mut_ptr());
+      s.assume_init()
+    };
+    HeapStatistics(inner)
   }
+
+  /// Returns the number of spaces in the heap.
+  #[inline(always)]
+  pub fn number_of_heap_spaces(&mut self) -> usize {
+    unsafe { v8__Isolate__NumberOfHeapSpaces(self) }
+  }
+
+  /// Get the memory usage of a space in the heap.
+  ///
+  /// \param space_statistics The HeapSpaceStatistics object to fill in
+  ///   statistics.
+  /// \param index The index of the space to get statistics from, which ranges
+  ///   from 0 to NumberOfHeapSpaces() - 1.
+  /// \returns true on success.
+  #[inline(always)]
+  pub fn get_heap_space_statistics(
+    &mut self,
+    index: usize,
+  ) -> Option<HeapSpaceStatistics> {
+    let inner = unsafe {
+      let mut s = MaybeUninit::zeroed();
+      if !v8__Isolate__GetHeapSpaceStatistics(self, s.as_mut_ptr(), index) {
+        return None;
+      }
+      s.assume_init()
+    };
+    Some(HeapSpaceStatistics(inner))
+  }
 
   /// Tells V8 to capture current stack trace when uncaught exception occurs
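A minimal usage sketch of the reworked getter (hypothetical caller code, not part of the commit; assumes the usual rusty_v8 platform initialization):

// Sketch: get_heap_statistics() now returns HeapStatistics by value
// instead of filling a caller-provided &mut HeapStatistics.
let platform = v8::new_default_platform(0, false).make_shared();
v8::V8::initialize_platform(platform);
v8::V8::initialize();

let isolate = &mut v8::Isolate::new(Default::default());
let stats = isolate.get_heap_statistics();
println!(
  "used {} of {} bytes (limit {})",
  stats.used_heap_size(),
  stats.total_heap_size(),
  stats.heap_size_limit()
);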
@@ -1361,6 +1378,33 @@ impl Isolate {
     unsafe { v8__Isolate__RemoveGCPrologueCallback(self, callback, data) }
   }
 
+  /// Enables the host application to receive a notification after a
+  /// garbage collection.
+  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
+  #[inline(always)]
+  pub fn add_gc_epilogue_callback(
+    &mut self,
+    callback: GcCallbackWithData,
+    data: *mut c_void,
+    gc_type_filter: GCType,
+  ) {
+    unsafe {
+      v8__Isolate__AddGCEpilogueCallback(self, callback, data, gc_type_filter);
+    }
+  }
+
+  /// This function removes a callback which was added by
+  /// `AddGCEpilogueCallback`.
+  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
+  #[inline(always)]
+  pub fn remove_gc_epilogue_callback(
+    &mut self,
+    callback: GcCallbackWithData,
+    data: *mut c_void,
+  ) {
+    unsafe { v8__Isolate__RemoveGCEpilogueCallback(self, callback, data) }
+  }
+
   /// Add a callback to invoke in case the heap size is close to the heap limit.
   /// If multiple callbacks are added, only the most recently added callback is
   /// invoked.
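A sketch of how an embedder might use the new epilogue hooks (illustrative code only; `on_gc_done` is a hypothetical callback whose signature follows `GcCallbackWithData`, and `isolate` is assumed to be an already-initialized `&mut v8::Isolate`):

extern "C" fn on_gc_done(
  _isolate: *mut v8::Isolate,
  _kind: v8::GCType,
  _flags: v8::GCCallbackFlags,
  data: *mut std::ffi::c_void,
) {
  // Illustrative: bump an embedder-owned counter after every GC.
  unsafe { *(data as *mut usize) += 1 };
}

let mut gc_count = 0usize;
let data = &mut gc_count as *mut usize as *mut std::ffi::c_void;
isolate.add_gc_epilogue_callback(on_gc_done, data, v8::GCType::kGCTypeAll);
// ... run scripts ...
isolate.remove_gc_epilogue_callback(on_gc_done, data);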
@@ -1904,87 +1948,107 @@ impl AsMut<Isolate> for Isolate {
   }
 }
 
+/// Collection of V8 heap information.
+///
+/// Instances of this class can be passed to v8::Isolate::GetHeapStatistics to
+/// get heap statistics from V8.
+pub struct HeapStatistics(v8__HeapStatistics);
+
 impl HeapStatistics {
   #[inline(always)]
   pub fn total_heap_size(&self) -> usize {
-    unsafe { v8__HeapStatistics__total_heap_size(self) }
+    self.0.total_heap_size_
   }
 
   #[inline(always)]
   pub fn total_heap_size_executable(&self) -> usize {
-    unsafe { v8__HeapStatistics__total_heap_size_executable(self) }
+    self.0.total_heap_size_executable_
   }
 
   #[inline(always)]
   pub fn total_physical_size(&self) -> usize {
-    unsafe { v8__HeapStatistics__total_physical_size(self) }
+    self.0.total_physical_size_
   }
 
   #[inline(always)]
   pub fn total_available_size(&self) -> usize {
-    unsafe { v8__HeapStatistics__total_available_size(self) }
+    self.0.total_available_size_
   }
 
   #[inline(always)]
   pub fn total_global_handles_size(&self) -> usize {
-    unsafe { v8__HeapStatistics__total_global_handles_size(self) }
+    self.0.total_global_handles_size_
   }
 
   #[inline(always)]
   pub fn used_global_handles_size(&self) -> usize {
-    unsafe { v8__HeapStatistics__used_global_handles_size(self) }
+    self.0.used_global_handles_size_
   }
 
   #[inline(always)]
   pub fn used_heap_size(&self) -> usize {
-    unsafe { v8__HeapStatistics__used_heap_size(self) }
+    self.0.used_heap_size_
   }
 
   #[inline(always)]
   pub fn heap_size_limit(&self) -> usize {
-    unsafe { v8__HeapStatistics__heap_size_limit(self) }
+    self.0.heap_size_limit_
   }
 
   #[inline(always)]
   pub fn malloced_memory(&self) -> usize {
-    unsafe { v8__HeapStatistics__malloced_memory(self) }
+    self.0.malloced_memory_
   }
 
   #[inline(always)]
   pub fn external_memory(&self) -> usize {
-    unsafe { v8__HeapStatistics__external_memory(self) }
+    self.0.external_memory_
   }
 
   #[inline(always)]
   pub fn peak_malloced_memory(&self) -> usize {
-    unsafe { v8__HeapStatistics__peak_malloced_memory(self) }
+    self.0.peak_malloced_memory_
   }
 
   #[inline(always)]
   pub fn number_of_native_contexts(&self) -> usize {
-    unsafe { v8__HeapStatistics__number_of_native_contexts(self) }
+    self.0.number_of_native_contexts_
   }
 
   #[inline(always)]
   pub fn number_of_detached_contexts(&self) -> usize {
-    unsafe { v8__HeapStatistics__number_of_detached_contexts(self) }
+    self.0.number_of_detached_contexts_
   }
 
   /// Returns a 0/1 boolean, which signifies whether the V8 overwrite heap
   /// garbage with a bit pattern.
   #[inline(always)]
-  pub fn does_zap_garbage(&self) -> usize {
-    unsafe { v8__HeapStatistics__does_zap_garbage(self) }
+  pub fn does_zap_garbage(&self) -> bool {
+    self.0.does_zap_garbage_
   }
 }
 
-impl Default for HeapStatistics {
-  fn default() -> Self {
-    let mut s = MaybeUninit::<Self>::uninit();
-    unsafe {
-      v8__HeapStatistics__CONSTRUCT(&mut s);
-      s.assume_init()
-    }
-  }
-}
+pub struct HeapSpaceStatistics(v8__HeapSpaceStatistics);
+
+impl HeapSpaceStatistics {
+  pub fn space_name(&self) -> &CStr {
+    unsafe { CStr::from_ptr(self.0.space_name_) }
+  }
+
+  pub fn space_size(&self) -> usize {
+    self.0.space_size_
+  }
+
+  pub fn space_used_size(&self) -> usize {
+    self.0.space_used_size_
+  }
+
+  pub fn space_available_size(&self) -> usize {
+    self.0.space_available_size_
+  }
+
+  pub fn physical_space_size(&self) -> usize {
+    self.0.physical_space_size_
+  }
+}
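Putting the space-level pieces together, a per-space dump could look roughly like this (hypothetical embedder code; assumes an initialized `isolate: &mut v8::Isolate`):

// Sketch: iterate the heap spaces exposed by the new API.
for i in 0..isolate.number_of_heap_spaces() {
  // get_heap_space_statistics() returns None when the index is out of range.
  if let Some(space) = isolate.get_heap_space_statistics(i) {
    println!(
      "{}: {} used / {} reserved bytes",
      space.space_name().to_string_lossy(),
      space.space_used_size(),
      space.space_size(),
    );
  }
}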
@@ -104,6 +104,7 @@ pub use handle::Local;
 pub use handle::TracedReference;
 pub use handle::Weak;
 pub use isolate::GarbageCollectionType;
+pub use isolate::HeapSpaceStatistics;
 pub use isolate::HeapStatistics;
 pub use isolate::HostCreateShadowRealmContextCallback;
 pub use isolate::HostImportModuleDynamicallyCallback;
@@ -7617,8 +7617,7 @@ fn heap_statistics() {
   let params = v8::CreateParams::default().heap_limits(0, 10 << 20); // 10 MB.
   let isolate = &mut v8::Isolate::new(params);
 
-  let mut s = v8::HeapStatistics::default();
-  isolate.get_heap_statistics(&mut s);
+  let s = isolate.get_heap_statistics();
 
   assert!(s.used_heap_size() > 0);
   assert!(s.total_heap_size() > 0);
@@ -7644,7 +7643,7 @@ fn heap_statistics() {
   let local = eval(scope, "").unwrap();
   let _global = v8::Global::new(scope, local);
 
-  scope.get_heap_statistics(&mut s);
+  let s = scope.get_heap_statistics();
 
   assert_ne!(s.used_global_handles_size(), 0);
   assert_ne!(s.total_global_handles_size(), 0);
@@ -9026,8 +9025,7 @@ fn run_with_rust_allocator() {
     let result = script.run(scope).unwrap();
     assert_eq!(result.to_rust_string_lossy(scope), "OK");
   }
-  let mut stats = v8::HeapStatistics::default();
-  isolate.get_heap_statistics(&mut stats);
+  let stats = isolate.get_heap_statistics();
   let count_loaded = count.load(Ordering::SeqCst);
   assert!(count_loaded > 0);
   assert!(count_loaded <= stats.external_memory());
@@ -11346,7 +11344,7 @@ fn gc_callbacks() {
     data: *mut c_void,
   ) {
     // We should get a mark-sweep GC here.
-    assert_eq!(r#type, v8::GCType::MARK_SWEEP_COMPACT);
+    assert_eq!(r#type, v8::GCType::kGCTypeMarkSweepCompact);
     let state = unsafe { &mut *(data as *mut GCCallbackState) };
     state.mark_sweep_calls += 1;
   }
@@ -11358,7 +11356,7 @@ fn gc_callbacks() {
     data: *mut c_void,
   ) {
     // We should get a mark-sweep GC here.
-    assert_eq!(r#type, v8::GCType::INCREMENTAL_MARKING);
+    assert_eq!(r#type, v8::GCType::kGCTypeIncrementalMarking);
     let state = unsafe { &mut *(data as *mut GCCallbackState) };
     state.incremental_marking_calls += 1;
   }
@@ -11366,11 +11364,12 @@ fn gc_callbacks() {
   let mut state = GCCallbackState::default();
   let state_ptr = &mut state as *mut _ as *mut c_void;
   let isolate = &mut v8::Isolate::new(Default::default());
-  isolate.add_gc_prologue_callback(callback, state_ptr, v8::GCType::ALL);
+  isolate.add_gc_prologue_callback(callback, state_ptr, v8::GCType::kGCTypeAll);
   isolate.add_gc_prologue_callback(
     callback2,
     state_ptr,
-    v8::GCType::INCREMENTAL_MARKING | v8::GCType::PROCESS_WEAK_CALLBACKS,
+    v8::GCType::kGCTypeIncrementalMarking
+      | v8::GCType::kGCTypeProcessWeakCallbacks,
   );
 
   {