2021-02-13 07:31:18 -05:00
|
|
|
// Copyright 2019-2021 the Deno authors. All rights reserved. MIT license.
|
2020-12-31 16:06:29 +01:00
|
|
|
use crate::function::FunctionCallbackInfo;
|
2020-04-20 21:18:03 +02:00
|
|
|
use crate::isolate_create_params::raw;
|
|
|
|
use crate::isolate_create_params::CreateParams;
|
2019-12-19 14:13:33 +01:00
|
|
|
use crate::promise::PromiseRejectMessage;
|
2020-06-03 07:38:34 +02:00
|
|
|
use crate::scope::data::ScopeData;
|
2020-09-07 19:49:49 +02:00
|
|
|
use crate::support::BuildTypeIdHasher;
|
2021-02-08 13:11:48 +01:00
|
|
|
use crate::support::MapFnFrom;
|
|
|
|
use crate::support::MapFnTo;
|
2019-11-15 16:21:34 -08:00
|
|
|
use crate::support::Opaque;
|
2021-02-08 13:11:48 +01:00
|
|
|
use crate::support::ToCFn;
|
2020-12-31 16:06:29 +01:00
|
|
|
use crate::support::UnitType;
|
|
|
|
use crate::wasm::trampoline;
|
|
|
|
use crate::wasm::WasmStreaming;
|
2021-02-08 13:11:48 +01:00
|
|
|
use crate::Array;
|
|
|
|
use crate::CallbackScope;
|
2019-12-24 16:40:41 -05:00
|
|
|
use crate::Context;
|
2022-03-09 14:41:46 +01:00
|
|
|
use crate::Data;
|
2021-02-05 00:22:26 +01:00
|
|
|
use crate::FixedArray;
|
2020-01-02 10:15:31 -05:00
|
|
|
use crate::Function;
|
2021-02-08 13:11:48 +01:00
|
|
|
use crate::HandleScope;
|
2019-12-19 21:34:07 -05:00
|
|
|
use crate::Local;
|
2019-12-20 08:47:20 -05:00
|
|
|
use crate::Message;
|
2019-12-24 16:40:41 -05:00
|
|
|
use crate::Module;
|
|
|
|
use crate::Object;
|
2019-12-26 10:45:55 -05:00
|
|
|
use crate::Promise;
|
|
|
|
use crate::String;
|
2019-12-19 21:34:07 -05:00
|
|
|
use crate::Value;
|
2020-02-12 11:33:58 -05:00
|
|
|
|
2020-04-20 21:18:03 +02:00
|
|
|
use std::any::Any;
|
2020-04-23 09:34:28 +02:00
|
|
|
use std::any::TypeId;
|
2020-09-07 19:49:49 +02:00
|
|
|
|
2020-04-23 09:34:28 +02:00
|
|
|
use std::collections::HashMap;
|
2019-12-25 10:56:27 -05:00
|
|
|
use std::ffi::c_void;
|
2020-11-18 15:17:25 +01:00
|
|
|
use std::fmt::{self, Debug, Formatter};
|
2020-08-23 16:16:45 +02:00
|
|
|
use std::mem::MaybeUninit;
|
2019-12-20 08:47:20 -05:00
|
|
|
use std::ops::Deref;
|
|
|
|
use std::ops::DerefMut;
|
2021-01-16 17:30:38 +08:00
|
|
|
use std::os::raw::c_char;
|
2020-02-12 11:33:58 -05:00
|
|
|
use std::ptr::null_mut;
|
2019-12-20 08:47:20 -05:00
|
|
|
use std::ptr::NonNull;
|
2020-02-12 11:33:58 -05:00
|
|
|
use std::sync::Arc;
|
|
|
|
use std::sync::Mutex;
|
2019-12-19 21:34:07 -05:00
|
|
|
|
2020-08-23 16:25:16 +02:00
|
|
|
/// Policy for running microtasks:
/// - explicit: microtasks are invoked with the
///   Isolate::PerformMicrotaskCheckpoint() method;
/// - auto: microtasks are invoked when the script call depth decrements
///   to zero.
// NOTE: passed by value across the C FFI boundary (see
// v8__Isolate__{Get,Set}MicrotasksPolicy), so the explicit discriminant
// values must stay in sync with the v8::MicrotasksPolicy C++ enum.
#[derive(Debug, Clone, Copy, PartialEq)]
#[repr(C)]
pub enum MicrotasksPolicy {
  Explicit = 0,
  // Scoped = 1 (RAII) is omitted for now, doesn't quite map to idiomatic Rust.
  Auto = 2,
}
|
|
|
|
|
2020-10-12 22:33:46 +02:00
|
|
|
/// PromiseHook with type Init is called when a new promise is
/// created. When a new promise is created as part of the chain in the
/// case of Promise.then or in the intermediate promises created by
/// Promise.{race, all}/AsyncFunctionAwait, we pass the parent promise
/// otherwise we pass undefined.
///
/// PromiseHook with type Resolve is called at the beginning of
/// resolve or reject function defined by CreateResolvingFunctions.
///
/// PromiseHook with type Before is called at the beginning of the
/// PromiseReactionJob.
///
/// PromiseHook with type After is called right at the end of the
/// PromiseReactionJob.
// NOTE: passed by value across the C FFI boundary (see PromiseHook below),
// so the variant order must stay in sync with v8::PromiseHookType.
#[derive(Debug, Clone, Copy, PartialEq)]
#[repr(C)]
pub enum PromiseHookType {
  Init,
  Resolve,
  Before,
  After,
}
|
|
|
|
|
2019-12-26 10:45:55 -05:00
|
|
|
/// Callback invoked for error messages; receives the message and the
/// exception object. Registered via [`Isolate::add_message_listener`].
pub type MessageCallback = extern "C" fn(Local<Message>, Local<Value>);
|
2019-11-15 16:21:34 -08:00
|
|
|
|
2020-10-12 22:33:46 +02:00
|
|
|
/// Callback invoked for promise lifecycle events; see [`PromiseHookType`]
/// for when each event fires. Registered via [`Isolate::set_promise_hook`].
pub type PromiseHook =
  extern "C" fn(PromiseHookType, Local<Promise>, Local<Value>);
|
|
|
|
|
2019-12-26 10:45:55 -05:00
|
|
|
/// Callback invoked for unhandled promise rejections (and their later
/// revocation). Registered via [`Isolate::set_promise_reject_callback`].
pub type PromiseRejectCallback = extern "C" fn(PromiseRejectMessage);
|
2019-12-19 14:13:33 +01:00
|
|
|
|
2019-12-26 10:45:55 -05:00
|
|
|
/// HostInitializeImportMetaObjectCallback is called the first time import.meta
/// is accessed for a module. Subsequent access will reuse the same value.
///
/// The method combines two implementation-defined abstract operations into one:
/// HostGetImportMetaProperties and HostFinalizeImportMeta.
///
/// The embedder should use v8::Object::CreateDataProperty to add properties on
/// the meta object.
pub type HostInitializeImportMetaObjectCallback =
  extern "C" fn(Local<Context>, Local<Module>, Local<Object>);
|
|
|
|
|
2022-03-09 14:41:46 +01:00
|
|
|
/// HostImportModuleDynamicallyCallback is called when we require the
/// embedder to load a module. This is used as part of the dynamic
/// import syntax.
///
/// The referrer contains metadata about the script/module that calls
/// import.
///
/// The specifier is the name of the module that should be imported.
///
/// The embedder must compile, instantiate, evaluate the Module, and
/// obtain its namespace object.
///
/// The Promise returned from this function is forwarded to userland
/// JavaScript. The embedder must resolve this promise with the module
/// namespace object. In case of an exception, the embedder must reject
/// this promise with the exception. If the promise creation itself
/// fails (e.g. due to stack overflow), the embedder must propagate
/// that exception by returning an empty MaybeLocal.
pub type HostImportModuleDynamicallyCallback = extern "C" fn(
  Local<Context>,
  Local<Data>,
  Local<Value>,
  Local<String>,
  Local<FixedArray>,
) -> *mut Promise;
|
2019-12-26 10:45:55 -05:00
|
|
|
|
2020-01-15 15:33:47 -05:00
|
|
|
/// Callback invoked on the isolate's thread in response to
/// [`IsolateHandle`]-style interrupt requests (see
/// `v8__Isolate__RequestInterrupt`). `data` is the pointer supplied when
/// the interrupt was requested.
pub type InterruptCallback =
  extern "C" fn(isolate: &mut Isolate, data: *mut c_void);
|
|
|
|
|
2020-07-30 18:40:18 +02:00
|
|
|
/// Callback invoked when the heap size approaches the heap limit; the
/// returned value becomes the new heap limit. Registered via
/// [`Isolate::add_near_heap_limit_callback`].
pub type NearHeapLimitCallback = extern "C" fn(
  data: *mut c_void,
  current_heap_limit: usize,
  initial_heap_limit: usize,
) -> usize;
|
|
|
|
|
2021-03-28 04:16:35 +09:00
|
|
|
/// Callback invoked on fatal out-of-memory errors. `location` is a
/// C string describing where the OOM occurred; `is_heap_oom` indicates a
/// JavaScript heap (as opposed to process) OOM. Registered via
/// [`Isolate::set_oom_error_handler`].
pub type OomErrorCallback =
  extern "C" fn(location: *const c_char, is_heap_oom: bool);
|
|
|
|
|
2020-08-23 16:16:45 +02:00
|
|
|
/// Collection of V8 heap information.
///
/// Instances of this class can be passed to v8::Isolate::GetHeapStatistics to
/// get heap statistics from V8.
// Opaque storage written by C++; fields are read through the
// v8__HeapStatistics__* accessor functions rather than directly.
// Must be >= sizeof(v8::HeapStatistics), see v8__HeapStatistics__CONSTRUCT().
#[repr(C)]
#[derive(Debug)]
pub struct HeapStatistics([usize; 16]);
|
|
|
|
|
2021-02-08 13:11:48 +01:00
|
|
|
/// Callback invoked when the `stack` property of an error object is first
/// accessed; the returned value becomes the stack value. Registered via
/// [`Isolate::set_prepare_stack_trace_callback`].
///
/// Two signatures exist because the C++ return type `MaybeLocal<Value>` is
/// passed differently per platform ABI (see the per-cfg notes below).
// Windows x64 ABI: MaybeLocal<Value> returned on the stack. The caller
// passes a pointer to the return slot as a hidden first argument, and the
// callee returns that same pointer.
#[cfg(target_os = "windows")]
pub type PrepareStackTraceCallback<'s> = extern "C" fn(
  *mut *const Value,
  Local<'s, Context>,
  Local<'s, Value>,
  Local<'s, Array>,
) -> *mut *const Value;

// System V ABI: MaybeLocal<Value> returned in a register.
#[cfg(not(target_os = "windows"))]
pub type PrepareStackTraceCallback<'s> = extern "C" fn(
  Local<'s, Context>,
  Local<'s, Value>,
  Local<'s, Array>,
) -> *const Value;
|
|
|
|
|
2019-11-15 16:21:34 -08:00
|
|
|
extern "C" {
|
2020-04-20 21:18:03 +02:00
|
|
|
fn v8__Isolate__New(params: *const raw::CreateParams) -> *mut Isolate;
|
2019-12-19 19:15:52 -05:00
|
|
|
fn v8__Isolate__Dispose(this: *mut Isolate);
|
2019-12-25 10:56:27 -05:00
|
|
|
fn v8__Isolate__SetData(this: *mut Isolate, slot: u32, data: *mut c_void);
|
|
|
|
fn v8__Isolate__GetData(this: *const Isolate, slot: u32) -> *mut c_void;
|
|
|
|
fn v8__Isolate__GetNumberOfDataSlots(this: *const Isolate) -> u32;
|
2019-12-19 19:15:52 -05:00
|
|
|
fn v8__Isolate__Enter(this: *mut Isolate);
|
|
|
|
fn v8__Isolate__Exit(this: *mut Isolate);
|
2020-10-21 08:00:44 +02:00
|
|
|
fn v8__Isolate__ClearKeptObjects(isolate: *mut Isolate);
|
2020-09-07 12:00:35 +01:00
|
|
|
fn v8__Isolate__LowMemoryNotification(isolate: *mut Isolate);
|
2020-08-23 16:16:45 +02:00
|
|
|
fn v8__Isolate__GetHeapStatistics(this: *mut Isolate, s: *mut HeapStatistics);
|
2019-12-18 17:17:38 -05:00
|
|
|
fn v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
|
2019-12-19 19:15:52 -05:00
|
|
|
this: *mut Isolate,
|
2019-12-18 17:17:38 -05:00
|
|
|
caputre: bool,
|
|
|
|
frame_limit: i32,
|
|
|
|
);
|
2019-12-19 21:34:07 -05:00
|
|
|
fn v8__Isolate__AddMessageListener(
|
2020-04-13 14:43:56 +02:00
|
|
|
isolate: *mut Isolate,
|
2019-12-19 21:34:07 -05:00
|
|
|
callback: MessageCallback,
|
|
|
|
) -> bool;
|
2020-07-30 18:40:18 +02:00
|
|
|
fn v8__Isolate__AddNearHeapLimitCallback(
|
|
|
|
isolate: *mut Isolate,
|
|
|
|
callback: NearHeapLimitCallback,
|
|
|
|
data: *mut c_void,
|
|
|
|
);
|
|
|
|
fn v8__Isolate__RemoveNearHeapLimitCallback(
|
|
|
|
isolate: *mut Isolate,
|
|
|
|
callback: NearHeapLimitCallback,
|
|
|
|
heap_limit: usize,
|
|
|
|
);
|
2021-01-16 17:30:38 +08:00
|
|
|
fn v8__Isolate__SetOOMErrorHandler(
|
|
|
|
isolate: *mut Isolate,
|
2021-03-28 04:16:35 +09:00
|
|
|
callback: OomErrorCallback,
|
2021-01-16 17:30:38 +08:00
|
|
|
);
|
2022-01-23 22:49:29 +05:30
|
|
|
fn v8__Isolate__AdjustAmountOfExternalAllocatedMemory(
|
|
|
|
isolate: *mut Isolate,
|
|
|
|
change_in_bytes: i64,
|
|
|
|
) -> i64;
|
2021-02-08 13:11:48 +01:00
|
|
|
fn v8__Isolate__SetPrepareStackTraceCallback(
|
|
|
|
isolate: *mut Isolate,
|
|
|
|
callback: PrepareStackTraceCallback,
|
|
|
|
);
|
2020-10-12 22:33:46 +02:00
|
|
|
fn v8__Isolate__SetPromiseHook(isolate: *mut Isolate, hook: PromiseHook);
|
2019-12-19 14:13:33 +01:00
|
|
|
fn v8__Isolate__SetPromiseRejectCallback(
|
2019-12-19 19:15:52 -05:00
|
|
|
isolate: *mut Isolate,
|
2019-12-19 14:13:33 +01:00
|
|
|
callback: PromiseRejectCallback,
|
2019-12-19 19:15:52 -05:00
|
|
|
);
|
2019-12-24 16:40:41 -05:00
|
|
|
fn v8__Isolate__SetHostInitializeImportMetaObjectCallback(
|
|
|
|
isolate: *mut Isolate,
|
|
|
|
callback: HostInitializeImportMetaObjectCallback,
|
|
|
|
);
|
2019-12-26 10:45:55 -05:00
|
|
|
fn v8__Isolate__SetHostImportModuleDynamicallyCallback(
|
|
|
|
isolate: *mut Isolate,
|
2022-03-09 14:41:46 +01:00
|
|
|
callback: HostImportModuleDynamicallyCallback,
|
2019-12-26 10:45:55 -05:00
|
|
|
);
|
2020-01-15 15:33:47 -05:00
|
|
|
fn v8__Isolate__RequestInterrupt(
|
|
|
|
isolate: *const Isolate,
|
|
|
|
callback: InterruptCallback,
|
|
|
|
data: *mut c_void,
|
|
|
|
);
|
2020-01-17 15:17:48 +01:00
|
|
|
fn v8__Isolate__TerminateExecution(isolate: *const Isolate);
|
|
|
|
fn v8__Isolate__IsExecutionTerminating(isolate: *const Isolate) -> bool;
|
|
|
|
fn v8__Isolate__CancelTerminateExecution(isolate: *const Isolate);
|
2020-08-23 16:25:16 +02:00
|
|
|
fn v8__Isolate__GetMicrotasksPolicy(
|
|
|
|
isolate: *const Isolate,
|
|
|
|
) -> MicrotasksPolicy;
|
|
|
|
fn v8__Isolate__SetMicrotasksPolicy(
|
|
|
|
isolate: *mut Isolate,
|
|
|
|
policy: MicrotasksPolicy,
|
|
|
|
);
|
|
|
|
fn v8__Isolate__PerformMicrotaskCheckpoint(isolate: *mut Isolate);
|
2020-01-17 15:17:48 +01:00
|
|
|
fn v8__Isolate__EnqueueMicrotask(
|
|
|
|
isolate: *mut Isolate,
|
2020-04-13 14:43:56 +02:00
|
|
|
function: *const Function,
|
2020-01-17 15:17:48 +01:00
|
|
|
);
|
2020-10-15 22:12:44 +02:00
|
|
|
fn v8__Isolate__SetAllowAtomicsWait(isolate: *mut Isolate, allow: bool);
|
2020-12-31 16:06:29 +01:00
|
|
|
fn v8__Isolate__SetWasmStreamingCallback(
|
|
|
|
isolate: *mut Isolate,
|
|
|
|
callback: extern "C" fn(*const FunctionCallbackInfo),
|
|
|
|
);
|
2021-07-02 20:17:48 +02:00
|
|
|
fn v8__Isolate__HasPendingBackgroundTasks(isolate: *const Isolate) -> bool;
|
2020-12-31 16:06:29 +01:00
|
|
|
|
2020-03-09 18:30:25 +01:00
|
|
|
fn v8__HeapProfiler__TakeHeapSnapshot(
|
|
|
|
isolate: *mut Isolate,
|
|
|
|
callback: extern "C" fn(*mut c_void, *const u8, usize) -> bool,
|
|
|
|
arg: *mut c_void,
|
|
|
|
);
|
2020-08-23 16:16:45 +02:00
|
|
|
|
|
|
|
fn v8__HeapStatistics__CONSTRUCT(s: *mut MaybeUninit<HeapStatistics>);
|
|
|
|
fn v8__HeapStatistics__total_heap_size(s: *const HeapStatistics) -> usize;
|
|
|
|
fn v8__HeapStatistics__total_heap_size_executable(
|
|
|
|
s: *const HeapStatistics,
|
|
|
|
) -> usize;
|
|
|
|
fn v8__HeapStatistics__total_physical_size(s: *const HeapStatistics)
|
|
|
|
-> usize;
|
|
|
|
fn v8__HeapStatistics__total_available_size(
|
|
|
|
s: *const HeapStatistics,
|
|
|
|
) -> usize;
|
|
|
|
fn v8__HeapStatistics__total_global_handles_size(
|
|
|
|
s: *const HeapStatistics,
|
|
|
|
) -> usize;
|
|
|
|
fn v8__HeapStatistics__used_global_handles_size(
|
|
|
|
s: *const HeapStatistics,
|
|
|
|
) -> usize;
|
|
|
|
fn v8__HeapStatistics__used_heap_size(s: *const HeapStatistics) -> usize;
|
|
|
|
fn v8__HeapStatistics__heap_size_limit(s: *const HeapStatistics) -> usize;
|
|
|
|
fn v8__HeapStatistics__malloced_memory(s: *const HeapStatistics) -> usize;
|
|
|
|
fn v8__HeapStatistics__external_memory(s: *const HeapStatistics) -> usize;
|
|
|
|
fn v8__HeapStatistics__peak_malloced_memory(
|
|
|
|
s: *const HeapStatistics,
|
|
|
|
) -> usize;
|
|
|
|
fn v8__HeapStatistics__number_of_native_contexts(
|
|
|
|
s: *const HeapStatistics,
|
|
|
|
) -> usize;
|
|
|
|
fn v8__HeapStatistics__number_of_detached_contexts(
|
|
|
|
s: *const HeapStatistics,
|
|
|
|
) -> usize;
|
|
|
|
fn v8__HeapStatistics__does_zap_garbage(s: *const HeapStatistics) -> usize;
|
2019-11-15 16:21:34 -08:00
|
|
|
}
|
|
|
|
|
2019-12-20 08:47:20 -05:00
|
|
|
/// Isolate represents an isolated instance of the V8 engine. V8 isolates have
/// completely separate states. Objects from one isolate must not be used in
/// other isolates. The embedder can create multiple isolates and use them in
/// parallel in multiple threads. An isolate can be entered by at most one
/// thread at any given time. The Locker/Unlocker API must be used to
/// synchronize.
///
/// rusty_v8 note: Unlike in the C++ API, the Isolate is entered when it is
/// constructed and exited when dropped.
// Zero-sized opaque wrapper: only ever handled behind a pointer/reference
// to memory owned by the C++ side.
#[repr(C)]
#[derive(Debug)]
pub struct Isolate(Opaque);
|
2019-11-27 07:14:39 -08:00
|
|
|
|
2019-11-15 16:21:34 -08:00
|
|
|
impl Isolate {
|
2020-06-03 07:38:34 +02:00
|
|
|
  // Internal embedder-data slots reserved by rusty_v8. Public `set_data`/
  // `get_data` offsets user slots by INTERNAL_SLOT_COUNT to keep these
  // private.
  // Slot holding the `IsolateAnnex` pointer (see `create_annex`).
  const ANNEX_SLOT: u32 = 0;
  // Slot holding the current `ScopeData` pointer (see
  // `get_current_scope_data` / `set_current_scope_data`).
  const CURRENT_SCOPE_DATA_SLOT: u32 = 1;
  const INTERNAL_SLOT_COUNT: u32 = 2;
|
|
|
|
|
2019-12-04 00:57:06 -05:00
|
|
|
  /// Creates a new isolate. Does not change the currently entered
  /// isolate.
  ///
  /// When an isolate is no longer used its resources should be freed
  /// by calling V8::dispose(). Using the delete operator is not allowed.
  ///
  /// V8::initialize() must have run prior to this.
  // Returns `OwnedIsolate` (a smart pointer around `Isolate`) rather than
  // `Self`, hence the clippy allowance.
  #[allow(clippy::new_ret_no_self)]
  pub fn new(params: CreateParams) -> OwnedIsolate {
    crate::V8::assert_initialized();
    let (raw_create_params, create_param_allocations) = params.finalize();
    let cxx_isolate = unsafe { v8__Isolate__New(&raw_create_params) };
    let mut owned_isolate = OwnedIsolate::new(cxx_isolate);
    // Order matters: root scope data must exist before the annex is
    // installed, and the annex stores `create_param_allocations` so the
    // buffers referenced by the raw params stay alive.
    ScopeData::new_root(&mut owned_isolate);
    owned_isolate.create_annex(create_param_allocations);
    // rusty_v8 convention: enter the isolate on construction; the matching
    // exit happens when the OwnedIsolate is dropped.
    unsafe {
      owned_isolate.enter();
    }
    owned_isolate
  }
|
2019-12-04 00:57:06 -05:00
|
|
|
|
|
|
|
  /// Initial configuration parameters for a new Isolate.
  ///
  /// Convenience constructor; equivalent to `CreateParams::default()`.
  pub fn create_params() -> CreateParams {
    CreateParams::default()
  }
|
2019-12-09 02:26:58 +01:00
|
|
|
|
2020-12-31 08:30:30 +01:00
|
|
|
  /// Returns a handle to this isolate that can be used (and dropped) from
  /// other threads, e.g. to request termination or interrupts.
  pub fn thread_safe_handle(&self) -> IsolateHandle {
    IsolateHandle::new(self)
  }
|
|
|
|
|
2020-12-25 22:45:57 +01:00
|
|
|
  /// See [`IsolateHandle::terminate_execution`]
  // Convenience delegation through a freshly created thread-safe handle.
  pub fn terminate_execution(&self) -> bool {
    self.thread_safe_handle().terminate_execution()
  }
|
|
|
|
|
|
|
|
/// See [`IsolateHandle::cancel_terminate_execution`]
|
2020-12-31 08:30:30 +01:00
|
|
|
pub fn cancel_terminate_execution(&self) -> bool {
|
2020-12-25 22:45:57 +01:00
|
|
|
self.thread_safe_handle().cancel_terminate_execution()
|
|
|
|
}
|
|
|
|
|
|
|
|
/// See [`IsolateHandle::is_execution_terminating`]
|
2020-12-31 08:30:30 +01:00
|
|
|
pub fn is_execution_terminating(&self) -> bool {
|
2020-12-25 22:45:57 +01:00
|
|
|
self.thread_safe_handle().is_execution_terminating()
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:26:23 +02:00
|
|
|
  /// Allocates the `IsolateAnnex` (rusty_v8's per-isolate side data: slots,
  /// plus the `CreateParams` allocations that must outlive the isolate) and
  /// stores a raw `Arc` pointer to it in the reserved ANNEX_SLOT.
  ///
  /// Must be called exactly once per isolate; asserts the slot is empty.
  pub(crate) fn create_annex(
    &mut self,
    create_param_allocations: Box<dyn Any>,
  ) {
    let annex_arc = Arc::new(IsolateAnnex::new(self, create_param_allocations));
    // Leak one Arc refcount into the slot; it is reclaimed when the isolate
    // is disposed (or temporarily cloned via `get_annex_arc`).
    let annex_ptr = Arc::into_raw(annex_arc);
    unsafe {
      assert!(v8__Isolate__GetData(self, Self::ANNEX_SLOT).is_null());
      v8__Isolate__SetData(self, Self::ANNEX_SLOT, annex_ptr as *mut c_void);
    };
  }
|
|
|
|
|
2020-04-23 09:34:28 +02:00
|
|
|
  /// Borrows the `IsolateAnnex` stored in ANNEX_SLOT.
  // Safety relies on `create_annex` having run (the slot holds a valid,
  // live IsolateAnnex pointer for the isolate's entire lifetime).
  fn get_annex(&self) -> &IsolateAnnex {
    unsafe {
      &*(v8__Isolate__GetData(self, Self::ANNEX_SLOT) as *const _
        as *const IsolateAnnex)
    }
  }
|
|
|
|
|
|
|
|
  /// Mutably borrows the `IsolateAnnex` stored in ANNEX_SLOT.
  // Safety relies on `create_annex` having run and on `&mut self`
  // guaranteeing exclusive access to the isolate.
  fn get_annex_mut(&mut self) -> &mut IsolateAnnex {
    unsafe {
      &mut *(v8__Isolate__GetData(self, Self::ANNEX_SLOT) as *mut IsolateAnnex)
    }
  }
|
|
|
|
|
|
|
|
  /// Returns a new `Arc` reference to the annex, leaving the refcount held
  /// by the isolate's data slot intact.
  fn get_annex_arc(&self) -> Arc<IsolateAnnex> {
    let annex_ptr = self.get_annex();
    // Reconstitute the Arc that was leaked into the slot, clone it, then
    // immediately re-leak the original so the slot's refcount is unchanged.
    let annex_arc = unsafe { Arc::from_raw(annex_ptr) };
    let _ = Arc::into_raw(annex_arc.clone());
    annex_arc
  }
|
|
|
|
|
2020-06-03 07:38:34 +02:00
|
|
|
  /// Associate embedder-specific data with the isolate. `slot` has to be
  /// between 0 and `Isolate::get_number_of_data_slots()`.
  // User slots are offset past the rusty_v8-internal slots (annex, scope
  // data) so they cannot clobber them.
  unsafe fn set_data(&mut self, slot: u32, ptr: *mut c_void) {
    v8__Isolate__SetData(self, slot + Self::INTERNAL_SLOT_COUNT, ptr)
  }
|
|
|
|
|
|
|
|
  /// Retrieve embedder-specific data from the isolate.
  /// Returns NULL if SetData has never been called for the given `slot`.
  // Offset mirrors `set_data`; see note there.
  #[allow(dead_code)]
  fn get_data(&self, slot: u32) -> *mut c_void {
    unsafe { v8__Isolate__GetData(self, slot + Self::INTERNAL_SLOT_COUNT) }
  }
|
|
|
|
|
|
|
|
/// Returns the maximum number of available embedder data slots. Valid slots
|
2020-06-03 07:38:34 +02:00
|
|
|
/// are in the range of 0 - `Isolate::get_number_of_data_slots() - 1`.
|
2021-10-28 16:29:04 -04:00
|
|
|
#[allow(dead_code)]
|
2020-05-29 18:42:54 -04:00
|
|
|
fn get_number_of_data_slots(&self) -> u32 {
|
2020-06-03 07:38:34 +02:00
|
|
|
unsafe {
|
|
|
|
v8__Isolate__GetNumberOfDataSlots(self) - Self::INTERNAL_SLOT_COUNT
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns a pointer to the `ScopeData` struct for the current scope.
|
|
|
|
pub(crate) fn get_current_scope_data(&self) -> Option<NonNull<ScopeData>> {
|
|
|
|
let scope_data_ptr =
|
|
|
|
unsafe { v8__Isolate__GetData(self, Self::CURRENT_SCOPE_DATA_SLOT) };
|
|
|
|
NonNull::new(scope_data_ptr).map(NonNull::cast)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Updates the slot that stores a `ScopeData` pointer for the current scope.
|
|
|
|
pub(crate) fn set_current_scope_data(
|
|
|
|
&mut self,
|
|
|
|
scope_data: Option<NonNull<ScopeData>>,
|
|
|
|
) {
|
|
|
|
let scope_data_ptr = scope_data
|
|
|
|
.map(NonNull::cast)
|
|
|
|
.map(NonNull::as_ptr)
|
|
|
|
.unwrap_or_else(null_mut);
|
|
|
|
unsafe {
|
|
|
|
v8__Isolate__SetData(self, Self::CURRENT_SCOPE_DATA_SLOT, scope_data_ptr)
|
|
|
|
};
|
2019-12-25 10:56:27 -05:00
|
|
|
}
|
|
|
|
|
2020-09-07 19:49:49 +02:00
|
|
|
/// Get a reference to embedder data added with `set_slot()`.
|
|
|
|
pub fn get_slot<T: 'static>(&self) -> Option<&T> {
|
|
|
|
let b = self.get_annex().slots.get(&TypeId::of::<T>())?;
|
2021-04-29 15:12:54 -05:00
|
|
|
let r = <dyn Any>::downcast_ref::<T>(&**b).unwrap();
|
2020-09-07 19:49:49 +02:00
|
|
|
Some(r)
|
2020-04-23 09:34:28 +02:00
|
|
|
}
|
|
|
|
|
2020-09-07 19:49:49 +02:00
|
|
|
/// Get a mutable reference to embedder data added with `set_slot()`.
|
|
|
|
pub fn get_slot_mut<T: 'static>(&mut self) -> Option<&mut T> {
|
|
|
|
let b = self.get_annex_mut().slots.get_mut(&TypeId::of::<T>())?;
|
2021-04-29 15:12:54 -05:00
|
|
|
let r = <dyn Any>::downcast_mut::<T>(&mut **b).unwrap();
|
2020-09-07 19:49:49 +02:00
|
|
|
Some(r)
|
2020-04-23 09:34:28 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Use with Isolate::get_slot and Isolate::get_slot_mut to associate state
|
|
|
|
/// with an Isolate.
|
|
|
|
///
|
|
|
|
/// This method gives ownership of value to the Isolate. Exactly one object of
|
|
|
|
/// each type can be associated with an Isolate. If called more than once with
|
|
|
|
/// an object of the same type, the earlier version will be dropped and
|
|
|
|
/// replaced.
|
|
|
|
///
|
|
|
|
/// Returns true if value was set without replacing an existing value.
|
|
|
|
///
|
|
|
|
/// The value will be dropped when the isolate is dropped.
|
|
|
|
pub fn set_slot<T: 'static>(&mut self, value: T) -> bool {
|
|
|
|
self
|
|
|
|
.get_annex_mut()
|
|
|
|
.slots
|
2020-09-07 19:49:49 +02:00
|
|
|
.insert(Any::type_id(&value), Box::new(value))
|
2020-04-23 09:34:28 +02:00
|
|
|
.is_none()
|
|
|
|
}
|
|
|
|
|
2021-09-09 01:41:50 +03:00
|
|
|
/// Removes the embedder data added with `set_slot()` and returns it if it exists.
|
|
|
|
pub fn remove_slot<T: 'static>(&mut self) -> Option<T> {
|
|
|
|
let b = self.get_annex_mut().slots.remove(&TypeId::of::<T>())?;
|
|
|
|
let v: T = *b.downcast::<T>().unwrap();
|
|
|
|
Some(v)
|
|
|
|
}
|
|
|
|
|
2019-12-09 02:26:58 +01:00
|
|
|
  /// Sets this isolate as the entered one for the current thread.
  /// Saves the previously entered one (if any), so that it can be
  /// restored when exiting. Re-entering an isolate is allowed.
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  /// The caller must pair this with a matching [`Isolate::exit`]; normally
  /// only the Owned/OwnedIsolate machinery calls it.
  pub unsafe fn enter(&mut self) {
    v8__Isolate__Enter(self)
  }
|
|
|
|
|
|
|
|
  /// Exits this isolate by restoring the previously entered one in the
  /// current thread. The isolate may still stay the same, if it was
  /// entered more than once.
  ///
  /// Requires: self == Isolate::GetCurrent().
  ///
  /// rusty_v8 note: Unlike in the C++ API, the isolate is entered when it is
  /// constructed and exited when dropped.
  ///
  /// # Safety
  /// Must balance a prior [`Isolate::enter`] on the same thread.
  pub unsafe fn exit(&mut self) {
    v8__Isolate__Exit(self)
  }
|
2019-12-18 17:17:38 -05:00
|
|
|
|
2020-10-21 08:00:44 +02:00
|
|
|
  /// Clears the set of objects held strongly by the heap. This set of
  /// objects are originally built when a WeakRef is created or
  /// successfully dereferenced.
  ///
  /// This is invoked automatically after microtasks are run. See
  /// MicrotasksPolicy for when microtasks are run.
  ///
  /// This needs to be manually invoked only if the embedder is manually
  /// running microtasks via a custom MicrotaskQueue class's PerformCheckpoint.
  /// In that case, it is the embedder's responsibility to make this call at a
  /// time which does not interrupt synchronous ECMAScript code execution.
  pub fn clear_kept_objects(&mut self) {
    unsafe { v8__Isolate__ClearKeptObjects(self) }
  }
|
|
|
|
|
2020-09-07 12:00:35 +01:00
|
|
|
  /// Optional notification that the system is running low on memory.
  /// V8 uses these notifications to attempt to free memory.
  pub fn low_memory_notification(&mut self) {
    unsafe { v8__Isolate__LowMemoryNotification(self) }
  }
|
|
|
|
|
2020-08-23 16:16:45 +02:00
|
|
|
  /// Get statistics about the heap memory usage.
  ///
  /// Fills the caller-provided [`HeapStatistics`] in place; read the values
  /// through its accessor methods.
  pub fn get_heap_statistics(&mut self, s: &mut HeapStatistics) {
    unsafe { v8__Isolate__GetHeapStatistics(self, s) }
  }
|
|
|
|
|
2019-12-18 17:17:38 -05:00
|
|
|
  /// Tells V8 to capture current stack trace when uncaught exception occurs
  /// and report it to the message listeners. The option is off by default.
  ///
  /// `frame_limit` bounds the number of captured stack frames.
  pub fn set_capture_stack_trace_for_uncaught_exceptions(
    &mut self,
    capture: bool,
    frame_limit: i32,
  ) {
    unsafe {
      v8__Isolate__SetCaptureStackTraceForUncaughtExceptions(
        self,
        capture,
        frame_limit,
      )
    }
  }
|
2019-12-19 14:13:33 +01:00
|
|
|
|
2019-12-19 21:34:07 -05:00
|
|
|
  /// Adds a message listener (errors only).
  ///
  /// The same message listener can be added more than once and in that
  /// case it will be called more than once for each message.
  ///
  /// The exception object will be passed to the callback.
  pub fn add_message_listener(&mut self, callback: MessageCallback) -> bool {
    unsafe { v8__Isolate__AddMessageListener(self, callback) }
  }
|
|
|
|
|
2021-02-08 13:11:48 +01:00
|
|
|
  /// This specifies the callback called when the stack property of Error
  /// is accessed.
  ///
  /// PrepareStackTraceCallback is called when the stack property of an error is
  /// first accessed. The return value will be used as the stack value. If this
  /// callback is registered, the |Error.prepareStackTrace| API will be disabled.
  /// |sites| is an array of call sites, specified in
  /// https://v8.dev/docs/stack-trace-api
  pub fn set_prepare_stack_trace_callback<'s>(
    &mut self,
    callback: impl MapFnTo<PrepareStackTraceCallback<'s>>,
  ) {
    // Note: the C++ API returns a MaybeLocal but V8 asserts at runtime when
    // it's empty. That is, you can't return None and that's why the Rust API
    // expects Local<Value> instead of Option<Local<Value>>.
    unsafe {
      v8__Isolate__SetPrepareStackTraceCallback(self, callback.map_fn_to())
    };
  }
|
|
|
|
|
2020-10-12 22:33:46 +02:00
|
|
|
  /// Set the PromiseHook callback for various promise lifecycle
  /// events.
  pub fn set_promise_hook(&mut self, hook: PromiseHook) {
    unsafe { v8__Isolate__SetPromiseHook(self, hook) }
  }
|
|
|
|
|
2019-12-19 14:13:33 +01:00
|
|
|
  /// Set callback to notify about promise reject with no handler, or
  /// revocation of such a previous notification once the handler is added.
  pub fn set_promise_reject_callback(
    &mut self,
    callback: PromiseRejectCallback,
  ) {
    unsafe { v8__Isolate__SetPromiseRejectCallback(self, callback) }
  }
|
2019-12-24 16:40:41 -05:00
|
|
|
  /// This specifies the callback called by the upcoming import.meta
  /// language feature to retrieve host-defined meta data for a module.
  pub fn set_host_initialize_import_meta_object_callback(
    &mut self,
    callback: HostInitializeImportMetaObjectCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostInitializeImportMetaObjectCallback(self, callback)
    }
  }
|
2019-12-19 19:15:52 -05:00
|
|
|
|
2019-12-26 10:45:55 -05:00
|
|
|
  /// This specifies the callback called by the upcoming dynamic
  /// import() language feature to load modules.
  pub fn set_host_import_module_dynamically_callback(
    &mut self,
    callback: HostImportModuleDynamicallyCallback,
  ) {
    unsafe {
      v8__Isolate__SetHostImportModuleDynamicallyCallback(self, callback)
    }
  }
|
|
|
|
|
2020-07-30 18:40:18 +02:00
|
|
|
  /// Add a callback to invoke in case the heap size is close to the heap limit.
  /// If multiple callbacks are added, only the most recently added callback is
  /// invoked.
  ///
  /// `data` is passed back to `callback` verbatim; the caller is responsible
  /// for its validity for as long as the callback stays registered.
  #[allow(clippy::not_unsafe_ptr_arg_deref)] // False positive.
  pub fn add_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    data: *mut c_void,
  ) {
    unsafe { v8__Isolate__AddNearHeapLimitCallback(self, callback, data) };
  }
|
|
|
|
|
|
|
|
  /// Remove the given callback and restore the heap limit to the given limit.
  /// If the given limit is zero, then it is ignored. If the current heap size
  /// is greater than the given limit, then the heap limit is restored to the
  /// minimal limit that is possible for the current heap size.
  pub fn remove_near_heap_limit_callback(
    &mut self,
    callback: NearHeapLimitCallback,
    heap_limit: usize,
  ) {
    unsafe {
      v8__Isolate__RemoveNearHeapLimitCallback(self, callback, heap_limit)
    };
  }
|
|
|
|
|
2022-01-23 22:49:29 +05:30
|
|
|
  /// Adjusts the amount of registered external memory. Used to give V8 an
  /// indication of the amount of externally allocated memory that is kept
  /// alive by JavaScript objects. V8 uses this to decide when to perform
  /// global garbage collections. Registering externally allocated memory
  /// will trigger global garbage collections more often than it would
  /// otherwise in an attempt to garbage collect the JavaScript objects
  /// that keep the externally allocated memory alive.
  ///
  /// `change_in_bytes` may be negative to unregister memory; returns the new
  /// total as reported by V8.
  pub fn adjust_amount_of_external_allocated_memory(
    &mut self,
    change_in_bytes: i64,
  ) -> i64 {
    unsafe {
      v8__Isolate__AdjustAmountOfExternalAllocatedMemory(self, change_in_bytes)
    }
  }
|
|
|
|
|
2021-03-28 04:16:35 +09:00
|
|
|
  /// Set the callback invoked on fatal out-of-memory errors; see
  /// [`OomErrorCallback`] for the arguments it receives.
  pub fn set_oom_error_handler(&mut self, callback: OomErrorCallback) {
    unsafe { v8__Isolate__SetOOMErrorHandler(self, callback) };
  }
|
|
|
|
|
2020-08-23 16:25:16 +02:00
|
|
|
  /// Returns the policy controlling how Microtasks are invoked.
  pub fn get_microtasks_policy(&self) -> MicrotasksPolicy {
    unsafe { v8__Isolate__GetMicrotasksPolicy(self) }
  }
|
|
|
|
|
|
|
|
  /// Sets the policy controlling how Microtasks are invoked.
  // (Doc fix: previously copy-pasted "Returns the policy ..." from the
  // getter above.)
  pub fn set_microtasks_policy(&mut self, policy: MicrotasksPolicy) {
    unsafe { v8__Isolate__SetMicrotasksPolicy(self, policy) }
  }
|
|
|
|
|
|
|
|
/// Runs the default MicrotaskQueue until it gets empty and perform other
|
|
|
|
/// microtask checkpoint steps, such as calling ClearKeptObjects. Asserts that
|
|
|
|
/// the MicrotasksPolicy is not kScoped. Any exceptions thrown by microtask
|
|
|
|
/// callbacks are swallowed.
|
|
|
|
pub fn perform_microtask_checkpoint(&mut self) {
|
|
|
|
unsafe { v8__Isolate__PerformMicrotaskCheckpoint(self) }
|
|
|
|
}
|
|
|
|
|
|
|
|
/// An alias for PerformMicrotaskCheckpoint.
|
|
|
|
#[deprecated(note = "Use Isolate::perform_microtask_checkpoint() instead")]
|
2020-02-12 11:33:58 -05:00
|
|
|
pub fn run_microtasks(&mut self) {
|
2020-08-23 16:25:16 +02:00
|
|
|
self.perform_microtask_checkpoint()
|
2020-02-12 11:33:58 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Enqueues the callback to the default MicrotaskQueue
|
|
|
|
pub fn enqueue_microtask(&mut self, microtask: Local<Function>) {
|
2020-04-13 14:43:56 +02:00
|
|
|
unsafe { v8__Isolate__EnqueueMicrotask(self, &*microtask) }
|
2020-02-12 11:33:58 -05:00
|
|
|
}
|
|
|
|
|
2020-10-15 22:12:44 +02:00
|
|
|
/// Set whether calling Atomics.wait (a function that may block) is allowed in
|
|
|
|
/// this isolate. This can also be configured via
|
|
|
|
/// CreateParams::allow_atomics_wait.
|
|
|
|
pub fn set_allow_atomics_wait(&mut self, allow: bool) {
|
|
|
|
unsafe { v8__Isolate__SetAllowAtomicsWait(self, allow) }
|
|
|
|
}
|
|
|
|
|
2020-12-31 16:06:29 +01:00
|
|
|
  /// Embedder injection point for `WebAssembly.compileStreaming(source)`.
  /// The expectation is that the embedder sets it at most once.
  ///
  /// The callback receives the source argument (string, Promise, etc.)
  /// and an instance of [WasmStreaming]. The [WasmStreaming] instance
  /// can outlive the callback and is used to feed data chunks to V8
  /// asynchronously.
  pub fn set_wasm_streaming_callback<F>(&mut self, _: F)
  where
    F: UnitType + Fn(&mut HandleScope, Local<Value>, WasmStreaming),
  {
    // The callback value itself is discarded (`_: F`); presumably
    // `trampoline::<F>()` reconstructs the callback from its type alone,
    // which the `UnitType` bound makes possible — see `crate::wasm::trampoline`.
    unsafe { v8__Isolate__SetWasmStreamingCallback(self, trampoline::<F>()) }
  }
|
|
|
|
|
2021-07-02 20:17:48 +02:00
|
|
|
  /// Returns true if there is ongoing background work within V8 that will
  /// eventually post a foreground task, like asynchronous WebAssembly
  /// compilation.
  ///
  /// Thin wrapper around V8's `HasPendingBackgroundTasks()`.
  pub fn has_pending_background_tasks(&self) -> bool {
    unsafe { v8__Isolate__HasPendingBackgroundTasks(self) }
  }
|
|
|
|
|
2020-02-12 11:33:58 -05:00
|
|
|
  /// Disposes the isolate. The isolate must not be entered by any
  /// thread to be disposable.
  ///
  /// The teardown order below matters: Rust-side state hanging off the
  /// isolate (scope stack, annex contents) is torn down first, and the C++
  /// isolate itself is disposed last.
  unsafe fn dispose(&mut self) {
    // Drop the scope stack.
    ScopeData::drop_root(self);

    // Set the `isolate` pointer inside the annex struct to null, so any
    // IsolateHandle that outlives the isolate will know that it can't call
    // methods on the isolate.
    let annex = self.get_annex_mut();
    {
      let _lock = annex.isolate_mutex.lock().unwrap();
      annex.isolate = null_mut();
    }

    // Clear slots and drop owned objects that were taken out of `CreateParams`.
    annex.create_param_allocations = Box::new(());
    annex.slots.clear();

    // Subtract one from the Arc<IsolateAnnex> reference count.
    // NOTE(review): this presumes the annex pointer was originally produced by
    // `Arc::into_raw`; reconstructing and dropping the Arc here releases the
    // isolate's own reference — confirm against the annex-creation code.
    Arc::from_raw(annex);
    self.set_data(0, null_mut());

    // No test case in rusty_v8 shows this, but there have been situations in
    // deno where dropping the annex before the states causes a segfault, so
    // the C++ isolate is disposed only after all of the above.
    v8__Isolate__Dispose(self)
  }
|
2020-03-09 18:30:25 +01:00
|
|
|
|
|
|
|
/// Take a heap snapshot. The callback is invoked one or more times
|
|
|
|
/// with byte slices containing the snapshot serialized as JSON.
|
|
|
|
/// It's the callback's responsibility to reassemble them into
|
|
|
|
/// a single document, e.g., by writing them to a file.
|
|
|
|
/// Note that Chrome DevTools refuses to load snapshots without
|
|
|
|
/// a .heapsnapshot suffix.
|
|
|
|
pub fn take_heap_snapshot<F>(&mut self, mut callback: F)
|
|
|
|
where
|
|
|
|
F: FnMut(&[u8]) -> bool,
|
|
|
|
{
|
|
|
|
extern "C" fn trampoline<F>(
|
|
|
|
arg: *mut c_void,
|
|
|
|
data: *const u8,
|
|
|
|
size: usize,
|
|
|
|
) -> bool
|
|
|
|
where
|
|
|
|
F: FnMut(&[u8]) -> bool,
|
|
|
|
{
|
|
|
|
let p = arg as *mut F;
|
|
|
|
let callback = unsafe { &mut *p };
|
|
|
|
let slice = unsafe { std::slice::from_raw_parts(data, size) };
|
|
|
|
callback(slice)
|
|
|
|
}
|
|
|
|
|
|
|
|
let arg = &mut callback as *mut F as *mut c_void;
|
|
|
|
unsafe { v8__HeapProfiler__TakeHeapSnapshot(self, trampoline::<F>, arg) }
|
|
|
|
}
|
2020-02-12 11:33:58 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Rust-side companion state attached to a V8 isolate. It keeps alive the
/// allocations taken out of `CreateParams`, stores type-keyed user data
/// slots, and holds the shared state that `IsolateHandle` uses to reach the
/// isolate from other threads.
pub(crate) struct IsolateAnnex {
  // Allocations moved out of `CreateParams` at isolate creation; replaced
  // with an empty `Box::new(())` when the isolate is disposed.
  create_param_allocations: Box<dyn Any>,
  // Type-keyed user data slots; cleared on dispose.
  slots: HashMap<TypeId, Box<dyn Any>, BuildTypeIdHasher>,
  // The `isolate` and `isolate_mutex` fields are there so an `IsolateHandle`
  // (which may outlive the isolate itself) can determine whether the isolate
  // is still alive, and if so, get a reference to it. Safety rules:
  // - The 'main thread' must lock the mutex and reset `isolate` to null just
  //   before the isolate is disposed.
  // - Any other thread must lock the mutex while it's reading/using the
  //   `isolate` pointer.
  isolate: *mut Isolate,
  isolate_mutex: Mutex<()>,
}
|
|
|
|
|
|
|
|
impl IsolateAnnex {
|
2020-04-23 20:14:53 +02:00
|
|
|
fn new(
|
|
|
|
isolate: &mut Isolate,
|
|
|
|
create_param_allocations: Box<dyn Any>,
|
|
|
|
) -> Self {
|
2020-02-12 11:33:58 -05:00
|
|
|
Self {
|
2020-04-23 20:14:53 +02:00
|
|
|
create_param_allocations,
|
2020-09-07 19:49:49 +02:00
|
|
|
slots: HashMap::default(),
|
2020-04-23 19:18:06 +02:00
|
|
|
isolate,
|
|
|
|
isolate_mutex: Mutex::new(()),
|
2020-02-12 11:33:58 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-11-18 15:17:25 +01:00
|
|
|
impl Debug for IsolateAnnex {
|
|
|
|
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
|
|
|
f.debug_struct("IsolateAnnex")
|
|
|
|
.field("isolate", &self.isolate)
|
|
|
|
.field("isolate_mutex", &self.isolate_mutex)
|
|
|
|
.finish()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-04-23 16:46:53 -04:00
|
|
|
/// IsolateHandle is a thread-safe reference to an Isolate. Its main use is to
/// terminate execution of a running isolate from another thread.
///
/// It is created with Isolate::thread_safe_handle().
///
/// IsolateHandle is Cloneable, Send, and Sync.
#[derive(Clone, Debug)]
pub struct IsolateHandle(Arc<IsolateAnnex>);
|
|
|
|
|
|
|
|
// SAFETY: cross-thread access to the isolate pointer held by the shared
// `IsolateAnnex` is guarded by `isolate_mutex` (see the safety rules on the
// annex's fields), so the handle may be sent to and shared between threads.
unsafe impl Send for IsolateHandle {}
unsafe impl Sync for IsolateHandle {}
|
|
|
|
|
|
|
|
impl IsolateHandle {
|
|
|
|
// This function is marked unsafe because it must be called only with either
|
|
|
|
// IsolateAnnex::mutex locked, or from the main thread associated with the V8
|
|
|
|
// isolate.
|
|
|
|
pub(crate) unsafe fn get_isolate_ptr(&self) -> *mut Isolate {
|
|
|
|
self.0.isolate
|
|
|
|
}
|
|
|
|
|
2020-12-31 08:30:30 +01:00
|
|
|
fn new(isolate: &Isolate) -> Self {
|
2020-04-23 09:34:28 +02:00
|
|
|
Self(isolate.get_annex_arc())
|
2020-02-12 11:33:58 -05:00
|
|
|
}
|
|
|
|
|
2019-12-31 03:11:43 -08:00
|
|
|
/// Forcefully terminate the current thread of JavaScript execution
|
|
|
|
/// in the given isolate.
|
|
|
|
///
|
|
|
|
/// This method can be used by any thread even if that thread has not
|
|
|
|
/// acquired the V8 lock with a Locker object.
|
|
|
|
///
|
2020-02-12 11:33:58 -05:00
|
|
|
/// Returns false if Isolate was already destroyed.
|
|
|
|
pub fn terminate_execution(&self) -> bool {
|
2020-04-23 19:18:06 +02:00
|
|
|
let _lock = self.0.isolate_mutex.lock().unwrap();
|
2020-02-12 11:33:58 -05:00
|
|
|
if self.0.isolate.is_null() {
|
|
|
|
false
|
|
|
|
} else {
|
|
|
|
unsafe { v8__Isolate__TerminateExecution(self.0.isolate) };
|
|
|
|
true
|
|
|
|
}
|
2019-12-31 03:11:43 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Resume execution capability in the given isolate, whose execution
|
|
|
|
/// was previously forcefully terminated using TerminateExecution().
|
|
|
|
///
|
|
|
|
/// When execution is forcefully terminated using TerminateExecution(),
|
|
|
|
/// the isolate can not resume execution until all JavaScript frames
|
|
|
|
/// have propagated the uncatchable exception which is generated. This
|
|
|
|
/// method allows the program embedding the engine to handle the
|
|
|
|
/// termination event and resume execution capability, even if
|
|
|
|
/// JavaScript frames remain on the stack.
|
|
|
|
///
|
|
|
|
/// This method can be used by any thread even if that thread has not
|
|
|
|
/// acquired the V8 lock with a Locker object.
|
2020-02-12 11:33:58 -05:00
|
|
|
///
|
|
|
|
/// Returns false if Isolate was already destroyed.
|
|
|
|
pub fn cancel_terminate_execution(&self) -> bool {
|
2020-04-23 19:18:06 +02:00
|
|
|
let _lock = self.0.isolate_mutex.lock().unwrap();
|
2020-02-12 11:33:58 -05:00
|
|
|
if self.0.isolate.is_null() {
|
|
|
|
false
|
|
|
|
} else {
|
|
|
|
unsafe { v8__Isolate__CancelTerminateExecution(self.0.isolate) };
|
|
|
|
true
|
|
|
|
}
|
2020-01-02 10:15:31 -05:00
|
|
|
}
|
|
|
|
|
2020-02-12 11:33:58 -05:00
|
|
|
/// Is V8 terminating JavaScript execution.
|
|
|
|
///
|
|
|
|
/// Returns true if JavaScript execution is currently terminating
|
|
|
|
/// because of a call to TerminateExecution. In that case there are
|
|
|
|
/// still JavaScript frames on the stack and the termination
|
|
|
|
/// exception is still active.
|
|
|
|
///
|
|
|
|
/// Returns false if Isolate was already destroyed.
|
|
|
|
pub fn is_execution_terminating(&self) -> bool {
|
2020-04-23 19:18:06 +02:00
|
|
|
let _lock = self.0.isolate_mutex.lock().unwrap();
|
2020-02-12 11:33:58 -05:00
|
|
|
if self.0.isolate.is_null() {
|
|
|
|
false
|
|
|
|
} else {
|
|
|
|
unsafe { v8__Isolate__IsExecutionTerminating(self.0.isolate) }
|
|
|
|
}
|
2020-01-02 10:15:31 -05:00
|
|
|
}
|
|
|
|
|
2020-01-15 15:33:47 -05:00
|
|
|
/// Request V8 to interrupt long running JavaScript code and invoke
|
|
|
|
/// the given |callback| passing the given |data| to it. After |callback|
|
|
|
|
/// returns control will be returned to the JavaScript code.
|
|
|
|
/// There may be a number of interrupt requests in flight.
|
|
|
|
/// Can be called from another thread without acquiring a |Locker|.
|
|
|
|
/// Registered |callback| must not reenter interrupted Isolate.
|
2020-02-12 11:33:58 -05:00
|
|
|
///
|
|
|
|
/// Returns false if Isolate was already destroyed.
|
2020-01-15 15:33:47 -05:00
|
|
|
// Clippy warns that this method is dereferencing a raw pointer, but it is
|
|
|
|
// not: https://github.com/rust-lang/rust-clippy/issues/3045
|
|
|
|
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
|
|
|
pub fn request_interrupt(
|
|
|
|
&self,
|
|
|
|
callback: InterruptCallback,
|
|
|
|
data: *mut c_void,
|
2020-02-12 11:33:58 -05:00
|
|
|
) -> bool {
|
2020-04-23 19:18:06 +02:00
|
|
|
let _lock = self.0.isolate_mutex.lock().unwrap();
|
2020-02-12 11:33:58 -05:00
|
|
|
if self.0.isolate.is_null() {
|
|
|
|
false
|
|
|
|
} else {
|
|
|
|
unsafe { v8__Isolate__RequestInterrupt(self.0.isolate, callback, data) };
|
|
|
|
true
|
|
|
|
}
|
2019-12-19 14:13:33 +01:00
|
|
|
}
|
2019-11-15 16:21:34 -08:00
|
|
|
}
|
|
|
|
|
2019-12-19 19:15:52 -05:00
|
|
|
/// Same as Isolate but gets disposed when it goes out of scope.
#[derive(Debug)]
pub struct OwnedIsolate {
  // Non-null pointer to the underlying isolate; exited and disposed in the
  // `Drop` impl below.
  cxx_isolate: NonNull<Isolate>,
}
|
2019-12-19 19:15:52 -05:00
|
|
|
|
2020-04-20 21:18:03 +02:00
|
|
|
impl OwnedIsolate {
|
2020-04-23 20:14:53 +02:00
|
|
|
pub(crate) fn new(cxx_isolate: *mut Isolate) -> Self {
|
2020-04-20 21:18:03 +02:00
|
|
|
let cxx_isolate = NonNull::new(cxx_isolate).unwrap();
|
2020-04-23 20:14:53 +02:00
|
|
|
Self { cxx_isolate }
|
2020-04-20 21:18:03 +02:00
|
|
|
}
|
|
|
|
}
|
2020-02-19 22:55:44 -05:00
|
|
|
|
2019-12-19 19:15:52 -05:00
|
|
|
impl Drop for OwnedIsolate {
  fn drop(&mut self) {
    // The isolate is exited first: `dispose` requires that the isolate is
    // not entered by any thread (see `Isolate::dispose`).
    unsafe {
      self.exit();
      self.cxx_isolate.as_mut().dispose()
    }
  }
}
|
|
|
|
|
2019-12-19 19:15:52 -05:00
|
|
|
impl Deref for OwnedIsolate {
  type Target = Isolate;
  fn deref(&self) -> &Self::Target {
    // SAFETY: `cxx_isolate` is non-null (enforced in `OwnedIsolate::new`)
    // and stays valid until this `OwnedIsolate` is dropped.
    unsafe { self.cxx_isolate.as_ref() }
  }
}
|
|
|
|
|
2019-12-19 19:15:52 -05:00
|
|
|
impl DerefMut for OwnedIsolate {
  fn deref_mut(&mut self) -> &mut Self::Target {
    // SAFETY: `cxx_isolate` is non-null (enforced in `OwnedIsolate::new`);
    // `&mut self` guarantees exclusive access.
    unsafe { self.cxx_isolate.as_mut() }
  }
}
|
2020-08-23 16:16:45 +02:00
|
|
|
|
|
|
|
impl HeapStatistics {
  /// Corresponds to `v8::HeapStatistics::total_heap_size()`.
  pub fn total_heap_size(&self) -> usize {
    unsafe { v8__HeapStatistics__total_heap_size(self) }
  }

  /// Corresponds to `v8::HeapStatistics::total_heap_size_executable()`.
  pub fn total_heap_size_executable(&self) -> usize {
    unsafe { v8__HeapStatistics__total_heap_size_executable(self) }
  }

  /// Corresponds to `v8::HeapStatistics::total_physical_size()`.
  pub fn total_physical_size(&self) -> usize {
    unsafe { v8__HeapStatistics__total_physical_size(self) }
  }

  /// Corresponds to `v8::HeapStatistics::total_available_size()`.
  pub fn total_available_size(&self) -> usize {
    unsafe { v8__HeapStatistics__total_available_size(self) }
  }

  /// Corresponds to `v8::HeapStatistics::total_global_handles_size()`.
  pub fn total_global_handles_size(&self) -> usize {
    unsafe { v8__HeapStatistics__total_global_handles_size(self) }
  }

  /// Corresponds to `v8::HeapStatistics::used_global_handles_size()`.
  pub fn used_global_handles_size(&self) -> usize {
    unsafe { v8__HeapStatistics__used_global_handles_size(self) }
  }

  /// Corresponds to `v8::HeapStatistics::used_heap_size()`.
  pub fn used_heap_size(&self) -> usize {
    unsafe { v8__HeapStatistics__used_heap_size(self) }
  }

  /// Corresponds to `v8::HeapStatistics::heap_size_limit()`.
  pub fn heap_size_limit(&self) -> usize {
    unsafe { v8__HeapStatistics__heap_size_limit(self) }
  }

  /// Corresponds to `v8::HeapStatistics::malloced_memory()`.
  pub fn malloced_memory(&self) -> usize {
    unsafe { v8__HeapStatistics__malloced_memory(self) }
  }

  /// Corresponds to `v8::HeapStatistics::external_memory()`.
  pub fn external_memory(&self) -> usize {
    unsafe { v8__HeapStatistics__external_memory(self) }
  }

  /// Corresponds to `v8::HeapStatistics::peak_malloced_memory()`.
  pub fn peak_malloced_memory(&self) -> usize {
    unsafe { v8__HeapStatistics__peak_malloced_memory(self) }
  }

  /// Corresponds to `v8::HeapStatistics::number_of_native_contexts()`.
  pub fn number_of_native_contexts(&self) -> usize {
    unsafe { v8__HeapStatistics__number_of_native_contexts(self) }
  }

  /// Corresponds to `v8::HeapStatistics::number_of_detached_contexts()`.
  pub fn number_of_detached_contexts(&self) -> usize {
    unsafe { v8__HeapStatistics__number_of_detached_contexts(self) }
  }

  /// Returns a 0/1 boolean, which signifies whether V8 overwrites heap
  /// garbage with a bit pattern.
  pub fn does_zap_garbage(&self) -> usize {
    unsafe { v8__HeapStatistics__does_zap_garbage(self) }
  }
}
|
|
|
|
|
|
|
|
impl Default for HeapStatistics {
  /// Constructs a `HeapStatistics` by running the C++ constructor in place.
  fn default() -> Self {
    let mut s = MaybeUninit::<Self>::uninit();
    // SAFETY: the FFI contract is that `v8__HeapStatistics__CONSTRUCT` fully
    // initializes `s`, making `assume_init` sound afterwards.
    unsafe {
      v8__HeapStatistics__CONSTRUCT(&mut s);
      s.assume_init()
    }
  }
}
|
2021-02-08 13:11:48 +01:00
|
|
|
|
|
|
|
// Maps a high-level Rust closure onto the raw C callback type used for
// `PrepareStackTraceCallback`. Two variants exist because the two ABIs
// return the `MaybeLocal<Value>` result differently.
impl<'s, F> MapFnFrom<F> for PrepareStackTraceCallback<'s>
where
  F: UnitType
    + Fn(
      &mut HandleScope<'s>,
      Local<'s, Value>,
      Local<'s, Array>,
    ) -> Local<'s, Value>,
{
  // Windows x64 ABI: MaybeLocal<Value> returned on the stack.
  #[cfg(target_os = "windows")]
  fn mapping() -> Self {
    let f = |ret_ptr, context, error, sites| {
      let mut scope: CallbackScope = unsafe { CallbackScope::new(context) };
      let r = (F::get())(&mut scope, error, sites);
      // Write the returned Local through the caller-supplied return slot
      // (`ret_ptr`), then hand the slot pointer back, as this ABI expects.
      unsafe { std::ptr::write(ret_ptr, &*r as *const _) };
      ret_ptr
    };
    f.to_c_fn()
  }

  // System V ABI: MaybeLocal<Value> returned in a register.
  #[cfg(not(target_os = "windows"))]
  fn mapping() -> Self {
    let f = |context, error, sites| {
      let mut scope: CallbackScope = unsafe { CallbackScope::new(context) };
      let r = (F::get())(&mut scope, error, sites);
      // Return the Local's raw pointer directly; it fits in a register.
      &*r as *const _
    };
    f.to_c_fn()
  }
}
|