electron/patches/node/support_v8_sandboxed_pointers.patch

From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Jeremy Rose <japthorp@slack-corp.com>
Date: Tue, 21 Jun 2022 10:04:21 -0700
Subject: support V8 sandboxed pointers

This refactors several allocators to allocate within the V8 memory cage,
allowing them to be compatible with the V8_SANDBOXED_POINTERS feature.
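
As a rough illustration (not part of the patch itself), the pattern the hunks
below repeat is: allocate through V8's default ArrayBuffer::Allocator, which
places the memory inside the sandbox, copy the externally produced bytes in,
and wrap the result in a BackingStore whose deleter frees through the same
allocator. A minimal sketch, assuming only the public V8 embedder API; the
helper name CopyIntoCage is hypothetical:

    // Sketch only: copy external bytes into sandbox-owned memory.
    #include <cstring>
    #include <memory>
    #include "v8.h"

    std::unique_ptr<v8::BackingStore> CopyIntoCage(const void* src, size_t len) {
      // NewDefaultAllocator() allocates inside the V8 sandbox when it is enabled.
      std::unique_ptr<v8::ArrayBuffer::Allocator> allocator(
          v8::ArrayBuffer::Allocator::NewDefaultAllocator());
      void* data = allocator->Allocate(len);  // zero-initialized cage memory
      if (len != 0) memcpy(data, src, len);
      // The deleter recreates a default allocator so the memory is released
      // the same way it was obtained.
      return v8::ArrayBuffer::NewBackingStore(
          data, len,
          [](void* data, size_t length, void*) {
            std::unique_ptr<v8::ArrayBuffer::Allocator> allocator(
                v8::ArrayBuffer::Allocator::NewDefaultAllocator());
            allocator->Free(data, length);
          },
          nullptr);
    }

The crypto hunks additionally wipe the bytes with OPENSSL_cleanse before
freeing, and special-case zero-length buffers.
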
diff --git a/src/api/environment.cc b/src/api/environment.cc
index 53f05293bd94e159dfedf48735989e668acdd08e..5c8bc870dcf2e974036cf3bcb60fd288e59045d9 100644
--- a/src/api/environment.cc
+++ b/src/api/environment.cc
@@ -111,6 +111,14 @@ MaybeLocal<Value> PrepareStackTraceCallback(Local<Context> context,
return result;
}
+NodeArrayBufferAllocator::NodeArrayBufferAllocator() {
+ zero_fill_field_ = static_cast<uint32_t*>(allocator_->Allocate(sizeof(*zero_fill_field_)));
+}
+
+NodeArrayBufferAllocator::~NodeArrayBufferAllocator() {
+ allocator_->Free(zero_fill_field_, sizeof(*zero_fill_field_));
+}
+
void* NodeArrayBufferAllocator::Allocate(size_t size) {
void* ret;
COUNT_GENERIC_USAGE("NodeArrayBufferAllocator.Allocate.ZeroFilled");
@@ -337,6 +345,12 @@ Isolate* NewIsolate(Isolate::CreateParams* params,
// but also otherwise just doesn't work, and the only real alternative
// is disabling shared-readonly-heap mode altogether.
static Isolate::CreateParams first_params = *params;
+ // Clear allocator pointers to prevent use-after-free during static
+ // destruction. The static first_params can outlive V8's internal
+ // allocator systems, causing crashes when its destructor tries to
+ // free resources after V8 has shut down.
+ first_params.array_buffer_allocator = nullptr;
+ first_params.array_buffer_allocator_shared.reset();
params->snapshot_blob = first_params.snapshot_blob;
params->external_references = first_params.external_references;
}
diff --git a/src/crypto/crypto_dh.cc b/src/crypto/crypto_dh.cc
index 46a7d1396dc1a175ae99f4e403721f1730fdd320..bbb0abb3b9563074d350578e0f5a8fa211046b17 100644
--- a/src/crypto/crypto_dh.cc
+++ b/src/crypto/crypto_dh.cc
@@ -61,17 +61,22 @@ MaybeLocal<Value> DataPointerToBuffer(Environment* env, DataPointer&& data) {
bool secure;
};
#ifdef V8_ENABLE_SANDBOX
- auto backing = ArrayBuffer::NewBackingStore(
- env->isolate(),
- data.size(),
- BackingStoreInitializationMode::kUninitialized,
- BackingStoreOnFailureMode::kReturnNull);
- if (!backing) {
- THROW_ERR_MEMORY_ALLOCATION_FAILED(env);
- return MaybeLocal<Value>();
- }
+ std::unique_ptr<v8::BackingStore> backing;
if (data.size() > 0) {
- memcpy(backing->Data(), data.get(), data.size());
+ std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+ void* v8_data = allocator->Allocate(data.size());
+ CHECK(v8_data);
+ memcpy(v8_data, data.get(), data.size());
+ backing = ArrayBuffer::NewBackingStore(
+ v8_data,
+ data.size(),
+ [](void* data, size_t length, void*) {
+ std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+ allocator->Free(data, length);
+ }, nullptr);
+ } else {
+ NoArrayBufferZeroFillScope no_zero_fill_scope(env->isolate_data());
+ backing = v8::ArrayBuffer::NewBackingStore(env->isolate(), data.size());
}
#else
auto backing = ArrayBuffer::NewBackingStore(
diff --git a/src/crypto/crypto_util.cc b/src/crypto/crypto_util.cc
index 12b0d804c6f1d4998b85160b0aac8eb7a3b5576b..27bd93769233dc65a064710db4095d9cdc3a8b1a 100644
--- a/src/crypto/crypto_util.cc
+++ b/src/crypto/crypto_util.cc
@@ -346,24 +346,30 @@ std::unique_ptr<BackingStore> ByteSource::ReleaseToBackingStore(
// It's ok for allocated_data_ to be nullptr but
// only if size_ is zero.
CHECK_IMPLIES(size_ > 0, allocated_data_ != nullptr);
-#ifdef V8_ENABLE_SANDBOX
- // If the v8 sandbox is enabled, then all array buffers must be allocated
- // via the isolate. External buffers are not allowed. So, instead of wrapping
- // the allocated data we'll copy it instead.
-
- // TODO(@jasnell): It would be nice to use an abstracted utility to do this
- // branch instead of duplicating the V8_ENABLE_SANDBOX check each time.
+#if defined(V8_ENABLE_SANDBOX)
+ // When V8 sandboxed pointers are enabled, we have to copy into the memory
+ // cage. We still want to ensure we erase the data on free though, so
+ // provide a custom deleter that calls OPENSSL_cleanse.
+ if (!size())
+ return ArrayBuffer::NewBackingStore(env->isolate(), 0);
+ std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+ void* v8_data = allocator->Allocate(size());
+ CHECK(v8_data);
+ memcpy(v8_data, allocated_data_, size());
+ OPENSSL_clear_free(allocated_data_, size());
std::unique_ptr<BackingStore> ptr = ArrayBuffer::NewBackingStore(
- env->isolate(),
+ v8_data,
size(),
- BackingStoreInitializationMode::kUninitialized,
- BackingStoreOnFailureMode::kReturnNull);
- if (!ptr) {
- THROW_ERR_MEMORY_ALLOCATION_FAILED(env);
- return nullptr;
- }
- memcpy(ptr->Data(), allocated_data_, size());
- OPENSSL_clear_free(allocated_data_, size_);
+ [](void* data, size_t length, void*) {
+ OPENSSL_cleanse(data, length);
+ std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+ allocator->Free(data, length);
+ }, nullptr);
+ CHECK(ptr);
+ allocated_data_ = nullptr;
+ data_ = nullptr;
+ size_ = 0;
+ return ptr;
#else
std::unique_ptr<BackingStore> ptr = ArrayBuffer::NewBackingStore(
allocated_data_,
@@ -662,23 +668,16 @@ namespace {
// using OPENSSL_malloc. However, if the secure heap is
// initialized, SecureBuffer will automatically use it.
void SecureBuffer(const FunctionCallbackInfo<Value>& args) {
- Environment* env = Environment::GetCurrent(args);
+ CHECK(args[0]->IsUint32());
#ifdef V8_ENABLE_SANDBOX
- // The v8 sandbox is enabled, so we cannot use the secure heap because
- // the sandbox requires that all array buffers be allocated via the isolate.
- // That is fundamentally incompatible with the secure heap which allocates
- // in openssl's secure heap area. Instead we'll just throw an error here.
- //
- // That said, we really shouldn't get here in the first place since the
- // option to enable the secure heap is only available when the sandbox
- // is disabled.
- UNREACHABLE();
+ uint32_t len = args[0].As<Uint32>()->Value();
+ Local<ArrayBuffer> buffer = ArrayBuffer::New(args.GetIsolate(), len);
+ args.GetReturnValue().Set(Uint8Array::New(buffer, 0, len));
#else
- CHECK(args[0]->IsUint32());
+ Environment* env = Environment::GetCurrent(args);
uint32_t len = args[0].As<Uint32>()->Value();
auto data = DataPointer::SecureAlloc(len);
- CHECK(data.isSecure());
if (!data) {
return THROW_ERR_OPERATION_FAILED(env, "Allocation failed");
}
diff --git a/src/crypto/crypto_x509.cc b/src/crypto/crypto_x509.cc
index b30297eac08ad9587642b723f91d7e3b954294d4..4c5427596d1c90d3a413cdd9ff4f1151e657073d 100644
--- a/src/crypto/crypto_x509.cc
+++ b/src/crypto/crypto_x509.cc
@@ -135,19 +135,17 @@ MaybeLocal<Value> ToBuffer(Environment* env, BIOPointer* bio) {
return {};
BUF_MEM* mem = *bio;
#ifdef V8_ENABLE_SANDBOX
- // If the v8 sandbox is enabled, then all array buffers must be allocated
- // via the isolate. External buffers are not allowed. So, instead of wrapping
- // the BIOPointer we'll copy it instead.
- auto backing = ArrayBuffer::NewBackingStore(
- env->isolate(),
+ std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+ void* v8_data = allocator->Allocate(mem->length);
+ CHECK(v8_data);
+ memcpy(v8_data, mem->data, mem->length);
+ std::unique_ptr<v8::BackingStore> backing = ArrayBuffer::NewBackingStore(
+ v8_data,
mem->length,
- BackingStoreInitializationMode::kUninitialized,
- BackingStoreOnFailureMode::kReturnNull);
- if (!backing) {
- THROW_ERR_MEMORY_ALLOCATION_FAILED(env);
- return MaybeLocal<Value>();
- }
- memcpy(backing->Data(), mem->data, mem->length);
+ [](void* data, size_t length, void*) {
+ std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+ allocator->Free(data, length);
+ }, nullptr);
#else
auto backing = ArrayBuffer::NewBackingStore(
mem->data,
diff --git a/src/env-inl.h b/src/env-inl.h
index 97c43afb487b58c0c77bd59b4a6b6d7a13690053..5dfbd564d5bbd22ebf3b529a07b73e85cbe51986 100644
--- a/src/env-inl.h
+++ b/src/env-inl.h
@@ -44,6 +44,16 @@
namespace node {
+NoArrayBufferZeroFillScope::NoArrayBufferZeroFillScope(
+ IsolateData* isolate_data)
+ : node_allocator_(isolate_data->node_allocator()) {
+ if (node_allocator_ != nullptr) node_allocator_->zero_fill_field()[0] = 0;
+}
+
+NoArrayBufferZeroFillScope::~NoArrayBufferZeroFillScope() {
+ if (node_allocator_ != nullptr) node_allocator_->zero_fill_field()[0] = 1;
+}
+
inline v8::Isolate* IsolateData::isolate() const {
return isolate_;
}
diff --git a/src/env.h b/src/env.h
index 84a650885a79bc5c49efdc26f62ec8db48de775c..ba442937bf0d7831c9a84b5a57211e9f90c81705 100644
--- a/src/env.h
+++ b/src/env.h
@@ -111,6 +111,19 @@ class ModuleWrap;
class Environment;
class Realm;
+// Disables zero-filling for ArrayBuffer allocations in this scope. This is
+// similar to how we implement Buffer.allocUnsafe() in JS land.
+class NoArrayBufferZeroFillScope {
+ public:
+ inline explicit NoArrayBufferZeroFillScope(IsolateData* isolate_data);
+ inline ~NoArrayBufferZeroFillScope();
+
+ private:
+ NodeArrayBufferAllocator* node_allocator_;
+
+ friend class Environment;
+};
+
struct IsolateDataSerializeInfo {
std::vector<SnapshotIndex> primitive_values;
std::vector<PropInfo> template_values;
diff --git a/src/node_buffer.cc b/src/node_buffer.cc
index 49df0b4284748effb27b05ecd69f05699dd8be75..eb104ab80fd2052d1523cb2f2ecdb6d23ac3be98 100644
--- a/src/node_buffer.cc
+++ b/src/node_buffer.cc
@@ -81,6 +81,7 @@ using v8::SharedArrayBuffer;
using v8::String;
using v8::Uint32;
using v8::Uint8Array;
+using v8::Uint32Array;
using v8::Value;
namespace {
@@ -1243,6 +1244,45 @@ void SetBufferPrototype(const FunctionCallbackInfo<Value>& args) {
realm->set_buffer_prototype_object(proto);
}
+void GetZeroFillToggle(const FunctionCallbackInfo<Value>& args) {
+ Environment* env = Environment::GetCurrent(args);
+ NodeArrayBufferAllocator* allocator = env->isolate_data()->node_allocator();
+ Local<ArrayBuffer> ab;
+ // It can be a nullptr when running inside an isolate where we
+ // do not own the ArrayBuffer allocator.
+ if (allocator == nullptr || env->isolate_data()->is_building_snapshot()) {
+ // Create a dummy Uint32Array - the JS land can only toggle the C++ land
+ // setting when the allocator uses our toggle. With this the toggle in JS
+ // land results in no-ops.
+ // When building a snapshot, just use a dummy toggle as well to avoid
+ // introducing the dynamic external reference. We'll re-initialize the
+ // toggle with a real one connected to the C++ allocator after snapshot
+ // deserialization.
+
+ ab = ArrayBuffer::New(env->isolate(), sizeof(uint32_t));
+ } else {
+ // TODO(joyeecheung): save ab->GetBackingStore()->Data() in the Node.js
+ // array buffer allocator and include it into the C++ toggle while the
+ // Environment is still alive.
+ uint32_t* zero_fill_field = allocator->zero_fill_field();
+ std::unique_ptr<BackingStore> backing =
+ ArrayBuffer::NewBackingStore(zero_fill_field,
+ sizeof(*zero_fill_field),
+ [](void*, size_t, void*) {},
+ nullptr);
+ ab = ArrayBuffer::New(env->isolate(), std::move(backing));
+ }
+
+ if (ab->SetPrivate(env->context(),
+ env->untransferable_object_private_symbol(),
+ True(env->isolate()))
+ .IsNothing()) {
+ return;
+ }
+
+ args.GetReturnValue().Set(Uint32Array::New(ab, 0, 1));
+}
+
static void Btoa(const FunctionCallbackInfo<Value>& args) {
CHECK_EQ(args.Length(), 1);
Environment* env = Environment::GetCurrent(args);
@@ -1420,7 +1460,7 @@ inline size_t CheckNumberToSize(Local<Value> number) {
CHECK(value >= 0 && value < maxSize);
size_t size = static_cast<size_t>(value);
#ifdef V8_ENABLE_SANDBOX
- CHECK_LE(size, kMaxSafeBufferSizeForSandbox);
+ CHECK_LE(size, v8::internal::kMaxSafeBufferSizeForSandbox);
#endif
return size;
}
@@ -1638,6 +1678,7 @@ void Initialize(Local<Object> target,
"utf8WriteStatic",
SlowWriteString<UTF8>,
&fast_write_string_utf8);
+ SetMethod(context, target, "getZeroFillToggle", GetZeroFillToggle);
}
} // anonymous namespace
@@ -1686,6 +1727,7 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) {
registry->Register(StringWrite<HEX>);
registry->Register(StringWrite<UCS2>);
registry->Register(StringWrite<UTF8>);
+ registry->Register(GetZeroFillToggle);
registry->Register(CopyArrayBuffer);
registry->Register(CreateUnsafeArrayBuffer);
diff --git a/src/node_i18n.cc b/src/node_i18n.cc
index 3c4f419aa29470b3280174b58680b9421b0340b5..3b24ad2a2316f89d98b067e2c13988f87a9a00d2 100644
--- a/src/node_i18n.cc
+++ b/src/node_i18n.cc
@@ -105,7 +105,7 @@ namespace {
template <typename T>
MaybeLocal<Object> ToBufferEndian(Environment* env, MaybeStackBuffer<T>* buf) {
Local<Object> ret;
- if (!Buffer::New(env, buf).ToLocal(&ret)) {
+ if (!Buffer::Copy(env, reinterpret_cast<char*>(buf->out()), buf->length() * sizeof(T)).ToLocal(&ret)) {
return {};
}
@@ -182,7 +182,7 @@ MaybeLocal<Object> TranscodeLatin1ToUcs2(Environment* env,
return {};
}
- return Buffer::New(env, &destbuf);
+ return Buffer::Copy(env, reinterpret_cast<char*>(destbuf.out()), destbuf.length() * sizeof(UChar));
}
MaybeLocal<Object> TranscodeFromUcs2(Environment* env,
@@ -227,7 +227,7 @@ MaybeLocal<Object> TranscodeUcs2FromUtf8(Environment* env,
return {};
}
- return Buffer::New(env, &destbuf);
+ return Buffer::Copy(env, reinterpret_cast<char*>(destbuf.out()), destbuf.length() * sizeof(UChar));
}
MaybeLocal<Object> TranscodeUtf8FromUcs2(Environment* env,
@@ -251,7 +251,7 @@ MaybeLocal<Object> TranscodeUtf8FromUcs2(Environment* env,
return {};
}
- return Buffer::New(env, &destbuf);
+ return Buffer::Copy(env, reinterpret_cast<char*>(destbuf.out()), destbuf.length() * sizeof(char));
}
constexpr const char* EncodingName(const enum encoding encoding) {
diff --git a/src/node_internals.h b/src/node_internals.h
index 61a58b6ccfb26efefd6d3b61a1c8741f9550ae8d..29d1ecc2b209c9c3c2e956263ba2d57fb688b34c 100644
--- a/src/node_internals.h
+++ b/src/node_internals.h
@@ -124,6 +124,9 @@ v8::MaybeLocal<v8::Object> InitializePrivateSymbols(
class NodeArrayBufferAllocator : public ArrayBufferAllocator {
public:
+ NodeArrayBufferAllocator();
+ ~NodeArrayBufferAllocator() override;
+ inline uint32_t* zero_fill_field() { return zero_fill_field_; }
void* Allocate(size_t size) override; // Defined in src/node.cc
void* AllocateUninitialized(size_t size) override;
void Free(void* data, size_t size) override;
@@ -140,6 +143,7 @@ class NodeArrayBufferAllocator : public ArrayBufferAllocator {
}
private:
+ uint32_t* zero_fill_field_ = nullptr; // Boolean but exposed as uint32 to JS land.
std::atomic<size_t> total_mem_usage_ {0};
// Delegate to V8's allocator for compatibility with the V8 memory cage.
diff --git a/src/node_serdes.cc b/src/node_serdes.cc
index 00fcd4b6afccce47ff21c4447d9cd60f25c11835..5f96ee2051e5339456185efddb149c4d43093f31 100644
--- a/src/node_serdes.cc
+++ b/src/node_serdes.cc
@@ -29,6 +29,26 @@ using v8::ValueSerializer;
namespace serdes {
+v8::ArrayBuffer::Allocator* GetAllocator() {
+ static v8::ArrayBuffer::Allocator* allocator = v8::ArrayBuffer::Allocator::NewDefaultAllocator();
+ return allocator;
+}
+
+void* Reallocate(void* data, size_t old_length,
+ size_t new_length) {
+ if (old_length == new_length) return data;
+ uint8_t* new_data =
+ reinterpret_cast<uint8_t*>(GetAllocator()->AllocateUninitialized(new_length));
+ if (new_data == nullptr) return nullptr;
+ size_t bytes_to_copy = std::min(old_length, new_length);
+ memcpy(new_data, data, bytes_to_copy);
+ if (new_length > bytes_to_copy) {
+ memset(new_data + bytes_to_copy, 0, new_length - bytes_to_copy);
+ }
+ GetAllocator()->Free(data, old_length);
+ return new_data;
+}
+
class SerializerContext : public BaseObject,
public ValueSerializer::Delegate {
public:
@@ -37,10 +57,15 @@ class SerializerContext : public BaseObject,
~SerializerContext() override = default;
+ // v8::ValueSerializer::Delegate
void ThrowDataCloneError(Local<String> message) override;
Maybe<bool> WriteHostObject(Isolate* isolate, Local<Object> object) override;
Maybe<uint32_t> GetSharedArrayBufferId(
Isolate* isolate, Local<SharedArrayBuffer> shared_array_buffer) override;
+ void* ReallocateBufferMemory(void* old_buffer,
+ size_t old_length,
+ size_t* new_length) override;
+ void FreeBufferMemory(void* buffer) override;
static void SetTreatArrayBufferViewsAsHostObjects(
const FunctionCallbackInfo<Value>& args);
@@ -61,6 +86,7 @@ class SerializerContext : public BaseObject,
private:
ValueSerializer serializer_;
+ size_t last_length_ = 0;
};
class DeserializerContext : public BaseObject,
@@ -145,6 +171,24 @@ Maybe<uint32_t> SerializerContext::GetSharedArrayBufferId(
return id->Uint32Value(env()->context());
}
+void* SerializerContext::ReallocateBufferMemory(void* old_buffer,
+ size_t requested_size,
+ size_t* new_length) {
+ *new_length = std::max(static_cast<size_t>(4096), requested_size);
+ if (old_buffer) {
+ void* ret = Reallocate(old_buffer, last_length_, *new_length);
+ last_length_ = *new_length;
+ return ret;
+ } else {
+ last_length_ = *new_length;
+ return GetAllocator()->Allocate(*new_length);
+ }
+}
+
+void SerializerContext::FreeBufferMemory(void* buffer) {
+ GetAllocator()->Free(buffer, last_length_);
+}
+
Maybe<bool> SerializerContext::WriteHostObject(Isolate* isolate,
Local<Object> input) {
Local<Value> args[1] = { input };
@@ -211,10 +255,17 @@ void SerializerContext::ReleaseBuffer(const FunctionCallbackInfo<Value>& args) {
// Note: Both ValueSerializer and this Buffer::New() variant use malloc()
// as the underlying allocator.
std::pair<uint8_t*, size_t> ret = ctx->serializer_.Release();
- Local<Object> buf;
- if (Buffer::New(ctx->env(), reinterpret_cast<char*>(ret.first), ret.second)
- .ToLocal(&buf)) {
- args.GetReturnValue().Set(buf);
+ std::unique_ptr<v8::BackingStore> bs =
+ v8::ArrayBuffer::NewBackingStore(reinterpret_cast<char*>(ret.first), ret.second,
+ [](void* data, size_t length, void* deleter_data) {
+ if (data) GetAllocator()->Free(reinterpret_cast<char*>(data), length);
+ }, nullptr);
+ Local<ArrayBuffer> ab = v8::ArrayBuffer::New(ctx->env()->isolate(), std::move(bs));
+
+ auto buf = Buffer::New(ctx->env(), ab, 0, ret.second);
+
+ if (!buf.IsEmpty()) {
+ args.GetReturnValue().Set(buf.ToLocalChecked());
}
}
diff --git a/src/node_trace_events.cc b/src/node_trace_events.cc
index ef659f1c39f7ee958879bf395377bc99911fc346..225b1465b7c97d972a38968faf6d685017a80bf0 100644
--- a/src/node_trace_events.cc
+++ b/src/node_trace_events.cc
@@ -129,12 +129,28 @@ static void GetCategoryEnabledBuffer(const FunctionCallbackInfo<Value>& args) {
const uint8_t* enabled_pointer =
TRACE_EVENT_API_GET_CATEGORY_GROUP_ENABLED(category_name.out());
uint8_t* enabled_pointer_cast = const_cast<uint8_t*>(enabled_pointer);
+ uint8_t size = sizeof(*enabled_pointer_cast);
+#if defined(V8_ENABLE_SANDBOX)
+ std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+ void* v8_data = allocator->Allocate(size);
+ CHECK(v8_data);
+ memcpy(v8_data, enabled_pointer_cast, size);
+ std::unique_ptr<BackingStore> bs = ArrayBuffer::NewBackingStore(
+ v8_data,
+ size,
+ [](void* data, size_t length, void*) {
+ std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+ allocator->Free(data, length);
+ }, nullptr);
+#else
std::unique_ptr<BackingStore> bs = ArrayBuffer::NewBackingStore(
enabled_pointer_cast,
- sizeof(*enabled_pointer_cast),
+ size,
[](void*, size_t, void*) {},
nullptr);
+#endif
+
auto ab = ArrayBuffer::New(isolate, std::move(bs));
v8::Local<Uint8Array> u8 = v8::Uint8Array::New(ab, 0, 1);