From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Jeremy Rose
Date: Tue, 21 Jun 2022 10:04:21 -0700
Subject: support V8 sandboxed pointers

This refactors several allocators to allocate within the V8 memory cage,
allowing them to be compatible with the V8_SANDBOXED_POINTERS feature.

diff --git a/src/api/environment.cc b/src/api/environment.cc
index 53f05293bd94e159dfedf48735989e668acdd08e..5c8bc870dcf2e974036cf3bcb60fd288e59045d9 100644
--- a/src/api/environment.cc
+++ b/src/api/environment.cc
@@ -111,6 +111,14 @@ MaybeLocal<Value> PrepareStackTraceCallback(Local<Context> context,
   return result;
 }
 
+NodeArrayBufferAllocator::NodeArrayBufferAllocator() {
+  zero_fill_field_ = static_cast<uint32_t*>(allocator_->Allocate(sizeof(*zero_fill_field_)));
+}
+
+NodeArrayBufferAllocator::~NodeArrayBufferAllocator() {
+  allocator_->Free(zero_fill_field_, sizeof(*zero_fill_field_));
+}
+
 void* NodeArrayBufferAllocator::Allocate(size_t size) {
   void* ret;
   COUNT_GENERIC_USAGE("NodeArrayBufferAllocator.Allocate.ZeroFilled");
@@ -337,6 +345,12 @@ Isolate* NewIsolate(Isolate::CreateParams* params,
     // but also otherwise just doesn't work, and the only real alternative
     // is disabling shared-readonly-heap mode altogether.
     static Isolate::CreateParams first_params = *params;
+    // Clear allocator pointers to prevent use-after-free during static
+    // destruction. The static first_params can outlive V8's internal
+    // allocator systems, causing crashes when its destructor tries to
+    // free resources after V8 has shut down.
+    first_params.array_buffer_allocator = nullptr;
+    first_params.array_buffer_allocator_shared.reset();
     params->snapshot_blob = first_params.snapshot_blob;
     params->external_references = first_params.external_references;
   }
diff --git a/src/crypto/crypto_dh.cc b/src/crypto/crypto_dh.cc
index 46a7d1396dc1a175ae99f4e403721f1730fdd320..bbb0abb3b9563074d350578e0f5a8fa211046b17 100644
--- a/src/crypto/crypto_dh.cc
+++ b/src/crypto/crypto_dh.cc
@@ -61,17 +61,22 @@ MaybeLocal<Value> DataPointerToBuffer(Environment* env, DataPointer&& data) {
     bool secure;
   };
 #ifdef V8_ENABLE_SANDBOX
-  auto backing = ArrayBuffer::NewBackingStore(
-      env->isolate(),
-      data.size(),
-      BackingStoreInitializationMode::kUninitialized,
-      BackingStoreOnFailureMode::kReturnNull);
-  if (!backing) {
-    THROW_ERR_MEMORY_ALLOCATION_FAILED(env);
-    return MaybeLocal<Value>();
-  }
+  std::unique_ptr<BackingStore> backing;
   if (data.size() > 0) {
-    memcpy(backing->Data(), data.get(), data.size());
+    std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+    void* v8_data = allocator->Allocate(data.size());
+    CHECK(v8_data);
+    memcpy(v8_data, data.get(), data.size());
+    backing = ArrayBuffer::NewBackingStore(
+        v8_data,
+        data.size(),
+        [](void* data, size_t length, void*) {
+          std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+          allocator->Free(data, length);
+        }, nullptr);
+  } else {
+    NoArrayBufferZeroFillScope no_zero_fill_scope(env->isolate_data());
+    backing = v8::ArrayBuffer::NewBackingStore(env->isolate(), data.size());
   }
 #else
   auto backing = ArrayBuffer::NewBackingStore(
diff --git a/src/crypto/crypto_util.cc b/src/crypto/crypto_util.cc
index 12b0d804c6f1d4998b85160b0aac8eb7a3b5576b..27bd93769233dc65a064710db4095d9cdc3a8b1a 100644
--- a/src/crypto/crypto_util.cc
+++ b/src/crypto/crypto_util.cc
@@ -346,24 +346,30 @@ std::unique_ptr<BackingStore> ByteSource::ReleaseToBackingStore(
   // It's ok for allocated_data_ to be nullptr but
   // only if size_ is zero.
   CHECK_IMPLIES(size_ > 0, allocated_data_ != nullptr);
-#ifdef V8_ENABLE_SANDBOX
-  // If the v8 sandbox is enabled, then all array buffers must be allocated
-  // via the isolate. External buffers are not allowed. So, instead of wrapping
-  // the allocated data we'll copy it instead.
-
-  // TODO(@jasnell): It would be nice to use an abstracted utility to do this
-  // branch instead of duplicating the V8_ENABLE_SANDBOX check each time.
+#if defined(V8_ENABLE_SANDBOX)
+  // When V8 sandboxed pointers are enabled, we have to copy into the memory
+  // cage. We still want to ensure we erase the data on free though, so
+  // provide a custom deleter that calls OPENSSL_cleanse.
+  if (!size())
+    return ArrayBuffer::NewBackingStore(env->isolate(), 0);
+  std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+  void* v8_data = allocator->Allocate(size());
+  CHECK(v8_data);
+  memcpy(v8_data, allocated_data_, size());
+  OPENSSL_clear_free(allocated_data_, size());
   std::unique_ptr<BackingStore> ptr = ArrayBuffer::NewBackingStore(
-      env->isolate(),
+      v8_data,
       size(),
-      BackingStoreInitializationMode::kUninitialized,
-      BackingStoreOnFailureMode::kReturnNull);
-  if (!ptr) {
-    THROW_ERR_MEMORY_ALLOCATION_FAILED(env);
-    return nullptr;
-  }
-  memcpy(ptr->Data(), allocated_data_, size());
-  OPENSSL_clear_free(allocated_data_, size_);
+      [](void* data, size_t length, void*) {
+        OPENSSL_cleanse(data, length);
+        std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+        allocator->Free(data, length);
+      }, nullptr);
+  CHECK(ptr);
+  allocated_data_ = nullptr;
+  data_ = nullptr;
+  size_ = 0;
+  return ptr;
 #else
   std::unique_ptr<BackingStore> ptr = ArrayBuffer::NewBackingStore(
       allocated_data_,
@@ -662,23 +668,16 @@ namespace {
 // using OPENSSL_malloc. However, if the secure heap is
 // initialized, SecureBuffer will automatically use it.
 void SecureBuffer(const FunctionCallbackInfo<Value>& args) {
-  Environment* env = Environment::GetCurrent(args);
+  CHECK(args[0]->IsUint32());
 #ifdef V8_ENABLE_SANDBOX
-  // The v8 sandbox is enabled, so we cannot use the secure heap because
-  // the sandbox requires that all array buffers be allocated via the isolate.
-  // That is fundamentally incompatible with the secure heap which allocates
-  // in openssl's secure heap area. Instead we'll just throw an error here.
-  //
-  // That said, we really shouldn't get here in the first place since the
-  // option to enable the secure heap is only available when the sandbox
-  // is disabled.
-  UNREACHABLE();
+  uint32_t len = args[0].As<Uint32>()->Value();
+  Local<ArrayBuffer> buffer = ArrayBuffer::New(args.GetIsolate(), len);
+  args.GetReturnValue().Set(Uint8Array::New(buffer, 0, len));
 #else
-  CHECK(args[0]->IsUint32());
+  Environment* env = Environment::GetCurrent(args);
   uint32_t len = args[0].As<Uint32>()->Value();
   auto data = DataPointer::SecureAlloc(len);
-  CHECK(data.isSecure());
   if (!data) {
     return THROW_ERR_OPERATION_FAILED(env, "Allocation failed");
   }
diff --git a/src/crypto/crypto_x509.cc b/src/crypto/crypto_x509.cc
index b30297eac08ad9587642b723f91d7e3b954294d4..4c5427596d1c90d3a413cdd9ff4f1151e657073d 100644
--- a/src/crypto/crypto_x509.cc
+++ b/src/crypto/crypto_x509.cc
@@ -135,19 +135,17 @@ MaybeLocal<Value> ToBuffer(Environment* env, BIOPointer* bio) {
     return {};
   BUF_MEM* mem = *bio;
 #ifdef V8_ENABLE_SANDBOX
-  // If the v8 sandbox is enabled, then all array buffers must be allocated
-  // via the isolate. External buffers are not allowed. So, instead of wrapping
-  // the BIOPointer we'll copy it instead.
-  auto backing = ArrayBuffer::NewBackingStore(
-      env->isolate(),
+  std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+  void* v8_data = allocator->Allocate(mem->length);
+  CHECK(v8_data);
+  memcpy(v8_data, mem->data, mem->length);
+  std::unique_ptr<BackingStore> backing = ArrayBuffer::NewBackingStore(
+      v8_data,
       mem->length,
-      BackingStoreInitializationMode::kUninitialized,
-      BackingStoreOnFailureMode::kReturnNull);
-  if (!backing) {
-    THROW_ERR_MEMORY_ALLOCATION_FAILED(env);
-    return MaybeLocal<Value>();
-  }
-  memcpy(backing->Data(), mem->data, mem->length);
+      [](void* data, size_t length, void*) {
+        std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+        allocator->Free(data, length);
+      }, nullptr);
 #else
   auto backing = ArrayBuffer::NewBackingStore(
       mem->data,
diff --git a/src/env-inl.h b/src/env-inl.h
index 74bbb9fb83246a90bc425e259150f0868020ac9e..777335321fc9037d91d88fb5852bbf5b05f50d0a 100644
--- a/src/env-inl.h
+++ b/src/env-inl.h
@@ -44,6 +44,16 @@ namespace node {
 
+NoArrayBufferZeroFillScope::NoArrayBufferZeroFillScope(
+    IsolateData* isolate_data)
+    : node_allocator_(isolate_data->node_allocator()) {
+  if (node_allocator_ != nullptr) node_allocator_->zero_fill_field()[0] = 0;
+}
+
+NoArrayBufferZeroFillScope::~NoArrayBufferZeroFillScope() {
+  if (node_allocator_ != nullptr) node_allocator_->zero_fill_field()[0] = 1;
+}
+
 inline v8::Isolate* IsolateData::isolate() const {
   return isolate_;
 }
diff --git a/src/env.h b/src/env.h
index 754ddf7b331465c56081db05d6fd2a45fe50596a..db1ed241f730791ba3e3f93349cb5ff3437c738d 100644
--- a/src/env.h
+++ b/src/env.h
@@ -111,6 +111,19 @@ class ModuleWrap;
 class Environment;
 class Realm;
 
+// Disables zero-filling for ArrayBuffer allocations in this scope. This is
+// similar to how we implement Buffer.allocUnsafe() in JS land.
+class NoArrayBufferZeroFillScope {
+ public:
+  inline explicit NoArrayBufferZeroFillScope(IsolateData* isolate_data);
+  inline ~NoArrayBufferZeroFillScope();
+
+ private:
+  NodeArrayBufferAllocator* node_allocator_;
+
+  friend class Environment;
+};
+
 struct IsolateDataSerializeInfo {
   std::vector<SnapshotIndex> primitive_values;
   std::vector<PropInfo> template_values;
diff --git a/src/node_buffer.cc b/src/node_buffer.cc
index ddee7b7e40c3ee4054b2b15b75154607aa6431ed..9b74343d01913a27bde608d73d890ae127143960 100644
--- a/src/node_buffer.cc
+++ b/src/node_buffer.cc
@@ -81,6 +81,7 @@ using v8::SharedArrayBuffer;
 using v8::String;
 using v8::Uint32;
 using v8::Uint8Array;
+using v8::Uint32Array;
 using v8::Value;
 
 namespace {
@@ -1243,6 +1244,45 @@ void SetBufferPrototype(const FunctionCallbackInfo<Value>& args) {
   realm->set_buffer_prototype_object(proto);
 }
 
+void GetZeroFillToggle(const FunctionCallbackInfo<Value>& args) {
+  Environment* env = Environment::GetCurrent(args);
+  NodeArrayBufferAllocator* allocator = env->isolate_data()->node_allocator();
+  Local<ArrayBuffer> ab;
+  // It can be a nullptr when running inside an isolate where we
+  // do not own the ArrayBuffer allocator.
+  if (allocator == nullptr || env->isolate_data()->is_building_snapshot()) {
+    // Create a dummy Uint32Array - the JS land can only toggle the C++ land
+    // setting when the allocator uses our toggle. With this the toggle in JS
+    // land results in no-ops.
+    // When building a snapshot, just use a dummy toggle as well to avoid
+    // introducing the dynamic external reference. We'll re-initialize the
+    // toggle with a real one connected to the C++ allocator after snapshot
+    // deserialization.
+
+    ab = ArrayBuffer::New(env->isolate(), sizeof(uint32_t));
+  } else {
+    // TODO(joyeecheung): save ab->GetBackingStore()->Data() in the Node.js
+    // array buffer allocator and include it into the C++ toggle while the
+    // Environment is still alive.
+    uint32_t* zero_fill_field = allocator->zero_fill_field();
+    std::unique_ptr<BackingStore> backing =
+        ArrayBuffer::NewBackingStore(zero_fill_field,
+                                     sizeof(*zero_fill_field),
+                                     [](void*, size_t, void*) {},
+                                     nullptr);
+    ab = ArrayBuffer::New(env->isolate(), std::move(backing));
+  }
+
+  if (ab->SetPrivate(env->context(),
+                     env->untransferable_object_private_symbol(),
+                     True(env->isolate()))
+          .IsNothing()) {
+    return;
+  }
+
+  args.GetReturnValue().Set(Uint32Array::New(ab, 0, 1));
+}
+
 static void Btoa(const FunctionCallbackInfo<Value>& args) {
   CHECK_EQ(args.Length(), 1);
   Environment* env = Environment::GetCurrent(args);
@@ -1420,7 +1460,7 @@ inline size_t CheckNumberToSize(Local<Number> number) {
   CHECK(value >= 0 && value < maxSize);
   size_t size = static_cast<size_t>(value);
 #ifdef V8_ENABLE_SANDBOX
-  CHECK_LE(size, kMaxSafeBufferSizeForSandbox);
+  CHECK_LE(size, v8::internal::kMaxSafeBufferSizeForSandbox);
 #endif
   return size;
 }
@@ -1638,6 +1678,7 @@ void Initialize(Local<Object> target,
                 "utf8WriteStatic",
                 SlowWriteString<UTF8>,
                 &fast_write_string_utf8);
 
+  SetMethod(context, target, "getZeroFillToggle", GetZeroFillToggle);
 }
 }  // anonymous namespace
@@ -1686,6 +1727,7 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) {
   registry->Register(StringWrite);
   registry->Register(StringWrite);
   registry->Register(StringWrite);
+  registry->Register(GetZeroFillToggle);
 
   registry->Register(CopyArrayBuffer);
   registry->Register(CreateUnsafeArrayBuffer);
diff --git a/src/node_i18n.cc b/src/node_i18n.cc
index 3c4f419aa29470b3280174b58680b9421b0340b5..3b24ad2a2316f89d98b067e2c13988f87a9a00d2 100644
--- a/src/node_i18n.cc
+++ b/src/node_i18n.cc
@@ -105,7 +105,7 @@ namespace {
 template <typename T>
 MaybeLocal<Object> ToBufferEndian(Environment* env, MaybeStackBuffer<T>* buf) {
   Local<Object> ret;
-  if (!Buffer::New(env, buf).ToLocal(&ret)) {
+  if (!Buffer::Copy(env, reinterpret_cast<const char*>(buf->out()), buf->length() * sizeof(T)).ToLocal(&ret)) {
     return {};
   }
 
@@ -182,7 +182,7 @@ MaybeLocal<Object> TranscodeLatin1ToUcs2(Environment* env,
     return {};
   }
 
-  return Buffer::New(env, &destbuf);
+  return Buffer::Copy(env, reinterpret_cast<const char*>(destbuf.out()), destbuf.length() * sizeof(UChar));
 }
 
 MaybeLocal<Object> TranscodeFromUcs2(Environment* env,
@@ -227,7 +227,7 @@ MaybeLocal<Object> TranscodeUcs2FromUtf8(Environment* env,
     return {};
   }
 
-  return Buffer::New(env, &destbuf);
+  return Buffer::Copy(env, reinterpret_cast<const char*>(destbuf.out()), destbuf.length() * sizeof(UChar));
 }
 
 MaybeLocal<Object> TranscodeUtf8FromUcs2(Environment* env,
@@ -251,7 +251,7 @@ MaybeLocal<Object> TranscodeUtf8FromUcs2(Environment* env,
     return {};
   }
 
-  return Buffer::New(env, &destbuf);
+  return Buffer::Copy(env, reinterpret_cast<const char*>(destbuf.out()), destbuf.length() * sizeof(char));
 }
 
 constexpr const char* EncodingName(const enum encoding encoding) {
diff --git a/src/node_internals.h b/src/node_internals.h
index 61a58b6ccfb26efefd6d3b61a1c8741f9550ae8d..29d1ecc2b209c9c3c2e956263ba2d57fb688b34c 100644
--- a/src/node_internals.h
+++ b/src/node_internals.h
@@ -124,6 +124,9 @@ v8::MaybeLocal InitializePrivateSymbols(
 
 class NodeArrayBufferAllocator : public ArrayBufferAllocator {
  public:
+  NodeArrayBufferAllocator();
+  ~NodeArrayBufferAllocator() override;
+
   inline uint32_t* zero_fill_field() { return zero_fill_field_; }
 
   void* Allocate(size_t size) override;  // Defined in src/node.cc
   void* AllocateUninitialized(size_t size) override;
   void Free(void* data, size_t size) override;
@@ -140,6 +143,7 @@ class NodeArrayBufferAllocator : public ArrayBufferAllocator {
   }
 
  private:
+  uint32_t* zero_fill_field_ = nullptr;  // Boolean but exposed as uint32 to JS land.
   std::atomic<size_t> total_mem_usage_ {0};
 
   // Delegate to V8's allocator for compatibility with the V8 memory cage.
diff --git a/src/node_serdes.cc b/src/node_serdes.cc
index 00fcd4b6afccce47ff21c4447d9cd60f25c11835..5f96ee2051e5339456185efddb149c4d43093f31 100644
--- a/src/node_serdes.cc
+++ b/src/node_serdes.cc
@@ -29,6 +29,26 @@ using v8::ValueSerializer;
 
 namespace serdes {
 
+v8::ArrayBuffer::Allocator* GetAllocator() {
+  static v8::ArrayBuffer::Allocator* allocator = v8::ArrayBuffer::Allocator::NewDefaultAllocator();
+  return allocator;
+}
+
+void* Reallocate(void* data, size_t old_length,
+                 size_t new_length) {
+  if (old_length == new_length) return data;
+  uint8_t* new_data =
+      reinterpret_cast<uint8_t*>(GetAllocator()->AllocateUninitialized(new_length));
+  if (new_data == nullptr) return nullptr;
+  size_t bytes_to_copy = std::min(old_length, new_length);
+  memcpy(new_data, data, bytes_to_copy);
+  if (new_length > bytes_to_copy) {
+    memset(new_data + bytes_to_copy, 0, new_length - bytes_to_copy);
+  }
+  GetAllocator()->Free(data, old_length);
+  return new_data;
+}
+
 class SerializerContext : public BaseObject,
                           public ValueSerializer::Delegate {
  public:
@@ -37,10 +57,15 @@ class SerializerContext : public BaseObject,
 
   ~SerializerContext() override = default;
 
+  // v8::ValueSerializer::Delegate
   void ThrowDataCloneError(Local<String> message) override;
   Maybe<bool> WriteHostObject(Isolate* isolate, Local<Object> object) override;
   Maybe<uint32_t> GetSharedArrayBufferId(
       Isolate* isolate, Local<SharedArrayBuffer> shared_array_buffer) override;
+  void* ReallocateBufferMemory(void* old_buffer,
+                               size_t old_length,
+                               size_t* new_length) override;
+  void FreeBufferMemory(void* buffer) override;
 
   static void SetTreatArrayBufferViewsAsHostObjects(
       const FunctionCallbackInfo<Value>& args);
@@ -61,6 +86,7 @@ class SerializerContext : public BaseObject,
 
  private:
   ValueSerializer serializer_;
+  size_t last_length_ = 0;
 };
 
 class DeserializerContext : public BaseObject,
@@ -145,6 +171,24 @@ Maybe<uint32_t> SerializerContext::GetSharedArrayBufferId(
   return id->Uint32Value(env()->context());
 }
 
+void* SerializerContext::ReallocateBufferMemory(void* old_buffer,
+                                                size_t requested_size,
+                                                size_t* new_length) {
+  *new_length = std::max(static_cast<size_t>(4096), requested_size);
+  if (old_buffer) {
+    void* ret = Reallocate(old_buffer, last_length_, *new_length);
+    last_length_ = *new_length;
+    return ret;
+  } else {
+    last_length_ = *new_length;
+    return GetAllocator()->Allocate(*new_length);
+  }
+}
+
+void SerializerContext::FreeBufferMemory(void* buffer) {
+  GetAllocator()->Free(buffer, last_length_);
+}
+
 Maybe<bool> SerializerContext::WriteHostObject(Isolate* isolate,
                                                Local<Object> input) {
   Local<Value> args[1] = { input };
@@ -211,10 +255,17 @@ void SerializerContext::ReleaseBuffer(const FunctionCallbackInfo<Value>& args) {
   // Note: Both ValueSerializer and this Buffer::New() variant use malloc()
   // as the underlying allocator.
   std::pair<uint8_t*, size_t> ret = ctx->serializer_.Release();
-  Local<Object> buf;
-  if (Buffer::New(ctx->env(), reinterpret_cast<char*>(ret.first), ret.second)
-          .ToLocal(&buf)) {
-    args.GetReturnValue().Set(buf);
+  std::unique_ptr<v8::BackingStore> bs =
+      v8::ArrayBuffer::NewBackingStore(reinterpret_cast<char*>(ret.first), ret.second,
+        [](void* data, size_t length, void* deleter_data) {
+          if (data) GetAllocator()->Free(reinterpret_cast<char*>(data), length);
+        }, nullptr);
+  Local<ArrayBuffer> ab = v8::ArrayBuffer::New(ctx->env()->isolate(), std::move(bs));
+
+  auto buf = Buffer::New(ctx->env(), ab, 0, ret.second);
+
+  if (!buf.IsEmpty()) {
+    args.GetReturnValue().Set(buf.ToLocalChecked());
   }
 }
diff --git a/src/node_trace_events.cc b/src/node_trace_events.cc
index ef659f1c39f7ee958879bf395377bc99911fc346..225b1465b7c97d972a38968faf6d685017a80bf0 100644
--- a/src/node_trace_events.cc
+++ b/src/node_trace_events.cc
@@ -129,12 +129,28 @@ static void GetCategoryEnabledBuffer(const FunctionCallbackInfo<Value>& args) {
   const uint8_t* enabled_pointer =
       TRACE_EVENT_API_GET_CATEGORY_GROUP_ENABLED(category_name.out());
   uint8_t* enabled_pointer_cast = const_cast<uint8_t*>(enabled_pointer);
+  uint8_t size = sizeof(*enabled_pointer_cast);
+#if defined(V8_ENABLE_SANDBOX)
+  std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+  void* v8_data = allocator->Allocate(size);
+  CHECK(v8_data);
+  memcpy(v8_data, enabled_pointer_cast, size);
+  std::unique_ptr<BackingStore> bs = ArrayBuffer::NewBackingStore(
+      v8_data,
+      size,
+      [](void* data, size_t length, void*) {
+        std::unique_ptr<ArrayBuffer::Allocator> allocator(ArrayBuffer::Allocator::NewDefaultAllocator());
+        allocator->Free(data, length);
+      }, nullptr);
+#else
   std::unique_ptr<BackingStore> bs = ArrayBuffer::NewBackingStore(
       enabled_pointer_cast,
-      sizeof(*enabled_pointer_cast),
+      size,
       [](void*, size_t, void*) {},
       nullptr);
+#endif
+
   auto ab = ArrayBuffer::New(isolate, std::move(bs));
   v8::Local<v8::Uint8Array> u8 = v8::Uint8Array::New(ab, 0, 1);
diff --git a/test/parallel/test-buffer-concat.js b/test/parallel/test-buffer-concat.js
index 9f0eadd2f10163c3c30657c84eb0ba55db17364d..7c1a6f71ca24dd2e54f9f5987aae2014b44bfba6 100644
--- a/test/parallel/test-buffer-concat.js
+++ b/test/parallel/test-buffer-concat.js
@@ -84,8 +84,7 @@ assert.throws(() => {
   Buffer.concat([Buffer.from('hello')], -2);
 }, {
   code: 'ERR_OUT_OF_RANGE',
-  message: 'The value of "length" is out of range. It must be >= 0 && <= 9007199254740991. ' +
-           'Received -2'
+  message: /The value of "length" is out of range\. It must be >= 0 && <= (?:34359738367|9007199254740991)\. Received -2/
 });
 
 // eslint-disable-next-line node-core/crypto-check
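
The patch repeats one pattern in crypto_dh.cc, crypto_util.cc, crypto_x509.cc and node_trace_events.cc: when V8_ENABLE_SANDBOX is defined, an ArrayBuffer may not point at externally owned memory, so the bytes are first copied into memory obtained from a v8::ArrayBuffer::Allocator (which allocates inside the V8 memory cage) and the copy is released again by the backing store's deleter. The sketch below restates that pattern as a standalone helper; it is not part of the patch, and the name CopyIntoCageBackingStore is hypothetical.

    // Sketch only, assuming plain V8 headers; not part of the patch above.
    #include <cstring>
    #include <memory>

    #include "v8.h"

    // Copies `size` bytes at `src` into sandbox-compatible memory and hands
    // ownership of the copy to the returned v8::BackingStore.
    std::unique_ptr<v8::BackingStore> CopyIntoCageBackingStore(const void* src,
                                                               size_t size) {
      // The default allocator hands out memory that lies inside the V8
      // sandbox when sandboxed pointers are enabled.
      std::unique_ptr<v8::ArrayBuffer::Allocator> allocator(
          v8::ArrayBuffer::Allocator::NewDefaultAllocator());
      void* data = allocator->Allocate(size);
      if (data == nullptr) return nullptr;
      if (size > 0) memcpy(data, src, size);
      // The deleter recreates a default allocator to free the copy, mirroring
      // the lambdas used throughout the patch.
      return v8::ArrayBuffer::NewBackingStore(
          data, size,
          [](void* data, size_t length, void*) {
            std::unique_ptr<v8::ArrayBuffer::Allocator> allocator(
                v8::ArrayBuffer::Allocator::NewDefaultAllocator());
            allocator->Free(data, length);
          },
          nullptr);
    }

Callers that must scrub the bytes on release, as ByteSource::ReleaseToBackingStore does, would add an OPENSSL_cleanse() call to the deleter before freeing.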
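
The zero-fill machinery restored above works in two halves: GetZeroFillToggle() in node_buffer.cc exposes the allocator's zero_fill_field() to JS as a one-element Uint32Array so Buffer.allocUnsafe() can temporarily disable zero-filling, while NoArrayBufferZeroFillScope in env.h/env-inl.h flips the same field from C++, which is how the empty-buffer branch in crypto_dh.cc avoids an unnecessary memset. A minimal C++ usage sketch, assuming Node's internal headers plus an Environment* env and a size_t length in the surrounding code:

    // Sketch only: allocate an uninitialized (not zero-filled) backing store
    // through the isolate's NodeArrayBufferAllocator. `env` and `length` are
    // assumed to be provided by the caller.
    {
      NoArrayBufferZeroFillScope no_zero_fill_scope(env->isolate_data());
      std::unique_ptr<v8::BackingStore> store =
          v8::ArrayBuffer::NewBackingStore(env->isolate(), length);
      // When the scope is destroyed, zero_fill_field()[0] is set back to 1,
      // so subsequent allocations are zero-filled again.
    }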