v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
sandbox.h
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_SANDBOX_SANDBOX_H_
#define V8_SANDBOX_SANDBOX_H_

#include "include/v8-internal.h"
#include "include/v8-platform.h"
#include "include/v8config.h"
#include "src/base/bounds.h"
#include "src/common/globals.h"

#if V8_ENABLE_WEBASSEMBLY
#include "src/trap-handler/trap-handler.h"
#endif  // V8_ENABLE_WEBASSEMBLY

#include "testing/gtest/include/gtest/gtest_prod.h"  // nogncheck

namespace v8 {
namespace internal {

#ifdef V8_ENABLE_SANDBOX

// The V8 Sandbox.
//
// When enabled, V8 places the heap, ArrayBuffer backing stores, WASM memory
// buffers, and other sandboxed objects inside one large region of virtual
// address space (see the layout diagram below). The goal is that an attacker
// who can corrupt memory inside this region cannot use that to corrupt memory
// outside of it.
class V8_EXPORT_PRIVATE Sandbox {
 public:
  // +-  ~~~  -+----------------------------------------  ~~~  -+-  ~~~  -+
  // |  32 GB  |                 (Ideally) 1 TB                 |  32 GB  |
  // |         |                                                |         |
  // |  Guard  |  4 GB    :  ArrayBuffer backing stores,        |  Guard  |
  // |  Region | V8 Heap  :  WASM memory buffers, and           |  Region |
  // | (front) |  Region  :  any other sandboxed objects.       | (back)  |
  // +-  ~~~  -+----------+-----------------------------  ~~~  -+-  ~~~  -+
  //           ^                                                ^
  //          base                                             end
  //           < - - - - - - - - - - - size - - - - - - - - - - >
  // < - - - - - - - - - - - - - reservation_size - - - - - - - - - - - - >
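
  // As a sketch of the arithmetic implied by the diagram above (assuming a
  // fully-reserved sandbox with guard regions; the accessor names are the
  // ones declared below, and kSandboxGuardRegionSize is assumed to be the
  // 32 GB guard region constant from include/v8-internal.h):
  //
  //   end() == base() + size()
  //   reservation_size() == size() + 2 * kSandboxGuardRegionSize
  //   Contains(addr)  iff  base() <= addr < end()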

  Sandbox() = default;

  Sandbox(const Sandbox&) = delete;
  Sandbox& operator=(const Sandbox&) = delete;

  // Currently, if not enough virtual memory can be reserved for the sandbox,
  // we will fall back to a partially-reserved sandbox. This constant can be
  // used to determine whether this fallback is enabled.
  static constexpr bool kFallbackToPartiallyReservedSandboxAllowed = true;
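
  // Illustrative sketch of how a caller could detect that this fallback
  // kicked in after initialization. `vas` here is a stand-in for a
  // caller-provided v8::VirtualAddressSpace:
  //
  //   Sandbox sandbox;
  //   sandbox.Initialize(vas);
  //   if (sandbox.is_partially_reserved()) {
  //     // Not all of the sandbox's address range could be reserved, so the
  //     // sandbox lacks its full security properties.
  //   }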

  // Initializes this sandbox, allocating its virtual address subspace inside
  // the provided virtual address space.
  void Initialize(v8::VirtualAddressSpace* vas);

  // Tears down this sandbox and frees its virtual address space reservation.
  void TearDown();

  // Returns true if this sandbox has been initialized.
  bool is_initialized() const { return initialized_; }

  // Returns true if this sandbox is a partially-reserved sandbox, i.e. one
  // for which only part of the address range is actually reserved.
  bool is_partially_reserved() const { return reservation_size_ < size_; }

  // Returns true if the first four gigabytes of the process' address space
  // are inaccessible, in which case a Smi value misinterpreted as a pointer
  // cannot be dereferenced.
  bool smi_address_range_is_inaccessible() const {
    return first_four_gb_of_address_space_are_reserved_;
  }

  // The base address of the sandbox.
  Address base() const { return base_; }

  // The address right after the end of the sandbox.
  Address end() const { return end_; }

  // The size of the sandbox in bytes.
  size_t size() const { return size_; }

  // The size of the virtual address space reservation backing the sandbox, in
  // bytes. This is larger than the sandbox size when guard regions are used
  // and smaller for a partially-reserved sandbox.
  size_t reservation_size() const { return reservation_size_; }

  // The virtual address subspace backing this sandbox.
  v8::VirtualAddressSpace* address_space() const {
    return address_space_.get();
  }

  // A page allocator instance that allocates pages inside this sandbox.
  v8::PageAllocator* page_allocator() const {
    return sandbox_page_allocator_.get();
  }

  // Returns true if the given address lies within the sandbox address space.
  bool Contains(Address addr) const {
    return base::IsInHalfOpenRange(addr, base_, base_ + size_);
  }

  // Returns true if the given pointer points into the sandbox address space.
  bool Contains(void* ptr) const {
    return Contains(reinterpret_cast<Address>(ptr));
  }

  // Returns true if the given address lies within the virtual address space
  // reservation backing the sandbox. In contrast to Contains(), this includes
  // the guard regions of a fully-reserved sandbox and covers only the
  // actually-reserved range of a partially-reserved one.
  bool ReservationContains(Address addr) const {
    return base::IsInHalfOpenRange(addr, reservation_base_,
                                   reservation_base_ + reservation_size_);
  }
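
  // Sketch of the difference between Contains() and ReservationContains(),
  // assuming an initialized, fully-reserved sandbox `s`:
  //
  //   Address in_guard = s.base() - 1;  // inside the front guard region
  //   s.Contains(in_guard);             // false: outside [base, base + size)
  //   s.ReservationContains(in_guard);  // true: the guard regions belong to
  //                                     // the reservation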

  // Constants that live inside the sandbox, such as the empty backing store
  // buffer.
  class SandboxedPointerConstants final {
   public:
    Address empty_backing_store_buffer() const {
      return empty_backing_store_buffer_;
    }
    Address empty_backing_store_buffer_address() const {
      return reinterpret_cast<Address>(&empty_backing_store_buffer_);
    }
    void set_empty_backing_store_buffer(Address value) {
      empty_backing_store_buffer_ = value;
    }

    void Reset() { empty_backing_store_buffer_ = 0; }

   private:
    Address empty_backing_store_buffer_ = 0;
  };
  const SandboxedPointerConstants& constants() const { return constants_; }

  Address base_address() const { return reinterpret_cast<Address>(&base_); }
  Address end_address() const { return reinterpret_cast<Address>(&end_); }
  Address size_address() const { return reinterpret_cast<Address>(&size_); }

  static void InitializeDefaultOncePerProcess(v8::VirtualAddressSpace* vas);
  static void TearDownDefault();

  // Creates a new sandbox, allocating a fresh pointer cage. Aborts if new
  // sandboxes cannot be created in this build configuration.
  static Sandbox* New(v8::VirtualAddressSpace* vas);

#ifdef V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
#ifdef USING_V8_SHARED_PRIVATE
  static Sandbox* current() { return current_non_inlined(); }
  static void set_current(Sandbox* sandbox) {
    set_current_non_inlined(sandbox);
  }
#else   // !USING_V8_SHARED_PRIVATE
  static Sandbox* current() { return current_; }
  static void set_current(Sandbox* sandbox) { current_ = sandbox; }
#endif  // !USING_V8_SHARED_PRIVATE
#else   // !V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
  static Sandbox* current() { return GetDefault(); }
#endif  // !V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES

  V8_INLINE static Sandbox* GetDefault() { return default_sandbox_; }
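
  // Illustration of how the two accessors above relate (assuming sandboxes
  // have been initialized): with V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES,
  // current() returns the sandbox of the current thread's pointer cage, which
  // may differ from the process-wide default; otherwise they are the same.
  //
  //   Sandbox* sandbox = Sandbox::current();
  //   if (sandbox != Sandbox::GetDefault()) {
  //     // Running inside a non-default pointer cage.
  //   }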

 private:
  // The SequentialUnmapperTest calls the private Initialize method below to
  // create a sandbox without guard regions, as the guard regions would
  // consume too much memory.
  friend class SequentialUnmapperTest;

  // These tests call the private Initialize methods below.
  FRIEND_TEST(SandboxTest, InitializationWithSize);
  FRIEND_TEST(SandboxTest, PartiallyReservedSandbox);

  // The default process-wide sandbox.
  static Sandbox* default_sandbox_;

  // We allow tests to disable the guard regions around the sandbox. This is
  // useful, for example, for tests like the SequentialUnmapperTest which track
  // page allocations and so would incur a large overhead from the guard
  // regions. The provided virtual address space must be able to allocate
  // subspaces. The size must be a multiple of the allocation granularity of
  // the virtual address space.
  bool Initialize(v8::VirtualAddressSpace* vas, size_t size,
                  bool use_guard_regions);

  // Used when reserving virtual memory is too expensive. A partially-reserved
  // sandbox does not reserve all of its virtual memory and so does not have
  // the desired security properties, as unrelated mappings could end up inside
  // of it and be corrupted. The size and size_to_reserve parameters must be
  // multiples of the allocation granularity of the virtual address space.
  bool InitializeAsPartiallyReservedSandbox(v8::VirtualAddressSpace* vas,
                                            size_t size,
                                            size_t size_to_reserve);

  // Performs final initialization steps after the sandbox address space has
  // been initialized. Called from the two Initialize variants above.
  void FinishInitialization();

  // Initializes the constant objects for this sandbox.
  void InitializeConstants();

#ifdef V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
  // These non-inlined accessors to the current_ field are used in component
  // builds where cross-component access to thread-local variables is not
  // allowed.
  static Sandbox* current_non_inlined();
  static void set_current_non_inlined(Sandbox* sandbox);
#endif

  // The base address, end address (base + size), and size of the sandbox.
  Address base_ = kNullAddress;
  Address end_ = kNullAddress;
  size_t size_ = 0;

  // Base and size of the virtual memory reservation backing this sandbox.
  // These can be different from the sandbox base and size due to guard regions
  // or when a partially-reserved sandbox is used.
  Address reservation_base_ = kNullAddress;
  size_t reservation_size_ = 0;

  bool initialized_ = false;

#if V8_ENABLE_WEBASSEMBLY && V8_TRAP_HANDLER_SUPPORTED
  bool trap_handler_initialized_ = false;
#endif

  // The virtual address subspace backing the sandbox.
  std::unique_ptr<v8::VirtualAddressSpace> address_space_;

  // The page allocator instance for this sandbox.
  std::unique_ptr<v8::PageAllocator> sandbox_page_allocator_;

  // Constant objects inside this sandbox.
  SandboxedPointerConstants constants_;

  // Besides the address space reservation for the sandbox, we also try to
  // reserve the first four gigabytes of the virtual address space (with an
  // inaccessible mapping). This, for example, mitigates Smi<->HeapObject
  // confusion bugs in which we treat a Smi value as a pointer and access it.
  static bool first_four_gb_of_address_space_are_reserved_;

#ifdef V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
  thread_local static Sandbox* current_;
#endif  // V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
};
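
// Illustrative setup sequence for the process-wide default sandbox (a sketch;
// GetPlatformVirtualAddressSpace() is assumed to be the helper from
// src/utils/allocation.h that exposes the process' address space):
//
//   v8::VirtualAddressSpace* vas = GetPlatformVirtualAddressSpace();
//   Sandbox::InitializeDefaultOncePerProcess(vas);
//   CHECK(Sandbox::GetDefault()->is_initialized());
//   ...
//   Sandbox::TearDownDefault();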

#endif  // V8_ENABLE_SANDBOX

// Helper function that can be used to ensure that certain objects are not
// located inside the sandbox. Typically used for trusted objects.
// Note that this always returns false when the sandbox is disabled, and that
// it can return false for sandbox addresses outside the actually-reserved
// range of a partially-reserved sandbox.
V8_INLINE bool InsideSandbox(uintptr_t address) {
#ifdef V8_ENABLE_SANDBOX
  Sandbox* sandbox = Sandbox::current();
  // Use ReservationContains (instead of just Contains) to correctly handle the
  // case of partially-reserved sandboxes.
  return sandbox->ReservationContains(address);
#else
  return false;
#endif
}
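
// Typical use of InsideSandbox() (a sketch; `trusted_object` is a
// hypothetical pointer that must never live inside the sandbox):
//
//   CHECK(!InsideSandbox(reinterpret_cast<uintptr_t>(trusted_object)));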

// Returns a pointer to the empty backing store buffer constant inside the
// current sandbox, or nullptr when the sandbox is disabled.
V8_INLINE void* EmptyBackingStoreBuffer() {
#ifdef V8_ENABLE_SANDBOX
  return reinterpret_cast<void*>(
      Sandbox::current()->constants().empty_backing_store_buffer());
#else
  return nullptr;
#endif
}

}  // namespace internal
}  // namespace v8

#endif  // V8_SANDBOX_SANDBOX_H_