v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
wasm-module-builder.cc
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/wasm/wasm-module-builder.h"
// (Further #include directives omitted in this listing.)

namespace v8 {
namespace internal {
namespace wasm {

namespace {

// Emit a section code and the size as a padded varint that can be patched
// later.
size_t EmitSection(SectionCode code, ZoneBuffer* buffer) {
  // Emit the section code.
  buffer->write_u8(code);

  // Emit a placeholder for the length.
  return buffer->reserve_u32v();
}

// Patch the size of a section after it's finished.
void FixupSection(ZoneBuffer* buffer, size_t start) {
  buffer->patch_u32v(start, static_cast<uint32_t>(buffer->offset() - start -
                                                  kPaddedVarInt32Size));
}

WasmOpcode FromInitExprOperator(WasmInitExpr::Operator op) {
  // The case labels assume WasmInitExpr::Operator members named after the
  // opcodes they map to.
  switch (op) {
    case WasmInitExpr::kGlobalGet:
      return kExprGlobalGet;
    case WasmInitExpr::kI32Const:
      return kExprI32Const;
    case WasmInitExpr::kI64Const:
      return kExprI64Const;
    case WasmInitExpr::kF32Const:
      return kExprF32Const;
    case WasmInitExpr::kF64Const:
      return kExprF64Const;
    case WasmInitExpr::kS128Const:
      return kExprS128Const;
    case WasmInitExpr::kI32Add:
      return kExprI32Add;
    case WasmInitExpr::kI32Sub:
      return kExprI32Sub;
    case WasmInitExpr::kI32Mul:
      return kExprI32Mul;
    case WasmInitExpr::kI64Add:
      return kExprI64Add;
    case WasmInitExpr::kI64Sub:
      return kExprI64Sub;
    case WasmInitExpr::kI64Mul:
      return kExprI64Mul;
    case WasmInitExpr::kRefNullConst:
      return kExprRefNull;
    case WasmInitExpr::kRefFuncConst:
      return kExprRefFunc;
    case WasmInitExpr::kStructNew:
      return kExprStructNew;
    case WasmInitExpr::kStructNewDefault:
      return kExprStructNewDefault;
    case WasmInitExpr::kArrayNew:
      return kExprArrayNew;
    case WasmInitExpr::kArrayNewDefault:
      return kExprArrayNewDefault;
    case WasmInitExpr::kArrayNewFixed:
      return kExprArrayNewFixed;
    case WasmInitExpr::kRefI31:
      return kExprRefI31;
    case WasmInitExpr::kStringConst:
      return kExprStringConst;
    case WasmInitExpr::kAnyConvertExtern:
      return kExprAnyConvertExtern;
    case WasmInitExpr::kExternConvertAny:
      return kExprExternConvertAny;
  }
}

void WriteInitializerExpressionWithoutEnd(ZoneBuffer* buffer,
                                          const WasmInitExpr& init) {
  // Case labels assume WasmInitExpr kinds matching the emitted opcodes.
  switch (init.kind()) {
    case WasmInitExpr::kI32Const:
      buffer->write_u8(kExprI32Const);
      buffer->write_i32v(init.immediate().i32_const);
      break;
    case WasmInitExpr::kI64Const:
      buffer->write_u8(kExprI64Const);
      buffer->write_i64v(init.immediate().i64_const);
      break;
    case WasmInitExpr::kF32Const:
      buffer->write_u8(kExprF32Const);
      buffer->write_f32(init.immediate().f32_const);
      break;
    case WasmInitExpr::kF64Const:
      buffer->write_u8(kExprF64Const);
      buffer->write_f64(init.immediate().f64_const);
      break;
    case WasmInitExpr::kS128Const:
      buffer->write_u8(kSimdPrefix);
      buffer->write_u8(kExprS128Const & 0xFF);
      buffer->write(init.immediate().s128_const.data(), kSimd128Size);
      break;
    case WasmInitExpr::kI32Add:
    case WasmInitExpr::kI32Sub:
    case WasmInitExpr::kI32Mul:
    case WasmInitExpr::kI64Add:
    case WasmInitExpr::kI64Sub:
    case WasmInitExpr::kI64Mul:
      WriteInitializerExpressionWithoutEnd(buffer, (*init.operands())[0]);
      WriteInitializerExpressionWithoutEnd(buffer, (*init.operands())[1]);
      buffer->write_u8(FromInitExprOperator(init.kind()));
      break;
    case WasmInitExpr::kGlobalGet:
      buffer->write_u8(kExprGlobalGet);
      buffer->write_u32v(init.immediate().index);
      break;
    case WasmInitExpr::kRefNullConst:
      buffer->write_u8(kExprRefNull);
      if (init.heap_type().encoding_needs_exact()) buffer->write_u8(kExactCode);
      buffer->write_i32v(init.heap_type().code());
      break;
    case WasmInitExpr::kRefFuncConst:
      buffer->write_u8(kExprRefFunc);
      buffer->write_u32v(init.immediate().index);
      break;
    case WasmInitExpr::kStructNew:
    case WasmInitExpr::kStructNewDefault:
    case WasmInitExpr::kArrayNew:
    case WasmInitExpr::kArrayNewDefault: {
      if (init.operands() != nullptr) {
        for (const WasmInitExpr& operand : *init.operands()) {
          WriteInitializerExpressionWithoutEnd(buffer, operand);
        }
      }
      WasmOpcode opcode = FromInitExprOperator(init.kind());
      DCHECK_EQ(opcode >> 8, kGCPrefix);
      DCHECK_EQ(opcode & 0x80, 0);
      buffer->write_u8(kGCPrefix);
      buffer->write_u8(static_cast<uint8_t>(opcode));
      buffer->write_u32v(init.immediate().index);
      break;
    }
    case WasmInitExpr::kArrayNewFixed: {
      static_assert((kExprArrayNewFixed >> 8) == kGCPrefix);
      static_assert((kExprArrayNewFixed & 0x80) == 0);
      for (const WasmInitExpr& operand : *init.operands()) {
        WriteInitializerExpressionWithoutEnd(buffer, operand);
      }
      buffer->write_u8(kGCPrefix);
      buffer->write_u8(static_cast<uint8_t>(kExprArrayNewFixed));
      buffer->write_u32v(init.immediate().index);
      buffer->write_u32v(static_cast<uint32_t>(init.operands()->size()));
      break;
    }
    case WasmInitExpr::kRefI31:
    case WasmInitExpr::kAnyConvertExtern:
    case WasmInitExpr::kExternConvertAny: {
      WriteInitializerExpressionWithoutEnd(buffer, (*init.operands())[0]);
      WasmOpcode opcode = FromInitExprOperator(init.kind());
      DCHECK_EQ(opcode >> 8, kGCPrefix);
      DCHECK_EQ(opcode & 0x80, 0);
      buffer->write_u8(kGCPrefix);
      buffer->write_u8(opcode);
      break;
    }
    case WasmInitExpr::kStringConst:
      buffer->write_u8(kGCPrefix);
      buffer->write_u32v(kExprStringConst & 0xFF);
      buffer->write_u32v(init.immediate().index);
      break;
  }
}

void WriteInitializerExpression(ZoneBuffer* buffer, const WasmInitExpr& init) {
  WriteInitializerExpressionWithoutEnd(buffer, init);
  buffer->write_u8(kExprEnd);
}
}  // namespace

WasmFunctionBuilder::WasmFunctionBuilder(WasmModuleBuilder* builder)
    : builder_(builder),
      locals_(builder->zone()),
      signature_index_{0},
      func_index_(static_cast<uint32_t>(builder->functions_.size())),
      body_(builder->zone(), 256),
      i32_temps_(builder->zone()),
      i64_temps_(builder->zone()),
      f32_temps_(builder->zone()),
      f64_temps_(builder->zone()),
      direct_calls_(builder->zone()),
      asm_offsets_(builder->zone(), 8) {}

// (Several short member definitions, including the SetSignature() overloads,
// were lost from this listing; the AddLocal() signature below is restored
// from the surrounding code.)

uint32_t WasmFunctionBuilder::AddLocal(ValueType type) {
  return locals_.AddLocals(1, type);
}

void WasmFunctionBuilder::EmitGetLocal(uint32_t local_index) {
  EmitWithU32V(kExprLocalGet, local_index);
}

void WasmFunctionBuilder::EmitSetLocal(uint32_t local_index) {
  EmitWithU32V(kExprLocalSet, local_index);
}

void WasmFunctionBuilder::EmitTeeLocal(uint32_t local_index) {
  EmitWithU32V(kExprLocalTee, local_index);
}

void WasmFunctionBuilder::EmitCode(const uint8_t* code, uint32_t code_size) {
  body_.write(code, code_size);
}

void WasmFunctionBuilder::EmitCode(std::initializer_list<const uint8_t> code) {
  body_.write(code.begin(), code.size());
}

void WasmFunctionBuilder::Emit(WasmOpcode opcode) {
  DCHECK_LE(opcode, 0xFF);
  body_.write_u8(opcode);
}

void WasmFunctionBuilder::EmitWithPrefix(WasmOpcode opcode) {
  DCHECK_GT(opcode, 0xFF);
  if (opcode > 0xFFFF) {
    DCHECK_EQ(kSimdPrefix, opcode >> 12);
    body_.write_u8(kSimdPrefix);
    body_.write_u32v(opcode & 0xFFF);
  } else {
    body_.write_u8(opcode >> 8);      // Prefix.
    body_.write_u32v(opcode & 0xff);  // LEB encoded tail.
  }
}

void WasmFunctionBuilder::EmitWithU8(WasmOpcode opcode,
                                     const uint8_t immediate) {
  body_.write_u8(opcode);
  body_.write_u8(immediate);
}

void WasmFunctionBuilder::EmitWithU8U8(WasmOpcode opcode, const uint8_t imm1,
                                       const uint8_t imm2) {
  body_.write_u8(opcode);
  body_.write_u8(imm1);
  body_.write_u8(imm2);
}

void WasmFunctionBuilder::EmitWithI32V(WasmOpcode opcode, int32_t immediate) {
  body_.write_u8(opcode);
  body_.write_i32v(immediate);
}

void WasmFunctionBuilder::EmitWithU32V(WasmOpcode opcode, uint32_t immediate) {
  body_.write_u8(opcode);
  body_.write_u32v(immediate);
}

namespace {
void WriteHeapType(ZoneBuffer* buffer, HeapType type) {
  if (type.encoding_needs_exact()) buffer->write_u8(kExactCode);
  buffer->write_i32v(type.code());
}
void WriteValueType(ZoneBuffer* buffer, const ValueType& type) {
  buffer->write_u8(type.value_type_code());
  if (type.encoding_needs_shared()) {
    buffer->write_u8(kSharedFlagCode);
  }
  if (type.encoding_needs_heap_type()) {
    WriteHeapType(buffer, type.heap_type());
  }
}
}  // namespace

void WasmFunctionBuilder::EmitHeapType(HeapType type) {
  WriteHeapType(&body_, type);
}

void WasmFunctionBuilder::EmitValueType(ValueType type) {
  WriteValueType(&body_, type);
}

void WasmFunctionBuilder::EmitI32Const(int32_t value) {
  EmitWithI32V(kExprI32Const, value);
}

void WasmFunctionBuilder::EmitI64Const(int64_t value) {
  body_.write_u8(kExprI64Const);
  body_.write_i64v(value);
}

void WasmFunctionBuilder::EmitF32Const(float value) {
  body_.write_u8(kExprF32Const);
  body_.write_f32(value);
}

void WasmFunctionBuilder::EmitF64Const(double value) {
  body_.write_u8(kExprF64Const);
  body_.write_f64(value);
}

void WasmFunctionBuilder::EmitDirectCallIndex(uint32_t index) {
  DirectCallIndex call;
  call.offset = body_.size();
  call.direct_index = index;
  direct_calls_.push_back(call);
  uint8_t placeholder_bytes[kMaxVarInt32Size] = {0};
  EmitCode(placeholder_bytes, arraysize(placeholder_bytes));
}

void WasmFunctionBuilder::EmitFromInitializerExpression(
    const WasmInitExpr& init_expr) {
  WriteInitializerExpression(&body_, init_expr);
}

void WasmFunctionBuilder::AddAsmWasmOffset(size_t call_position,
                                           size_t to_number_position) {
  // We only want to emit one mapping per byte offset.

  uint32_t byte_offset = static_cast<uint32_t>(body_.size());
  asm_offsets_.write_u32v(byte_offset - last_asm_byte_offset_);
  last_asm_byte_offset_ = byte_offset;

  DCHECK_GE(std::numeric_limits<uint32_t>::max(), call_position);
  uint32_t call_position_u32 = static_cast<uint32_t>(call_position);
  asm_offsets_.write_i32v(call_position_u32 - last_asm_source_position_);

  DCHECK_GE(std::numeric_limits<uint32_t>::max(), to_number_position);
  uint32_t to_number_position_u32 = static_cast<uint32_t>(to_number_position);
  asm_offsets_.write_i32v(to_number_position_u32 - call_position_u32);
  last_asm_source_position_ = to_number_position_u32;
}

void WasmFunctionBuilder::SetAsmFunctionStartPosition(
    size_t function_position) {
  DCHECK_GE(std::numeric_limits<uint32_t>::max(), function_position);
  uint32_t function_position_u32 = static_cast<uint32_t>(function_position);
  // Must be called before emitting any asm.js source position.
  DCHECK_EQ(0, asm_offsets_.size());
  asm_func_start_source_position_ = function_position_u32;
  last_asm_source_position_ = function_position_u32;
}

void WasmFunctionBuilder::SetCompilationHint(
    WasmCompilationHintStrategy strategy, WasmCompilationHintTier baseline,
    WasmCompilationHintTier top_tier) {
  uint8_t hint_byte = static_cast<uint8_t>(strategy) |
                      static_cast<uint8_t>(baseline) << 2 |
                      static_cast<uint8_t>(top_tier) << 4;
  DCHECK_NE(hint_byte, kNoCompilationHint);
  hint_ = hint_byte;
}

void WasmFunctionBuilder::WriteBody(ZoneBuffer* buffer) const {
  size_t locals_size = locals_.Size();
  buffer->write_size(locals_size + body_.size());
  buffer->EnsureSpace(locals_size);
  uint8_t** ptr = buffer->pos_ptr();
  locals_.Emit(*ptr);
  (*ptr) += locals_size;  // UGLY: manual bump of position pointer
  if (body_.size() > 0) {
    size_t base = buffer->offset();
    buffer->write(body_.begin(), body_.size());
    for (DirectCallIndex call : direct_calls_) {
      buffer->patch_u32v(
          base + call.offset,
          call.direct_index +
              static_cast<uint32_t>(builder_->function_imports_.size()));
    }
  }
}

void WasmFunctionBuilder::WriteAsmWasmOffsetTable(ZoneBuffer* buffer) const {
  if (asm_offsets_.size() == 0) {
    buffer->write_size(0);
    return;
  }
  size_t locals_enc_size = LEBHelper::sizeof_u32v(locals_.Size());
  size_t func_start_size =
      LEBHelper::sizeof_u32v(asm_func_start_source_position_);
  buffer->write_size(asm_offsets_.size() + locals_enc_size + func_start_size);
  // Offset of the recorded byte offsets.
  buffer->write_u32v(static_cast<uint32_t>(locals_.Size()));
  // Start position of the function.
  buffer->write_u32v(asm_func_start_source_position_);
  buffer->write(asm_offsets_.begin(), asm_offsets_.size());
}

WasmModuleBuilder::WasmModuleBuilder(Zone* zone)
    : zone_(zone),
      types_(zone),
      function_imports_(zone),
      global_imports_(zone),
      exports_(zone),
      functions_(zone),
      tables_(zone),
      memories_(zone),
      data_segments_(zone),
      element_segments_(zone),
      globals_(zone),
      tags_(zone),
      signature_map_(zone),
      current_recursive_group_start_(-1),
      recursive_groups_(zone),
      start_function_index_(-1) {}

WasmFunctionBuilder* WasmModuleBuilder::AddFunction(const FunctionSig* sig) {
  functions_.push_back(zone_->New<WasmFunctionBuilder>(this));
  // Add the signature if one was provided here.
  if (sig) functions_.back()->SetSignature(sig);
  return functions_.back();
}

WasmFunctionBuilder* WasmModuleBuilder::AddFunction(ModuleTypeIndex sig_index) {
  functions_.push_back(zone_->New<WasmFunctionBuilder>(this));
  functions_.back()->SetSignature(sig_index);
  return functions_.back();
}

void WasmModuleBuilder::AddDataSegment(const uint8_t* data, uint32_t size,
                                       uint32_t dest) {
  data_segments_.push_back({.data = ZoneVector<uint8_t>(zone()), .dest = dest});
  ZoneVector<uint8_t>& vec = data_segments_.back().data;
  for (uint32_t i = 0; i < size; i++) {
    vec.push_back(data[i]);
  }
}

void WasmModuleBuilder::AddPassiveDataSegment(const uint8_t* data,
                                              uint32_t size) {
  data_segments_.push_back(
      {.data = ZoneVector<uint8_t>(zone()), .dest = 0, .is_active = false});
  ZoneVector<uint8_t>& vec = data_segments_.back().data;
  for (uint32_t i = 0; i < size; i++) {
    vec.push_back(data[i]);
  }
}

ModuleTypeIndex WasmModuleBuilder::ForceAddSignature(
    const FunctionSig* sig, bool is_final, ModuleTypeIndex supertype) {
  ModuleTypeIndex index{static_cast<uint32_t>(types_.size())};
  signature_map_.emplace(*sig, index);
  types_.emplace_back(sig, supertype, is_final, false);
  return index;
}

ModuleTypeIndex WasmModuleBuilder::AddSignature(const FunctionSig* sig,
                                                bool is_final,
                                                ModuleTypeIndex supertype) {
  auto sig_entry = signature_map_.find(*sig);
  if (sig_entry != signature_map_.end()) return sig_entry->second;
  return ForceAddSignature(sig, is_final, supertype);
}

uint32_t WasmModuleBuilder::AddTag(const FunctionSig* type) {
  DCHECK_EQ(0, type->return_count());
  ModuleTypeIndex type_index = AddSignature(type, true);
  uint32_t except_index = static_cast<uint32_t>(tags_.size());
  tags_.push_back(type_index);
  return except_index;
}

ModuleTypeIndex WasmModuleBuilder::AddStructType(StructType* type,
                                                 bool is_final,
                                                 ModuleTypeIndex supertype) {
  uint32_t index = static_cast<uint32_t>(types_.size());
  types_.emplace_back(type, supertype, is_final, false);
  return ModuleTypeIndex{index};
}

ModuleTypeIndex WasmModuleBuilder::AddArrayType(ArrayType* type, bool is_final,
                                                ModuleTypeIndex supertype) {
  uint32_t index = static_cast<uint32_t>(types_.size());
  types_.emplace_back(type, supertype, is_final, false);
  return ModuleTypeIndex{index};
}

uint32_t WasmModuleBuilder::IncreaseTableMinSize(uint32_t table_index,
                                                 uint32_t count) {
  DCHECK_LT(table_index, tables_.size());
  uint32_t old_min_size = tables_[table_index].min_size;
  if (count > wasm::max_table_size() - old_min_size) {
    return std::numeric_limits<uint32_t>::max();
  }
  tables_[table_index].min_size = old_min_size + count;
  tables_[table_index].max_size =
      std::max(old_min_size + count, tables_[table_index].max_size);
  return old_min_size;
}

uint32_t WasmModuleBuilder::AddTable(ValueType type, uint32_t min_size) {
  tables_.push_back({.type = type, .min_size = min_size});
  return static_cast<uint32_t>(tables_.size() - 1);
}

uint32_t WasmModuleBuilder::AddTable(ValueType type, uint32_t min_size,
                                     uint32_t max_size,
                                     AddressType address_type) {
  tables_.push_back({.type = type,
                     .min_size = min_size,
                     .max_size = max_size,
                     .has_maximum = true,
                     .address_type = address_type});
  return static_cast<uint32_t>(tables_.size() - 1);
}

uint32_t WasmModuleBuilder::AddTable(ValueType type, uint32_t min_size,
                                     uint32_t max_size, WasmInitExpr init,
                                     AddressType address_type) {
  tables_.push_back({.type = type,
                     .min_size = min_size,
                     .max_size = max_size,
                     .has_maximum = true,
                     .address_type = address_type,
                     .init = {init}});
  return static_cast<uint32_t>(tables_.size() - 1);
}

uint32_t WasmModuleBuilder::AddMemory(uint32_t min_pages) {
  memories_.push_back({.min_pages = min_pages});
  return static_cast<uint32_t>(memories_.size() - 1);
}

uint32_t WasmModuleBuilder::AddMemory(uint32_t min_pages, uint32_t max_pages) {
  memories_.push_back(
      {.min_pages = min_pages, .max_pages = max_pages, .has_max_pages = true});
  return static_cast<uint32_t>(memories_.size() - 1);
}

uint32_t WasmModuleBuilder::AddMemory64(uint32_t min_pages) {
  memories_.push_back(
      {.min_pages = min_pages, .address_type = AddressType::kI64});
  return static_cast<uint32_t>(memories_.size() - 1);
}

uint32_t WasmModuleBuilder::AddMemory64(uint32_t min_pages,
                                        uint32_t max_pages) {
  memories_.push_back({.min_pages = min_pages,
                       .max_pages = max_pages,
                       .has_max_pages = true,
                       .address_type = AddressType::kI64});
  return static_cast<uint32_t>(memories_.size() - 1);
}

uint32_t WasmModuleBuilder::AddElementSegment(WasmElemSegment segment) {
  element_segments_.push_back(std::move(segment));
  return static_cast<uint32_t>(element_segments_.size() - 1);
}

void WasmModuleBuilder::SetIndirectFunction(
    uint32_t table_index, uint32_t index_in_table,
    uint32_t direct_function_index,
    WasmElemSegment::FunctionIndexingMode indexing_mode) {
  WasmElemSegment segment(zone_, kWasmFuncRef, table_index,
                          WasmInitExpr(static_cast<int>(index_in_table)));
  segment.indexing_mode = indexing_mode;
  segment.entries.emplace_back(WasmElemSegment::Entry::kRefFuncEntry,
                               direct_function_index);
  AddElementSegment(std::move(segment));
}

uint32_t WasmModuleBuilder::AddImport(base::Vector<const char> name,
                                      const FunctionSig* sig,
                                      base::Vector<const char> module) {
  DCHECK(adding_imports_allowed_);
  function_imports_.push_back(
      {.module = module, .name = name, .sig_index = AddSignature(sig, true)});
  return static_cast<uint32_t>(function_imports_.size() - 1);
}

uint32_t WasmModuleBuilder::AddGlobalImport(base::Vector<const char> name,
                                            ValueType type, bool mutability,
                                            base::Vector<const char> module) {
  global_imports_.push_back({.module = module,
                             .name = name,
                             .type_code = type.value_type_code(),
                             .mutability = mutability});
  return static_cast<uint32_t>(global_imports_.size() - 1);
}

void WasmModuleBuilder::MarkStartFunction(WasmFunctionBuilder* function) {
  start_function_index_ = function->func_index();
}

void WasmModuleBuilder::AddExport(base::Vector<const char> name,
                                  ImportExportKindCode kind, uint32_t index) {
  DCHECK_LE(index, std::numeric_limits<int>::max());
  exports_.push_back(
      {.name = name, .kind = kind, .index = static_cast<int>(index)});
}

uint32_t WasmModuleBuilder::AddExportedGlobal(ValueType type, bool mutability,
                                              WasmInitExpr init,
                                              base::Vector<const char> name) {
  uint32_t index = AddGlobal(type, mutability, init);
  AddExport(name, kExternalGlobal, index);
  return index;
}

void WasmModuleBuilder::ExportImportedFunction(base::Vector<const char> name,
                                               int import_index) {
#if DEBUG
  // The size of function_imports_ must not change any more.
  adding_imports_allowed_ = false;
#endif
  exports_.push_back(
      {.name = name,
       .kind = kExternalFunction,
       .index = import_index - static_cast<int>(function_imports_.size())});
}

uint32_t WasmModuleBuilder::AddGlobal(ValueType type, bool mutability,
                                      WasmInitExpr init) {
  globals_.push_back({.type = type, .mutability = mutability, .init = init});
  return static_cast<uint32_t>(globals_.size() - 1);
}

void WasmModuleBuilder::WriteTo(ZoneBuffer* buffer) const {
  // == Emit magic =============================================================
  buffer->write_u32(kWasmMagic);
  buffer->write_u32(kWasmVersion);

  // == Emit types =============================================================
  if (!types_.empty()) {
    size_t start = EmitSection(kTypeSectionCode, buffer);
    // Every recursion group occupies one type entry.
    size_t type_count = types_.size() + recursive_groups_.size();
    // Types inside recursion groups occupy no additional type entry.
    for (auto [first_index, size] : recursive_groups_) {
      type_count -= size;
    }

    buffer->write_size(type_count);

    const RecGroup* next_rec_group =
        recursive_groups_.empty() ? nullptr : &recursive_groups_.front();

    for (uint32_t i = 0; i < types_.size(); i++) {
      // Note: while loop, because recgroups can be empty.
      while (next_rec_group && i == next_rec_group->start_index) {
        buffer->write_u8(kWasmRecursiveTypeGroupCode);
        buffer->write_u32v(next_rec_group->size);
        next_rec_group = next_rec_group == &recursive_groups_.back()
                             ? nullptr
                             : next_rec_group + 1;
      }

      const TypeDefinition& type = types_[i];

      if (type.supertype.valid()) {
        buffer->write_u8(type.is_final ? kWasmSubtypeFinalCode
                                       : kWasmSubtypeCode);
        buffer->write_u8(1);
        buffer->write_u32v(type.supertype);
      } else if (!type.is_final) {
        buffer->write_u8(kWasmSubtypeCode);
        buffer->write_u8(0);
      }
      switch (type.kind) {
        case TypeDefinition::kFunction: {
          const FunctionSig* sig = type.function_sig;
          buffer->write_u8(kWasmFunctionTypeCode);
          buffer->write_size(sig->parameter_count());
          for (auto param : sig->parameters()) {
            WriteValueType(buffer, param);
          }
          buffer->write_size(sig->return_count());
          for (auto ret : sig->returns()) {
            WriteValueType(buffer, ret);
          }
          break;
        }
        case TypeDefinition::kStruct: {
          const StructType* struct_type = type.struct_type;
          buffer->write_u8(kWasmStructTypeCode);
          buffer->write_size(struct_type->field_count());
          for (uint32_t j = 0; j < struct_type->field_count(); j++) {
            WriteValueType(buffer, struct_type->field(j));
            buffer->write_u8(struct_type->mutability(j) ? 1 : 0);
          }
          break;
        }
        case TypeDefinition::kArray: {
          const ArrayType* array_type = type.array_type;
          buffer->write_u8(kWasmArrayTypeCode);
          WriteValueType(buffer, array_type->element_type());
          buffer->write_u8(array_type->mutability() ? 1 : 0);
          break;
        }
        case TypeDefinition::kCont: {
          const ContType* cont_type = type.cont_type;
          buffer->write_u8(kWasmContTypeCode);
          buffer->write_u32v(cont_type->contfun_typeindex());
          break;
        }
      }
    }

    // Handle empty recursion groups defined after all types.
    while (next_rec_group) {
      DCHECK_EQ(types_.size(), next_rec_group->start_index);
      DCHECK_EQ(0, next_rec_group->size);
      buffer->write_u8(kWasmRecursiveTypeGroupCode);
      buffer->write_u32v(0);
      if (next_rec_group == &recursive_groups_.back()) break;
      ++next_rec_group;
    }

    FixupSection(buffer, start);
  }

  // == Emit imports ===========================================================
  if (global_imports_.size() + function_imports_.size() > 0) {
    size_t start = EmitSection(kImportSectionCode, buffer);
    buffer->write_size(global_imports_.size() + function_imports_.size());
    for (auto import : global_imports_) {
      buffer->write_string(import.module);  // module name
      buffer->write_string(import.name);    // field name
      buffer->write_u8(kExternalGlobal);
      buffer->write_u8(import.type_code);
      buffer->write_u8(import.mutability ? 1 : 0);
    }
    for (auto import : function_imports_) {
      buffer->write_string(import.module);  // module name
      buffer->write_string(import.name);    // field name
      buffer->write_u8(kExternalFunction);
      buffer->write_u32v(import.sig_index);
    }
    FixupSection(buffer, start);
  }

  // == Emit function signatures ===============================================
  uint32_t num_function_names = 0;
  if (!functions_.empty()) {
    size_t start = EmitSection(kFunctionSectionCode, buffer);
    buffer->write_size(functions_.size());
    for (auto* function : functions_) {
      function->WriteSignature(buffer);
      if (!function->name_.empty()) ++num_function_names;
    }
    FixupSection(buffer, start);
  }

  // == Emit tables ============================================================
  if (!tables_.empty()) {
    size_t start = EmitSection(kTableSectionCode, buffer);
    buffer->write_size(tables_.size());
    for (const WasmTable& table : tables_) {
      if (table.init) {
        buffer->write_u8(0x40);  // table-with-initializer
        buffer->write_u8(0x00);  // reserved byte
      }
      WriteValueType(buffer, table.type);
      uint8_t limits_byte = (table.is_table64() ? 4 : 0) |
                            (table.is_shared ? 2 : 0) |
                            (table.has_maximum ? 1 : 0);
      buffer->write_u8(limits_byte);
      auto WriteValToBuffer = [&](uint32_t val) {
        table.is_table64() ? buffer->write_u64v(val) : buffer->write_u32v(val);
      };
      WriteValToBuffer(table.min_size);
      if (table.has_maximum) {
        WriteValToBuffer(table.max_size);
      }
      if (table.init) {
        WriteInitializerExpression(buffer, *table.init);
      }
    }
    FixupSection(buffer, start);
  }

  // == Emit memory declaration ================================================
  if (!memories_.empty()) {
    size_t start = EmitSection(kMemorySectionCode, buffer);
    buffer->write_size(memories_.size());
    for (const WasmMemory& memory : memories_) {
      uint8_t limits_byte = (memory.is_memory64() ? 4 : 0) |
                            (memory.is_shared ? 2 : 0) |
                            (memory.has_max_pages ? 1 : 0);
      buffer->write_u8(limits_byte);
      auto WriteValToBuffer = [&](uint32_t val) {
        memory.is_memory64() ? buffer->write_u64v(val)
                             : buffer->write_u32v(val);
      };
      WriteValToBuffer(memory.min_pages);
      if (memory.has_max_pages) {
        WriteValToBuffer(memory.max_pages);
      }
    }
    FixupSection(buffer, start);
  }

  // == Emit event section =====================================================
  if (!tags_.empty()) {
    size_t start = EmitSection(kTagSectionCode, buffer);
    buffer->write_size(tags_.size());
    for (ModuleTypeIndex type : tags_) {
      buffer->write_u32v(kExceptionAttribute);
      buffer->write_u32v(type);
    }
    FixupSection(buffer, start);
  }

  // == Emit globals ===========================================================
  if (!globals_.empty()) {
    size_t start = EmitSection(kGlobalSectionCode, buffer);
    buffer->write_size(globals_.size());

    for (const WasmGlobal& global : globals_) {
      WriteValueType(buffer, global.type);
      buffer->write_u8(global.mutability ? 1 : 0);
      WriteInitializerExpression(buffer, global.init);
    }
    FixupSection(buffer, start);
  }

863 if (!exports_.empty()) {
864 size_t start = EmitSection(kExportSectionCode, buffer);
865 buffer->write_size(exports_.size());
866 for (auto ex : exports_) {
867 buffer->write_string(ex.name);
868 buffer->write_u8(ex.kind);
869 switch (ex.kind) {
871 buffer->write_size(ex.index + function_imports_.size());
872 break;
873 case kExternalGlobal:
874 buffer->write_size(ex.index + global_imports_.size());
875 break;
876 case kExternalMemory:
877 case kExternalTable:
878 // The WasmModuleBuilder doesn't support importing tables or memories
879 // yet, so there is no index offset to add.
880 buffer->write_size(ex.index);
881 break;
882 case kExternalTag:
883 UNREACHABLE();
884 }
885 }
886 FixupSection(buffer, start);
887 }
888
  // == Emit start function index ==============================================
  if (start_function_index_ >= 0) {
    size_t start = EmitSection(kStartSectionCode, buffer);
    buffer->write_size(start_function_index_ + function_imports_.size());
    FixupSection(buffer, start);
  }

  // == Emit element segments ==================================================
  if (!element_segments_.empty()) {
    size_t start = EmitSection(kElementSectionCode, buffer);
    buffer->write_size(element_segments_.size());
    for (const WasmElemSegment& segment : element_segments_) {
      bool is_active = segment.status == WasmElemSegment::kStatusActive;
      // We pick the most general syntax, i.e., we always explicitly emit the
      // table index and the type, and use the expressions-as-elements syntax.
      // The initial byte is one of 0x05, 0x06, and 0x07.
      uint8_t kind_mask =
          segment.status == WasmElemSegment::kStatusActive
              ? 0b10
              : segment.status == WasmElemSegment::kStatusDeclarative ? 0b11
                                                                      : 0b01;
      uint8_t expressions_as_elements_mask = 0b100;
      buffer->write_u8(kind_mask | expressions_as_elements_mask);
      if (is_active) {
        buffer->write_u32v(segment.table_index);
        WriteInitializerExpression(buffer, segment.offset);
      }
      WriteValueType(buffer, segment.type);
      buffer->write_size(segment.entries.size());
      for (const WasmElemSegment::Entry entry : segment.entries) {
        uint8_t opcode =
            entry.kind == WasmElemSegment::Entry::kGlobalGetEntry
                ? kExprGlobalGet
                : entry.kind == WasmElemSegment::Entry::kRefFuncEntry
                      ? kExprRefFunc
                      : kExprRefNull;
        bool needs_function_offset =
            segment.indexing_mode ==
                WasmElemSegment::kRelativeToImports &&
            entry.kind == WasmElemSegment::Entry::kRefFuncEntry;
        uint32_t index =
            entry.index + (needs_function_offset
                               ? static_cast<uint32_t>(function_imports_.size())
                               : 0);
        buffer->write_u8(opcode);
        buffer->write_u32v(index);
        buffer->write_u8(kExprEnd);
      }
    }
    FixupSection(buffer, start);
  }

  // == Emit data segment count section ========================================
  if (std::any_of(
          data_segments_.begin(), data_segments_.end(),
          [](const WasmDataSegment& segment) { return !segment.is_active; })) {
    buffer->write_u8(kDataCountSectionCode);
    buffer->write_u32v(1);  // section length
    buffer->write_u32v(static_cast<uint32_t>(data_segments_.size()));
  }

  // == Emit compilation hints section =========================================
  bool emit_compilation_hints = false;
  for (auto* fn : functions_) {
    if (fn->hint_ != kNoCompilationHint) {
      emit_compilation_hints = true;
      break;
    }
  }
  if (emit_compilation_hints) {
    // Emit the section code.
    buffer->write_u8(kUnknownSectionCode);
    // Emit a placeholder for section length.
    size_t start = buffer->reserve_u32v();
    // Emit custom section name.
    buffer->write_string(base::CStrVector("compilationHints"));
    // Emit hint count.
    buffer->write_size(functions_.size());
    // Emit hint bytes.
    for (auto* fn : functions_) {
      uint8_t hint_byte =
          fn->hint_ != kNoCompilationHint ? fn->hint_ : kDefaultCompilationHint;
      buffer->write_u8(hint_byte);
    }
    FixupSection(buffer, start);
  }

  // == Emit code ==============================================================
  if (!functions_.empty()) {
    size_t start = EmitSection(kCodeSectionCode, buffer);
    buffer->write_size(functions_.size());
    for (auto* function : functions_) {
      function->WriteBody(buffer);
    }
    FixupSection(buffer, start);
  }

  // == Emit data segments =====================================================
  if (!data_segments_.empty()) {
    size_t start = EmitSection(kDataSectionCode, buffer);
    buffer->write_size(data_segments_.size());

    for (auto segment : data_segments_) {
      if (segment.is_active) {
        buffer->write_u8(0);              // linear memory segment
        buffer->write_u8(kExprI32Const);  // constant expression for dest
        buffer->write_u32v(segment.dest);
        buffer->write_u8(kExprEnd);
      } else {
        buffer->write_u8(kPassive);
      }
      buffer->write_u32v(static_cast<uint32_t>(segment.data.size()));
      buffer->write(segment.data.data(), segment.data.size());
    }
    FixupSection(buffer, start);
  }

  // == Emit names =============================================================
  if (num_function_names > 0 || !function_imports_.empty()) {
    // Emit the section code.
    buffer->write_u8(kUnknownSectionCode);
    // Emit a placeholder for the length.
    size_t start = buffer->reserve_u32v();
    // Emit the section string.
    buffer->write_string(base::CStrVector("name"));
    // Emit a subsection for the function names.
    buffer->write_u8(NameSectionKindCode::kFunctionCode);
    // Emit a placeholder for the subsection length.
    size_t functions_start = buffer->reserve_u32v();
    // Emit the function names.
    // Imports are always named.
    uint32_t num_imports = static_cast<uint32_t>(function_imports_.size());
    buffer->write_size(num_imports + num_function_names);
    uint32_t function_index = 0;
    for (; function_index < num_imports; ++function_index) {
      const WasmFunctionImport* import = &function_imports_[function_index];
      DCHECK(!import->name.empty());
      buffer->write_u32v(function_index);
      buffer->write_string(import->name);
    }
    if (num_function_names > 0) {
      for (auto* function : functions_) {
        DCHECK_EQ(function_index,
                  function->func_index() + function_imports_.size());
        if (!function->name_.empty()) {
          buffer->write_u32v(function_index);
          buffer->write_string(function->name_);
        }
        ++function_index;
      }
    }
    FixupSection(buffer, functions_start);
    FixupSection(buffer, start);
  }
}

void WasmModuleBuilder::WriteAsmJsOffsetTable(ZoneBuffer* buffer) const {
  // == Emit asm.js offset table ===============================================
  buffer->write_size(functions_.size());
  // Emit the offset table per function.
  for (auto* function : functions_) {
    function->WriteAsmWasmOffsetTable(buffer);
  }
}
}  // namespace wasm
}  // namespace internal
}  // namespace v8
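The builder API in this file is exercised by V8's own tests and fuzzers roughly as follows: construct a WasmModuleBuilder in a Zone, add functions and exports, then serialize with WriteTo. The sketch below is illustrative only and not code from this file; the helper name BuildConstI32Module and the exact setup are assumptions, while the builder calls are the ones defined above.

// Illustrative sketch (assumed setup, not from wasm-module-builder.cc):
// build a module exporting a function "f" that returns the i32 constant 42.
void BuildConstI32Module(v8::internal::AccountingAllocator* allocator) {
  using namespace v8::internal;
  using namespace v8::internal::wasm;

  Zone zone(allocator, "wasm module builder example");
  WasmModuleBuilder builder(&zone);

  // Signature: () -> i32. The builder stores a pointer to this signature, so
  // it must stay alive until WriteTo() has run (true here: same scope).
  ValueType reps[] = {kWasmI32};
  FunctionSig sig(/*return_count=*/1, /*parameter_count=*/0, reps);

  WasmFunctionBuilder* f = builder.AddFunction(&sig);
  f->EmitWithI32V(kExprI32Const, 42);            // push 42
  f->EmitCode({static_cast<uint8_t>(kExprEnd)});  // end of function body
  builder.AddExport(base::CStrVector("f"), kExternalFunction, f->func_index());

  // Serialize to the wasm binary format.
  ZoneBuffer buffer(&zone, 1024);
  builder.WriteTo(&buffer);
  // buffer.begin() / buffer.size() now hold the encoded module bytes.
}

Note that AddExport takes the builder-local function index; WriteTo offsets exported function indices by the number of imported functions when it emits the export section.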