v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
bytecode-array-writer.cc
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/bytecode-array-writer.h"

#include "src/api/api-inl.h"
#include "src/heap/local-factory-inl.h"
#include "src/interpreter/bytecode-jump-table.h"
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/bytecode-node.h"
#include "src/interpreter/bytecode-register.h"
#include "src/interpreter/bytecode-source-info.h"
#include "src/interpreter/constant-array-builder.h"
#include "src/interpreter/handler-table-builder.h"

namespace v8 {
namespace internal {
namespace interpreter {

STATIC_CONST_MEMBER_DEFINITION const size_t
    BytecodeArrayWriter::kMaxSizeOfPackedBytecode;

BytecodeArrayWriter::BytecodeArrayWriter(
    Zone* zone, ConstantArrayBuilder* constant_array_builder,
    SourcePositionTableBuilder::RecordingMode source_position_mode)
    : bytecodes_(zone),
      unbound_jumps_(0),
      source_position_table_builder_(zone, source_position_mode),
      constant_array_builder_(constant_array_builder),
      last_bytecode_(Bytecode::kIllegal),
      last_bytecode_offset_(0),
      last_bytecode_had_source_info_(false),
      elide_noneffectful_bytecodes_(
          v8_flags.ignition_elide_noneffectful_bytecodes),
      exit_seen_in_block_(false) {
  bytecodes_.reserve(512);  // Derived via experimentation.
}

template <typename IsolateT>
Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
    IsolateT* isolate, int register_count, uint16_t parameter_count,
    uint16_t max_arguments, DirectHandle<TrustedByteArray> handler_table) {
  DCHECK_EQ(0, unbound_jumps_);

  int bytecode_size = static_cast<int>(bytecodes()->size());
  int frame_size = register_count * kSystemPointerSize;
  DirectHandle<TrustedFixedArray> constant_pool =
      constant_array_builder()->ToFixedArray(isolate);
  Handle<BytecodeArray> bytecode_array = isolate->factory()->NewBytecodeArray(
      bytecode_size, &bytecodes()->front(), frame_size, parameter_count,
      max_arguments, constant_pool, handler_table);
  return bytecode_array;
}

template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
        Isolate* isolate, int register_count, uint16_t parameter_count,
        uint16_t max_arguments, DirectHandle<TrustedByteArray> handler_table);
template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
        LocalIsolate* isolate, int register_count, uint16_t parameter_count,
        uint16_t max_arguments, DirectHandle<TrustedByteArray> handler_table);

template <typename IsolateT>
DirectHandle<TrustedByteArray> BytecodeArrayWriter::ToSourcePositionTable(
    IsolateT* isolate) {
  DCHECK(!source_position_table_builder()->Lazy());
  DirectHandle<TrustedByteArray> source_position_table =
      source_position_table_builder()->Omit()
          ? isolate->factory()->empty_trusted_byte_array()
          : source_position_table_builder()->ToSourcePositionTable(isolate);
  return source_position_table;
}

template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    DirectHandle<TrustedByteArray> BytecodeArrayWriter::ToSourcePositionTable(
        Isolate* isolate);
template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    DirectHandle<TrustedByteArray> BytecodeArrayWriter::ToSourcePositionTable(
        LocalIsolate* isolate);

#ifdef DEBUG
int BytecodeArrayWriter::CheckBytecodeMatches(Tagged<BytecodeArray> bytecode) {
  bool mismatches = false;
  int bytecode_size = static_cast<int>(bytecodes()->size());
  const uint8_t* bytecode_ptr = &bytecodes()->front();
  if (bytecode_size != bytecode->length()) mismatches = true;

  // If there's a mismatch only in the length of the bytecode (very unlikely)
  // then the first mismatch will be the first extra bytecode.
  int first_mismatch = std::min(bytecode_size, bytecode->length());
  for (int i = 0; i < first_mismatch; ++i) {
    if (bytecode_ptr[i] != bytecode->get(i)) {
      mismatches = true;
      first_mismatch = i;
      break;
    }
  }

  if (mismatches) {
    return first_mismatch;
  }
  return -1;
}
#endif

void BytecodeArrayWriter::Write(BytecodeNode* node) {
  DCHECK(!Bytecodes::IsJump(node->bytecode()));

  if (exit_seen_in_block_) return;  // Don't emit dead code.
  UpdateExitSeenInBlock(node->bytecode());
  MaybeElideLastBytecode(node->bytecode(), node->source_info().is_valid());

  UpdateSourcePositionTable(node);
  EmitBytecode(node);
}

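// Illustrative note (editor's addition, not part of the V8 source): the
// exit_seen_in_block_ flag makes Write() a per-basic-block dead-code
// eliminator. For a function body like
//
//   return x;
//   y = 1;  // unreachable
//
// the Return bytecode sets the flag via UpdateExitSeenInBlock(), so the
// bytecodes for `y = 1` are silently dropped. The flag is cleared again when
// a label, loop header, or handler target starts a new basic block.
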
void BytecodeArrayWriter::WriteJump(BytecodeNode* node, BytecodeLabel* label) {
  DCHECK(Bytecodes::IsForwardJump(node->bytecode()));

  if (exit_seen_in_block_) return;  // Don't emit dead code.
  UpdateExitSeenInBlock(node->bytecode());
  MaybeElideLastBytecode(node->bytecode(), node->source_info().is_valid());

  UpdateSourcePositionTable(node);
  EmitJump(node, label);
}

void BytecodeArrayWriter::WriteJumpLoop(BytecodeNode* node,
                                        BytecodeLoopHeader* loop_header) {
  DCHECK_EQ(node->bytecode(), Bytecode::kJumpLoop);

  if (exit_seen_in_block_) return;  // Don't emit dead code.
  UpdateExitSeenInBlock(node->bytecode());
  MaybeElideLastBytecode(node->bytecode(), node->source_info().is_valid());

  UpdateSourcePositionTable(node);
  EmitJumpLoop(node, loop_header);
}

void BytecodeArrayWriter::WriteSwitch(BytecodeNode* node,
                                      BytecodeJumpTable* jump_table) {
  DCHECK(Bytecodes::IsSwitch(node->bytecode()));

  if (exit_seen_in_block_) return;  // Don't emit dead code.
  UpdateExitSeenInBlock(node->bytecode());
  MaybeElideLastBytecode(node->bytecode(), node->source_info().is_valid());

  UpdateSourcePositionTable(node);
  EmitSwitch(node, jump_table);
}

void BytecodeArrayWriter::BindLabel(BytecodeLabel* label) {
  DCHECK(label->has_referrer_jump());
  size_t current_offset = bytecodes()->size();
  // Update the jump instruction's location.
  PatchJump(current_offset, label->jump_offset());
  label->bind();
  StartBasicBlock();
}

void BytecodeArrayWriter::BindLoopHeader(BytecodeLoopHeader* loop_header) {
  size_t current_offset = bytecodes()->size();
  loop_header->bind_to(current_offset);
  // Don't start a basic block when the entire loop is dead.
  if (exit_seen_in_block_) return;
  StartBasicBlock();
}

void BytecodeArrayWriter::BindJumpTableEntry(BytecodeJumpTable* jump_table,
                                             int case_value) {
  DCHECK(!jump_table->is_bound(case_value));

  size_t current_offset = bytecodes()->size();
  size_t relative_jump = current_offset - jump_table->switch_bytecode_offset();

  constant_array_builder()->SetJumpTableSmi(
      jump_table->ConstantPoolEntryFor(case_value),
      Smi::FromInt(static_cast<int>(relative_jump)));
  jump_table->mark_bound(case_value);

  StartBasicBlock();
}

void BytecodeArrayWriter::BindHandlerTarget(
    HandlerTableBuilder* handler_table_builder, int handler_id) {
  size_t current_offset = bytecodes()->size();
  StartBasicBlock();
  handler_table_builder->SetHandlerTarget(handler_id, current_offset);
}

void BytecodeArrayWriter::BindTryRegionStart(
    HandlerTableBuilder* handler_table_builder, int handler_id) {
  size_t current_offset = bytecodes()->size();
  // Try blocks don't have to be in a separate basic block, but we do have to
  // invalidate the bytecode to avoid eliding it and changing the offset.
  InvalidateLastBytecode();
  handler_table_builder->SetTryRegionStart(handler_id, current_offset);
}

void BytecodeArrayWriter::BindTryRegionEnd(
    HandlerTableBuilder* handler_table_builder, int handler_id) {
  // Try blocks don't have to be in a separate basic block, but we do have to
  // invalidate the bytecode to avoid eliding it and changing the offset.
  InvalidateLastBytecode();
  size_t current_offset = bytecodes()->size();
  handler_table_builder->SetTryRegionEnd(handler_id, current_offset);
}

void BytecodeArrayWriter::SetFunctionEntrySourcePosition(int position) {
  bool is_statement = false;
  source_position_table_builder_.AddPosition(
      kFunctionEntryBytecodeOffset, SourcePosition(position), is_statement);
}

void BytecodeArrayWriter::StartBasicBlock() {
  InvalidateLastBytecode();
  exit_seen_in_block_ = false;
}

void BytecodeArrayWriter::UpdateSourcePositionTable(
    const BytecodeNode* const node) {
  int bytecode_offset = static_cast<int>(bytecodes()->size());
  const BytecodeSourceInfo& source_info = node->source_info();
  if (source_info.is_valid()) {
    source_position_table_builder()->AddPosition(
        bytecode_offset, SourcePosition(source_info.source_position()),
        source_info.is_statement());
  }
}

void BytecodeArrayWriter::UpdateExitSeenInBlock(Bytecode bytecode) {
  switch (bytecode) {
    case Bytecode::kReturn:
    case Bytecode::kThrow:
    case Bytecode::kReThrow:
    case Bytecode::kAbort:
    case Bytecode::kJump:
    case Bytecode::kJumpLoop:
    case Bytecode::kJumpConstant:
    case Bytecode::kSuspendGenerator:
      exit_seen_in_block_ = true;
      break;
    default:
      break;
  }
}

void BytecodeArrayWriter::MaybeElideLastBytecode(Bytecode next_bytecode,
                                                 bool has_source_info) {
  if (!elide_noneffectful_bytecodes_) return;

  // If the last bytecode loaded the accumulator without any external effect,
  // and the next bytecode clobbers this load without reading the accumulator,
  // then the previous bytecode can be elided as it has no effect.
  if (Bytecodes::IsAccumulatorLoadWithoutEffects(last_bytecode_) &&
      Bytecodes::GetImplicitRegisterUse(next_bytecode) ==
          ImplicitRegisterUse::kWriteAccumulator &&
      (!last_bytecode_had_source_info_ || !has_source_info)) {
    DCHECK_GT(bytecodes()->size(), last_bytecode_offset_);
    bytecodes()->resize(last_bytecode_offset_);
    // If the last bytecode had source info we will transfer the source info
    // to this bytecode.
    has_source_info |= last_bytecode_had_source_info_;
  }
  last_bytecode_ = next_bytecode;
  last_bytecode_had_source_info_ = has_source_info;
  last_bytecode_offset_ = bytecodes()->size();
}

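// Illustrative example (editor's addition, not part of the V8 source): with
// v8_flags.ignition_elide_noneffectful_bytecodes enabled, back-to-back
// accumulator loads collapse. Given
//
//   LdaSmi [1]   // accumulator load without observable effects
//   LdaSmi [2]   // writes the accumulator without reading it first
//
// the writer truncates bytecodes_ back to last_bytecode_offset_ so that only
// `LdaSmi [2]` survives, and any source position attached to the elided load
// is transferred to the surviving bytecode through has_source_info.
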
void BytecodeArrayWriter::InvalidateLastBytecode() {
  last_bytecode_ = Bytecode::kIllegal;
}

void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
  DCHECK_NE(node->bytecode(), Bytecode::kIllegal);

  Bytecode bytecode = node->bytecode();
  OperandScale operand_scale = node->operand_scale();

  if (operand_scale != OperandScale::kSingle) {
    Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale);
    bytecodes()->push_back(Bytecodes::ToByte(prefix));
  }
  bytecodes()->push_back(Bytecodes::ToByte(bytecode));

  const uint32_t* const operands = node->operands();
  const int operand_count = node->operand_count();
  const OperandSize* operand_sizes =
      Bytecodes::GetOperandSizes(bytecode, operand_scale);
  for (int i = 0; i < operand_count; ++i) {
    switch (operand_sizes[i]) {
      case OperandSize::kNone:
        UNREACHABLE();
      case OperandSize::kByte:
        bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
        break;
      case OperandSize::kShort: {
        uint16_t operand = static_cast<uint16_t>(operands[i]);
        const uint8_t* raw_operand = reinterpret_cast<const uint8_t*>(&operand);
        bytecodes()->push_back(raw_operand[0]);
        bytecodes()->push_back(raw_operand[1]);
        break;
      }
      case OperandSize::kQuad: {
        const uint8_t* raw_operand =
            reinterpret_cast<const uint8_t*>(&operands[i]);
        bytecodes()->push_back(raw_operand[0]);
        bytecodes()->push_back(raw_operand[1]);
        bytecodes()->push_back(raw_operand[2]);
        bytecodes()->push_back(raw_operand[3]);
        break;
      }
    }
  }
}

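// Illustrative encoding (editor's addition, not part of the V8 source):
// operand bytes are copied in host byte order, so on a little-endian target
// a scaled bytecode such as `LdaSmi.Wide [0x1234]` is laid out as
//
//   Bytecodes::ToByte(Bytecode::kWide)     // scaling prefix
//   Bytecodes::ToByte(Bytecode::kLdaSmi)   // the bytecode itself
//   0x34                                   // operand byte 0 (low)
//   0x12                                   // operand byte 1 (high)
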
// static
Bytecode GetJumpWithConstantOperand(Bytecode jump_bytecode) {
  switch (jump_bytecode) {
    case Bytecode::kJump:
      return Bytecode::kJumpConstant;
    case Bytecode::kJumpIfTrue:
      return Bytecode::kJumpIfTrueConstant;
    case Bytecode::kJumpIfFalse:
      return Bytecode::kJumpIfFalseConstant;
    case Bytecode::kJumpIfToBooleanTrue:
      return Bytecode::kJumpIfToBooleanTrueConstant;
    case Bytecode::kJumpIfToBooleanFalse:
      return Bytecode::kJumpIfToBooleanFalseConstant;
    case Bytecode::kJumpIfNull:
      return Bytecode::kJumpIfNullConstant;
    case Bytecode::kJumpIfNotNull:
      return Bytecode::kJumpIfNotNullConstant;
    case Bytecode::kJumpIfUndefined:
      return Bytecode::kJumpIfUndefinedConstant;
    case Bytecode::kJumpIfNotUndefined:
      return Bytecode::kJumpIfNotUndefinedConstant;
    case Bytecode::kJumpIfUndefinedOrNull:
      return Bytecode::kJumpIfUndefinedOrNullConstant;
    case Bytecode::kJumpIfJSReceiver:
      return Bytecode::kJumpIfJSReceiverConstant;
    case Bytecode::kJumpIfForInDone:
      return Bytecode::kJumpIfForInDoneConstant;
    default:
      UNREACHABLE();
  }
}

void BytecodeArrayWriter::PatchJumpWith8BitOperand(size_t jump_location,
                                                   int delta) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  DCHECK(Bytecodes::IsForwardJump(jump_bytecode));
  DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
  DCHECK_EQ(Bytecodes::GetOperandType(jump_bytecode, 0), OperandType::kUImm);
  DCHECK_GT(delta, 0);
  size_t operand_location = jump_location + 1;
  DCHECK_EQ(bytecodes()->at(operand_location), k8BitJumpPlaceholder);
  if (Bytecodes::ScaleForUnsignedOperand(delta) == OperandScale::kSingle) {
    // The jump fits within the range of an UImm8 operand, so cancel
    // the reservation and jump directly.
    constant_array_builder()->DiscardReservedEntry(OperandSize::kByte);
    bytecodes()->at(operand_location) = static_cast<uint8_t>(delta);
  } else {
    // The jump does not fit within the range of an UImm8 operand, so
    // commit reservation putting the offset into the constant pool,
    // and update the jump instruction and operand.
    size_t entry = constant_array_builder()->CommitReservedEntry(
        OperandSize::kByte, Smi::FromInt(delta));
    DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
              OperandSize::kByte);
    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
    bytecodes()->at(operand_location) = static_cast<uint8_t>(entry);
  }
}

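// Worked example (editor's addition, not part of the V8 source): a forward
// jump emitted at offset 10 whose label binds at offset 300 has delta = 290,
// which does not fit in a UImm8 operand. The delta is therefore stored as a
// Smi in the reserved constant pool slot, the bytecode is rewritten in place
// (e.g. kJump becomes kJumpConstant), and the operand byte now holds the
// constant pool index rather than the jump delta.
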
void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location,
                                                    int delta) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  DCHECK(Bytecodes::IsForwardJump(jump_bytecode));
  DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
  DCHECK_EQ(Bytecodes::GetOperandType(jump_bytecode, 0), OperandType::kUImm);
  DCHECK_GT(delta, 0);
  size_t operand_location = jump_location + 1;
  uint8_t operand_bytes[2];
  if (Bytecodes::ScaleForUnsignedOperand(delta) <= OperandScale::kDouble) {
    // The jump fits within the range of an Imm16 operand, so cancel
    // the reservation and jump directly.
    constant_array_builder()->DiscardReservedEntry(OperandSize::kShort);
    base::WriteUnalignedValue<uint16_t>(
        reinterpret_cast<Address>(operand_bytes), static_cast<uint16_t>(delta));
  } else {
    // The jump does not fit within the range of an Imm16 operand, so
    // commit reservation putting the offset into the constant pool,
    // and update the jump instruction and operand.
    size_t entry = constant_array_builder()->CommitReservedEntry(
        OperandSize::kShort, Smi::FromInt(delta));
    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
    base::WriteUnalignedValue<uint16_t>(
        reinterpret_cast<Address>(operand_bytes), static_cast<uint16_t>(entry));
  }
  DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder);
  bytecodes()->at(operand_location++) = operand_bytes[0];
  bytecodes()->at(operand_location) = operand_bytes[1];
}

void BytecodeArrayWriter::PatchJumpWith32BitOperand(size_t jump_location,
                                                    int delta) {
  DCHECK(Bytecodes::IsJumpImmediate(
      Bytecodes::FromByte(bytecodes()->at(jump_location))));
  constant_array_builder()->DiscardReservedEntry(OperandSize::kQuad);
  uint8_t operand_bytes[4];
  base::WriteUnalignedValue<uint32_t>(reinterpret_cast<Address>(operand_bytes),
                                      static_cast<uint32_t>(delta));
  size_t operand_location = jump_location + 1;
  DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 2) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 3) == k8BitJumpPlaceholder);
  bytecodes()->at(operand_location++) = operand_bytes[0];
  bytecodes()->at(operand_location++) = operand_bytes[1];
  bytecodes()->at(operand_location++) = operand_bytes[2];
  bytecodes()->at(operand_location) = operand_bytes[3];
}

void BytecodeArrayWriter::PatchJump(size_t jump_target, size_t jump_location) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  int delta = static_cast<int>(jump_target - jump_location);
  int prefix_offset = 0;
  OperandScale operand_scale = OperandScale::kSingle;
  if (Bytecodes::IsPrefixScalingBytecode(jump_bytecode)) {
    // If a prefix scaling bytecode is emitted the target offset is one
    // less than the case of no prefix scaling bytecode.
    delta -= 1;
    prefix_offset = 1;
    operand_scale = Bytecodes::PrefixBytecodeToOperandScale(jump_bytecode);
    jump_bytecode =
        Bytecodes::FromByte(bytecodes()->at(jump_location + prefix_offset));
  }

  DCHECK(Bytecodes::IsJump(jump_bytecode));
  switch (operand_scale) {
    case OperandScale::kSingle:
      PatchJumpWith8BitOperand(jump_location, delta);
      break;
    case OperandScale::kDouble:
      PatchJumpWith16BitOperand(jump_location + prefix_offset, delta);
      break;
    case OperandScale::kQuadruple:
      PatchJumpWith32BitOperand(jump_location + prefix_offset, delta);
      break;
    default:
      UNREACHABLE();
  }
  unbound_jumps_--;
}

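// Lifecycle of a forward jump (editor's summary, not part of the V8 source):
//   1. EmitJump() below writes the jump with a placeholder operand and
//      reserves a matching constant pool slot (unbound_jumps_++).
//   2. BindLabel() later computes the target offset and calls PatchJump().
//   3. PatchJump() skips any kWide/kExtraWide prefix, then dispatches on the
//      operand scale to rewrite the placeholder in place (unbound_jumps_--).
// ToBytecodeArray() checks that unbound_jumps_ is zero, i.e. that every
// forward jump was eventually bound.
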
void BytecodeArrayWriter::EmitJumpLoop(BytecodeNode* node,
                                       BytecodeLoopHeader* loop_header) {
  DCHECK_EQ(node->bytecode(), Bytecode::kJumpLoop);
  DCHECK_EQ(0u, node->operand(0));

  size_t current_offset = bytecodes()->size();

  CHECK_GE(current_offset, loop_header->offset());
  CHECK_LE(current_offset, static_cast<size_t>(kMaxUInt32));

  // Update the actual jump offset now that we know the bytecode offset of both
  // the target loop header and this JumpLoop bytecode.
  //
  // The label has been bound already so this is a backwards jump.
  uint32_t delta =
      static_cast<uint32_t>(current_offset - loop_header->offset());
  // This JumpLoop bytecode itself may have a kWide or kExtraWide prefix; if
  // so, bump the delta to account for it.
  const bool emits_prefix_bytecode =
      Bytecodes::OperandScaleRequiresPrefixBytecode(node->operand_scale()) ||
      Bytecodes::OperandScaleRequiresPrefixBytecode(
          Bytecodes::ScaleForUnsignedOperand(delta));
  if (emits_prefix_bytecode) {
    static constexpr int kPrefixBytecodeSize = 1;
    delta += kPrefixBytecodeSize;
    DCHECK_EQ(Bytecodes::Size(Bytecode::kWide, OperandScale::kSingle),
              kPrefixBytecodeSize);
    DCHECK_EQ(Bytecodes::Size(Bytecode::kExtraWide, OperandScale::kSingle),
              kPrefixBytecodeSize);
  }
  node->update_operand0(delta);
  DCHECK_EQ(
      Bytecodes::OperandScaleRequiresPrefixBytecode(node->operand_scale()),
      emits_prefix_bytecode);

  EmitBytecode(node);
}

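// Worked example (editor's addition, not part of the V8 source): a loop
// header bound at offset 4 with this JumpLoop emitted at offset 260 gives
// delta = 256. Encoding 256 requires a wider-than-byte operand, so a kWide
// prefix will be emitted, and the one-byte prefix is folded into the delta,
// making the final operand 257.
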
void BytecodeArrayWriter::EmitJump(BytecodeNode* node, BytecodeLabel* label) {
  DCHECK(Bytecodes::IsForwardJump(node->bytecode()));
  DCHECK_EQ(0u, node->operand(0));

  size_t current_offset = bytecodes()->size();

  // The label has not yet been bound so this is a forward reference
  // that will be patched when the label is bound. We create a
  // reservation in the constant pool so the jump can be patched
  // when the label is bound. The reservation means the maximum size
  // of the operand for the constant is known and the jump can
  // be emitted into the bytecode stream with space for the operand.
  unbound_jumps_++;
  label->set_referrer(current_offset);
  OperandSize reserved_operand_size =
      constant_array_builder()->CreateReservedEntry(
          static_cast<OperandSize>(node->operand_scale()));
  DCHECK_NE(Bytecode::kJumpLoop, node->bytecode());
  switch (reserved_operand_size) {
    case OperandSize::kNone:
      UNREACHABLE();
    case OperandSize::kByte:
      node->update_operand0(k8BitJumpPlaceholder);
      break;
    case OperandSize::kShort:
      node->update_operand0(k16BitJumpPlaceholder);
      break;
    case OperandSize::kQuad:
      node->update_operand0(k32BitJumpPlaceholder);
      break;
  }
  EmitBytecode(node);
}

void BytecodeArrayWriter::EmitSwitch(BytecodeNode* node,
                                     BytecodeJumpTable* jump_table) {
  DCHECK(Bytecodes::IsSwitch(node->bytecode()));

  size_t current_offset = bytecodes()->size();
  if (node->operand_scale() > OperandScale::kSingle) {
    // Adjust for scaling byte prefix.
    current_offset += 1;
  }
  jump_table->set_switch_bytecode_offset(current_offset);

  EmitBytecode(node);
}

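// Illustrative note (editor's addition, not part of the V8 source): the
// recorded switch_bytecode_offset deliberately excludes any kWide/kExtraWide
// prefix, so the relative offsets stored by BindJumpTableEntry() above are
// measured from the switch bytecode itself. For a prefixed switch emitted at
// offset 100 (the switch bytecode proper sits at 101) whose case binds at
// offset 120, the constant pool entry for that case holds Smi(120 - 101),
// i.e. Smi(19).
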
}  // namespace interpreter
}  // namespace internal
}  // namespace v8