#if defined(USE_SIGNALS)
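// AtomicGuard acquires the given AtomicMutex (spinning while |is_blocking| is
// true, otherwise attempting the exchange once) and releases it again when the
// guard goes out of scope.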
AtomicGuard::AtomicGuard(AtomicMutex* atomic, bool is_blocking)
    : atomic_(atomic), is_success_(false) {
  do {
    bool expected = false;
    is_success_ = atomic->compare_exchange_strong(expected, true);
  } while (is_blocking && !is_success_);
}
AtomicGuard::~AtomicGuard() {
  if (!is_success_) return;
  atomic_->store(false);
}

bool AtomicGuard::is_success() const { return is_success_; }
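// Per-thread data for a sampler on POSIX platforms: the numeric thread id is
// the key into SamplerManager's map, and the pthread handle identifies the
// thread that will later receive SIGPROF.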
class Sampler::PlatformData {
 public:
  PlatformData()
      : vm_tid_(base::OS::GetCurrentThreadId()), vm_tself_(pthread_self()) {}
  int vm_tid() const { return vm_tid_; }
  pthread_t vm_tself() const { return vm_tself_; }

 private:
  int vm_tid_;
  pthread_t vm_tself_;
};
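// Registers |sampler| under its thread id, creating that thread's sampler
// list on first use and avoiding duplicate entries.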
void SamplerManager::AddSampler(Sampler* sampler) {
  AtomicGuard atomic_guard(&samplers_access_counter_);
  int thread_id = sampler->platform_data()->vm_tid();
  auto it = sampler_map_.find(thread_id);
  if (it == sampler_map_.end()) {
    SamplerList samplers;
    samplers.push_back(sampler);
    sampler_map_.emplace(thread_id, std::move(samplers));
  } else {
    SamplerList& samplers = it->second;
    auto sampler_it = std::find(samplers.begin(), samplers.end(), sampler);
    if (sampler_it == samplers.end()) samplers.push_back(sampler);
  }
}
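// Removes |sampler| from its thread's list and drops the list entirely once
// it becomes empty.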
void SamplerManager::RemoveSampler(Sampler* sampler) {
  AtomicGuard atomic_guard(&samplers_access_counter_);
  int thread_id = sampler->platform_data()->vm_tid();
  auto it = sampler_map_.find(thread_id);
  SamplerList& samplers = it->second;
  samplers.erase(std::remove(samplers.begin(), samplers.end(), sampler),
                 samplers.end());
  if (samplers.empty()) {
    sampler_map_.erase(it);
  }
}
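// Runs in the SIGPROF handler on the interrupted thread: samples every
// sampler registered for this thread. The non-blocking AtomicGuard makes it
// bail out instead of blocking inside a signal handler while the map is
// being modified.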
void SamplerManager::DoSample(const v8::RegisterState& state) {
  AtomicGuard atomic_guard(&samplers_access_counter_, false);
  if (!atomic_guard.is_success()) return;
  int thread_id = base::OS::GetCurrentThreadId();
  auto it = sampler_map_.find(thread_id);
  if (it == sampler_map_.end()) return;
  SamplerList& samplers = it->second;

  for (Sampler* sampler : samplers) {
    Isolate* isolate = sampler->isolate();
    // Only sample isolates that are set up and currently in use.
    if (isolate == nullptr || !isolate->IsInUse()) continue;
    sampler->SampleStack(state);
  }
}
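// The manager is a process-wide singleton that is never destroyed, so it is
// always safe to reach from the signal handler.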
SamplerManager* SamplerManager::instance() {
  static base::LeakyObject<SamplerManager> instance;
  return instance.get();
}
#elif V8_OS_WIN || V8_OS_CYGWIN
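// On Windows the sampler keeps a duplicated handle to the profiled thread so
// that the sampling thread can suspend it and read its context.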
class Sampler::PlatformData {
 public:
  PlatformData() {
    HANDLE current_process = GetCurrentProcess();
    DuplicateHandle(
        current_process, GetCurrentThread(), current_process, &profiled_thread_,
        THREAD_GET_CONTEXT | THREAD_SUSPEND_RESUME | THREAD_QUERY_INFORMATION,
        FALSE, 0);
  }
  ~PlatformData() {
    if (profiled_thread_ != nullptr) {
      CloseHandle(profiled_thread_);
      profiled_thread_ = nullptr;
    }
  }
  HANDLE profiled_thread() { return profiled_thread_; }

 private:
  HANDLE profiled_thread_ = nullptr;
};
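// On Fuchsia the profiled thread is tracked via a duplicated zx handle that
// is closed again when the platform data is destroyed.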
class Sampler::PlatformData {
 public:
  PlatformData() {
    zx_handle_duplicate(zx_thread_self(), ZX_RIGHT_SAME_RIGHTS,
                        &profiled_thread_);
  }
  ~PlatformData() {
    if (profiled_thread_ != ZX_HANDLE_INVALID) {
      zx_handle_close(profiled_thread_);
      profiled_thread_ = ZX_HANDLE_INVALID;
    }
  }
  zx_handle_t profiled_thread() { return profiled_thread_; }
 private:
  zx_handle_t profiled_thread_ = ZX_HANDLE_INVALID;
};
#endif

#if defined(USE_SIGNALS)
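// SignalHandler installs a single process-wide SIGPROF handler and
// reference-counts the samplers that rely on it.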
class SignalHandler {
 public:
  static void IncreaseSamplerCount() {
    if (++client_count_ == 1) Install();
  }

  static void DecreaseSamplerCount() {
    if (--client_count_ == 0) Restore();
  }

  static bool Installed() { return signal_handler_installed_; }
 private:
  static void Install() {
    struct sigaction sa;
    sa.sa_sigaction = &HandleProfilerSignal;
    sigemptyset(&sa.sa_mask);
#if V8_OS_QNX
    sa.sa_flags = SA_SIGINFO | SA_ONSTACK;
#else
    sa.sa_flags = SA_RESTART | SA_SIGINFO | SA_ONSTACK;
#endif
    signal_handler_installed_ =
        (sigaction(SIGPROF, &sa, &old_signal_handler_) == 0);
  }
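  // Restores the previous SIGPROF disposition once the last sampler goes away.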
  static void Restore() {
    if (signal_handler_installed_) {
      signal_handler_installed_ = false;
#if V8_OS_AIX || V8_TARGET_ARCH_S390X
      // Give any in-flight SIGPROF a chance to be delivered before the old
      // handler is reinstated.
      base::OS::Sleep(base::TimeDelta::FromMicroseconds(10));
#endif
      sigaction(SIGPROF, &old_signal_handler_, nullptr);
    }
  }
  static void FillRegisterState(void* context, RegisterState* regs);
  static void HandleProfilerSignal(int signal, siginfo_t* info, void* context);
  static int client_count_;
  static bool signal_handler_installed_;
  static struct sigaction old_signal_handler_;
};

int SignalHandler::client_count_ = 0;
struct sigaction SignalHandler::old_signal_handler_;
bool SignalHandler::signal_handler_installed_ = false;
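// The SIGPROF handler: capture the interrupted thread's registers and hand
// them to the SamplerManager.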
void SignalHandler::HandleProfilerSignal(int signal, siginfo_t* info,
                                         void* context) {
  if (signal != SIGPROF) return;
  v8::RegisterState state;
  FillRegisterState(context, &state);
  SamplerManager::instance()->DoSample(state);
}
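// Translates the OS- and architecture-specific signal context into V8's
// RegisterState (pc/sp/fp and, where available, lr).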
void SignalHandler::FillRegisterState(void* context, RegisterState* state) {
  ucontext_t* ucontext = reinterpret_cast<ucontext_t*>(context);
#if !(V8_OS_OPENBSD || V8_OS_ZOS || \
      (V8_OS_LINUX && (V8_HOST_ARCH_S390X || V8_HOST_ARCH_PPC64)))
  mcontext_t& mcontext = ucontext->uc_mcontext;
#elif V8_OS_ZOS
  __mcontext_t_* mcontext = reinterpret_cast<__mcontext_t_*>(context);
#endif
#if V8_HOST_ARCH_IA32
  state->pc = reinterpret_cast<void*>(mcontext.gregs[REG_EIP]);
  state->sp = reinterpret_cast<void*>(mcontext.gregs[REG_ESP]);
  state->fp = reinterpret_cast<void*>(mcontext.gregs[REG_EBP]);
#elif V8_HOST_ARCH_X64
  state->pc = reinterpret_cast<void*>(mcontext.gregs[REG_RIP]);
  state->sp = reinterpret_cast<void*>(mcontext.gregs[REG_RSP]);
  state->fp = reinterpret_cast<void*>(mcontext.gregs[REG_RBP]);
#elif V8_HOST_ARCH_ARM
#if V8_LIBC_GLIBC && !V8_GLIBC_PREREQ(2, 4)
  // Old GLibc ARM versions used a gregs[] array to access the register values.
  state->pc = reinterpret_cast<void*>(mcontext.gregs[R15]);
  state->sp = reinterpret_cast<void*>(mcontext.gregs[R13]);
  state->fp = reinterpret_cast<void*>(mcontext.gregs[R11]);
  state->lr = reinterpret_cast<void*>(mcontext.gregs[R14]);
#else
  state->pc = reinterpret_cast<void*>(mcontext.arm_pc);
  state->sp = reinterpret_cast<void*>(mcontext.arm_sp);
  state->fp = reinterpret_cast<void*>(mcontext.arm_fp);
  state->lr = reinterpret_cast<void*>(mcontext.arm_lr);
#endif
#elif V8_HOST_ARCH_ARM64
  state->pc = reinterpret_cast<void*>(mcontext.pc);
  state->sp = reinterpret_cast<void*>(mcontext.sp);
  // FP is an alias for x29.
  state->fp = reinterpret_cast<void*>(mcontext.regs[29]);
  // LR is an alias for x30.
  state->lr = reinterpret_cast<void*>(mcontext.regs[30]);
#elif V8_HOST_ARCH_MIPS64
  state->pc = reinterpret_cast<void*>(mcontext.pc);
  state->sp = reinterpret_cast<void*>(mcontext.gregs[29]);
  state->fp = reinterpret_cast<void*>(mcontext.gregs[30]);
#elif V8_HOST_ARCH_LOONG64
  state->pc = reinterpret_cast<void*>(mcontext.__pc);
  state->sp = reinterpret_cast<void*>(mcontext.__gregs[3]);
  state->fp = reinterpret_cast<void*>(mcontext.__gregs[22]);
#elif V8_HOST_ARCH_PPC64
#if V8_LIBC_GLIBC
  state->pc = reinterpret_cast<void*>(ucontext->uc_mcontext.regs->nip);
  state->sp = reinterpret_cast<void*>(ucontext->uc_mcontext.regs->gpr[PT_R1]);
  state->fp = reinterpret_cast<void*>(ucontext->uc_mcontext.regs->gpr[PT_R31]);
  state->lr = reinterpret_cast<void*>(ucontext->uc_mcontext.regs->link);
#else
  // Some C libraries, notably Musl, define the regs member as a void pointer.
  state->pc = reinterpret_cast<void*>(ucontext->uc_mcontext.gp_regs[32]);
  state->sp = reinterpret_cast<void*>(ucontext->uc_mcontext.gp_regs[1]);
  state->fp = reinterpret_cast<void*>(ucontext->uc_mcontext.gp_regs[31]);
  state->lr = reinterpret_cast<void*>(ucontext->uc_mcontext.gp_regs[36]);
#endif
#elif V8_HOST_ARCH_S390X
  state->pc = reinterpret_cast<void*>(ucontext->uc_mcontext.psw.addr);
  state->sp = reinterpret_cast<void*>(ucontext->uc_mcontext.gregs[15]);
  state->fp = reinterpret_cast<void*>(ucontext->uc_mcontext.gregs[11]);
  state->lr = reinterpret_cast<void*>(ucontext->uc_mcontext.gregs[14]);
#elif V8_HOST_ARCH_RISCV64 || V8_HOST_ARCH_RISCV32
  state->pc = reinterpret_cast<void*>(mcontext.__gregs[REG_PC]);
  state->sp = reinterpret_cast<void*>(mcontext.__gregs[REG_SP]);
  state->fp = reinterpret_cast<void*>(mcontext.__gregs[REG_S0]);
  state->lr = reinterpret_cast<void*>(mcontext.__gregs[REG_RA]);
#endif  // V8_HOST_ARCH_*
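// z/OS: registers come from the __mcontext_t_ layout (PSW address plus the
// general registers used as sp/fp/lr).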
  state->pc = reinterpret_cast<void*>(mcontext->__mc_psw);
  state->sp = reinterpret_cast<void*>(mcontext->__mc_gr[15]);
  state->fp = reinterpret_cast<void*>(mcontext->__mc_gr[11]);
  state->lr = reinterpret_cast<void*>(mcontext->__mc_gr[14]);
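// iOS: only arm64 and x64 (simulator) targets are expected here.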
#if V8_TARGET_ARCH_ARM64
  state->pc = reinterpret_cast<void*>(mcontext->__ss.__pc);
  state->sp = reinterpret_cast<void*>(mcontext->__ss.__sp);
  state->fp = reinterpret_cast<void*>(mcontext->__ss.__fp);
#elif V8_TARGET_ARCH_X64
  state->pc = reinterpret_cast<void*>(mcontext->__ss.__rip);
  state->sp = reinterpret_cast<void*>(mcontext->__ss.__rsp);
  state->fp = reinterpret_cast<void*>(mcontext->__ss.__rbp);
#else
#error Unexpected iOS target architecture.
#endif  // V8_TARGET_ARCH_ARM64
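// macOS: the register values live in the Darwin thread state (__ss).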
#if V8_HOST_ARCH_X64
  state->pc = reinterpret_cast<void*>(mcontext->__ss.__rip);
  state->sp = reinterpret_cast<void*>(mcontext->__ss.__rsp);
  state->fp = reinterpret_cast<void*>(mcontext->__ss.__rbp);
#elif V8_HOST_ARCH_IA32
  state->pc = reinterpret_cast<void*>(mcontext->__ss.__eip);
  state->sp = reinterpret_cast<void*>(mcontext->__ss.__esp);
  state->fp = reinterpret_cast<void*>(mcontext->__ss.__ebp);
#elif V8_HOST_ARCH_ARM64
  state->pc =
      reinterpret_cast<void*>(arm_thread_state64_get_pc(mcontext->__ss));
  state->sp =
      reinterpret_cast<void*>(arm_thread_state64_get_sp(mcontext->__ss));
  state->fp =
      reinterpret_cast<void*>(arm_thread_state64_get_fp(mcontext->__ss));
#endif  // V8_HOST_ARCH_*
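// FreeBSD: mc_* fields on x86, __gregs[] on ARM.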
#if V8_HOST_ARCH_IA32
  state->pc = reinterpret_cast<void*>(mcontext.mc_eip);
  state->sp = reinterpret_cast<void*>(mcontext.mc_esp);
  state->fp = reinterpret_cast<void*>(mcontext.mc_ebp);
#elif V8_HOST_ARCH_X64
  state->pc = reinterpret_cast<void*>(mcontext.mc_rip);
  state->sp = reinterpret_cast<void*>(mcontext.mc_rsp);
  state->fp = reinterpret_cast<void*>(mcontext.mc_rbp);
#elif V8_HOST_ARCH_ARM
  state->pc = reinterpret_cast<void*>(mcontext.__gregs[_REG_PC]);
  state->sp = reinterpret_cast<void*>(mcontext.__gregs[_REG_SP]);
  state->fp = reinterpret_cast<void*>(mcontext.__gregs[_REG_FP]);
#endif  // V8_HOST_ARCH_*
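// NetBSD: registers are exposed through __gregs[_REG_*].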
#if V8_HOST_ARCH_IA32
  state->pc = reinterpret_cast<void*>(mcontext.__gregs[_REG_EIP]);
  state->sp = reinterpret_cast<void*>(mcontext.__gregs[_REG_ESP]);
  state->fp = reinterpret_cast<void*>(mcontext.__gregs[_REG_EBP]);
#elif V8_HOST_ARCH_X64
  state->pc = reinterpret_cast<void*>(mcontext.__gregs[_REG_RIP]);
  state->sp = reinterpret_cast<void*>(mcontext.__gregs[_REG_RSP]);
  state->fp = reinterpret_cast<void*>(mcontext.__gregs[_REG_RBP]);
#endif  // V8_HOST_ARCH_*
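// OpenBSD: the signal context exposes the registers as sc_* fields.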
#if V8_HOST_ARCH_IA32
  state->pc = reinterpret_cast<void*>(ucontext->sc_eip);
  state->sp = reinterpret_cast<void*>(ucontext->sc_esp);
  state->fp = reinterpret_cast<void*>(ucontext->sc_ebp);
#elif V8_HOST_ARCH_X64
  state->pc = reinterpret_cast<void*>(ucontext->sc_rip);
  state->sp = reinterpret_cast<void*>(ucontext->sc_rsp);
  state->fp = reinterpret_cast<void*>(ucontext->sc_rbp);
#endif  // V8_HOST_ARCH_*
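// Solaris: generic REG_PC/REG_SP/REG_FP indices into gregs[].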
  state->pc = reinterpret_cast<void*>(mcontext.gregs[REG_PC]);
  state->sp = reinterpret_cast<void*>(mcontext.gregs[REG_SP]);
  state->fp = reinterpret_cast<void*>(mcontext.gregs[REG_FP]);
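// QNX: registers live in the mcontext's cpu member.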
#if V8_HOST_ARCH_IA32
  state->pc = reinterpret_cast<void*>(mcontext.cpu.eip);
  state->sp = reinterpret_cast<void*>(mcontext.cpu.esp);
  state->fp = reinterpret_cast<void*>(mcontext.cpu.ebp);
#elif V8_HOST_ARCH_ARM
  state->pc = reinterpret_cast<void*>(mcontext.cpu.gpr[ARM_REG_PC]);
  state->sp = reinterpret_cast<void*>(mcontext.cpu.gpr[ARM_REG_SP]);
  state->fp = reinterpret_cast<void*>(mcontext.cpu.gpr[ARM_REG_FP]);
#endif  // V8_HOST_ARCH_*
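// AIX: the jmp_context carries the instruction address register, the stack
// pointer (gpr1), the frame pointer (gpr31) and the link register.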
  state->pc = reinterpret_cast<void*>(mcontext.jmp_context.iar);
  state->sp = reinterpret_cast<void*>(mcontext.jmp_context.gpr[1]);
  state->fp = reinterpret_cast<void*>(mcontext.jmp_context.gpr[31]);
  state->lr = reinterpret_cast<void*>(mcontext.jmp_context.lr);
  SetMutexDeadlockDetectionMode(absl::OnDeadlockCycle::kIgnore);
void Sampler::Start() {
#if defined(USE_SIGNALS)
  SignalHandler::IncreaseSamplerCount();
  SamplerManager::instance()->AddSampler(this);
#endif
}
void Sampler::Stop() {
#if defined(USE_SIGNALS)
  SamplerManager::instance()->RemoveSampler(this);
  SignalHandler::DecreaseSamplerCount();
#endif
}
#if defined(USE_SIGNALS)
void Sampler::DoSample() {
  if (!SignalHandler::Installed()) return;
  // Interrupt the profiled thread with SIGPROF; the handler does the sampling.
  pthread_kill(platform_data()->vm_tself(), SIGPROF);
}
#elif V8_OS_WIN || V8_OS_CYGWIN

void Sampler::DoSample() {
  HANDLE profiled_thread = platform_data()->profiled_thread();
  if (profiled_thread == nullptr) return;

  const DWORD kSuspendFailed = static_cast<DWORD>(-1);
  if (SuspendThread(profiled_thread) == kSuspendFailed) return;

  // Context used for sampling the register state of the profiled thread.
  CONTEXT context;
  memset(&context, 0, sizeof(context));
  context.ContextFlags = CONTEXT_FULL;
  if (GetThreadContext(profiled_thread, &context) != 0) {
    v8::RegisterState state;
#if V8_HOST_ARCH_X64
    state.pc = reinterpret_cast<void*>(context.Rip);
    state.sp = reinterpret_cast<void*>(context.Rsp);
    state.fp = reinterpret_cast<void*>(context.Rbp);
#elif V8_HOST_ARCH_ARM64
    state.pc = reinterpret_cast<void*>(context.Pc);
    state.sp = reinterpret_cast<void*>(context.Sp);
    state.fp = reinterpret_cast<void*>(context.Fp);
#else
    state.pc = reinterpret_cast<void*>(context.Eip);
    state.sp = reinterpret_cast<void*>(context.Esp);
    state.fp = reinterpret_cast<void*>(context.Ebp);
#endif
    SampleStack(state);
  }
  ResumeThread(profiled_thread);
}
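// On Fuchsia the target thread is suspended via a suspend token, its general
// registers are read with zx_thread_read_state(), and the token is closed to
// resume it.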
#elif V8_OS_FUCHSIA

void Sampler::DoSample() {
  zx_handle_t profiled_thread = platform_data()->profiled_thread();
  if (profiled_thread == ZX_HANDLE_INVALID) return;

  zx_handle_t suspend_token = ZX_HANDLE_INVALID;
  if (zx_task_suspend_token(profiled_thread, &suspend_token) != ZX_OK) return;

  // Wait until the thread is actually suspended (or has terminated); give up
  // after a short timeout.
  zx_signals_t signals = 0;
  zx_status_t suspended = zx_object_wait_one(
      profiled_thread, ZX_THREAD_SUSPENDED | ZX_THREAD_TERMINATED,
      zx_deadline_after(ZX_MSEC(100)), &signals);
  if (suspended != ZX_OK || (signals & ZX_THREAD_SUSPENDED) == 0) {
    zx_handle_close(suspend_token);
    return;
  }

  zx_thread_state_general_regs_t thread_state = {};
  if (zx_thread_read_state(profiled_thread, ZX_THREAD_STATE_GENERAL_REGS,
                           &thread_state, sizeof(thread_state)) == ZX_OK) {
    v8::RegisterState state;
#if V8_HOST_ARCH_X64
    state.pc = reinterpret_cast<void*>(thread_state.rip);
    state.sp = reinterpret_cast<void*>(thread_state.rsp);
    state.fp = reinterpret_cast<void*>(thread_state.rbp);
#elif V8_HOST_ARCH_ARM64
    state.pc = reinterpret_cast<void*>(thread_state.pc);
    state.sp = reinterpret_cast<void*>(thread_state.sp);
    state.fp = reinterpret_cast<void*>(thread_state.r[29]);
#endif
    SampleStack(state);
  }

  zx_handle_close(suspend_token);
}
#if defined(ZX_THREAD_STATE_REGSET0)
#undef ZX_THREAD_STATE_GENERAL_REGS
#endif