// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/arguments.h"
#include "src/compiler.h"
#include "src/deoptimizer.h"
#include "src/frames.h"
#include "src/full-codegen.h"
#include "src/isolate-inl.h"
#include "src/runtime/runtime-utils.h"
#include "src/v8threads.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {

RUNTIME_FUNCTION(Runtime_CompileLazy) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
#ifdef DEBUG
  if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
    PrintF("[unoptimized: ");
    function->PrintName();
    PrintF("]\n");
  }
#endif

  // Compile the target function.
  DCHECK(function->shared()->allows_lazy_compilation());

  Handle<Code> code;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, code,
                                     Compiler::GetLazyCode(function));
  DCHECK(code->kind() == Code::FUNCTION ||
         code->kind() == Code::OPTIMIZED_FUNCTION);
  function->ReplaceCode(*code);
  return *code;
}


RUNTIME_FUNCTION(Runtime_CompileOptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 2);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  CONVERT_BOOLEAN_ARG_CHECKED(concurrent, 1);
  DCHECK(isolate->use_crankshaft());

  Compiler::ConcurrencyMode mode =
      concurrent ? Compiler::CONCURRENT : Compiler::NOT_CONCURRENT;
  Handle<Code> code;
  Handle<Code> unoptimized(function->shared()->code());
  if (Compiler::GetOptimizedCode(function, unoptimized, mode).ToHandle(&code)) {
    // Optimization succeeded, return optimized code.
    function->ReplaceCode(*code);
  } else {
    // Optimization failed, get unoptimized code.
    if (isolate->has_pending_exception()) {  // Possible stack overflow.
      return isolate->heap()->exception();
    }
    code = Handle<Code>(function->shared()->code(), isolate);
    if (code->kind() != Code::FUNCTION &&
        code->kind() != Code::OPTIMIZED_FUNCTION) {
      ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
          isolate, code, Compiler::GetUnoptimizedCode(function));
    }
    function->ReplaceCode(*code);
  }

  DCHECK(function->code()->kind() == Code::FUNCTION ||
         function->code()->kind() == Code::OPTIMIZED_FUNCTION ||
         function->IsInOptimizationQueue());
  return function->code();
}
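
// Usage sketch (illustrative, not part of the original file): with a d8
// shell built with --allow-natives-syntax, the two runtime entries above
// can be exercised directly from JavaScript. %OptimizeFunctionOnNextCall
// is the standard natives-syntax hook; the function f is made up.
//
//   function f(x) { return x + 1; }
//   f(1);                            // First call compiles f lazily.
//   %OptimizeFunctionOnNextCall(f);  // Request optimized code for f.
//   f(2);                            // Enters the optimizing pipeline.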

RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 0);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());
  delete deoptimizer;
  return isolate->heap()->undefined_value();
}


class ActivationsFinder : public ThreadVisitor {
 public:
  Code* code_;
  bool has_code_activations_;

  explicit ActivationsFinder(Code* code)
      : code_(code), has_code_activations_(false) {}

  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
    JavaScriptFrameIterator it(isolate, top);
    VisitFrames(&it);
  }

  void VisitFrames(JavaScriptFrameIterator* it) {
    for (; !it->done(); it->Advance()) {
      JavaScriptFrame* frame = it->frame();
      if (code_->contains(frame->pc())) has_code_activations_ = true;
    }
  }
};


RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  JavaScriptFrame* frame = it.frame();
  RUNTIME_ASSERT(frame->function()->IsJSFunction());
  DCHECK(frame->function() == *function);

  // Avoid doing too much work when running with --always-opt and keep
  // the optimized code around.
  if (FLAG_always_opt || type == Deoptimizer::LAZY) {
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same function and code.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      function->ReplaceCode(function->shared()->code());
      // Evict optimized code for this function from the cache so that it
      // doesn't get used for new closures.
      function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                    "notify deoptimized");
    }
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}


static bool IsSuitableForOnStackReplacement(Isolate* isolate,
                                            Handle<JSFunction> function,
                                            Handle<Code> current_code) {
  // Keep track of whether we've succeeded in optimizing.
  if (!current_code->optimizable()) return false;
  // If we are trying to do OSR when there are already optimized
  // activations of the function, it means (a) the function is directly or
  // indirectly recursive and (b) an optimized invocation has been
  // deoptimized so that we are currently in an unoptimized activation.
  // Check for optimized activations of this function.
  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    if (frame->is_optimized() && frame->function() == *function) return false;
  }

  return true;
}
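
// Note on the control flow below (summary, not in the original source):
// while an unoptimized frame spins in a hot loop, the back-edge interrupt
// check brings execution into Runtime_CompileForOnStackReplacement. The
// runtime maps the current pc back to the BailoutId of the loop, obtains
// optimized code with an OSR entry at that id (either synchronously or by
// polling a concurrent compile job), and the calling code then jumps into
// the optimized code in the middle of the loop.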

RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  Handle<Code> caller_code(function->shared()->code());

  // We're not prepared to handle a function with arguments object.
  DCHECK(!function->shared()->uses_arguments());

  RUNTIME_ASSERT(FLAG_use_osr);

  // Passing the PC in the javascript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info. It may have been replaced to include deoptimization data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());

#ifdef DEBUG
  DCHECK_EQ(frame->function(), *function);
  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK(caller_code->contains(frame->pc()));
#endif  // DEBUG

  BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
  DCHECK(!ast_id.IsNone());

  Compiler::ConcurrencyMode mode =
      isolate->concurrent_osr_enabled() &&
              (function->shared()->ast_node_count() > 512)
          ? Compiler::CONCURRENT
          : Compiler::NOT_CONCURRENT;
  Handle<Code> result = Handle<Code>::null();

  OptimizedCompileJob* job = NULL;
  if (mode == Compiler::CONCURRENT) {
    // Gate the OSR entry with a stack check.
    BackEdgeTable::AddStackCheck(caller_code, pc_offset);
    // Poll already queued compilation jobs.
    OptimizingCompilerThread* thread = isolate->optimizing_compiler_thread();
    if (thread->IsQueuedForOSR(function, ast_id)) {
      if (FLAG_trace_osr) {
        PrintF("[OSR - Still waiting for queued: ");
        function->PrintName();
        PrintF(" at AST id %d]\n", ast_id.ToInt());
      }
      return NULL;
    }

    job = thread->FindReadyOSRCandidate(function, ast_id);
  }

  if (job != NULL) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Found ready: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    result = Compiler::GetConcurrentlyOptimizedCode(job);
  } else if (IsSuitableForOnStackReplacement(isolate, function, caller_code)) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    MaybeHandle<Code> maybe_result =
        Compiler::GetOptimizedCode(function, caller_code, mode, ast_id);
    if (maybe_result.ToHandle(&result) &&
        result.is_identical_to(isolate->builtins()->InOptimizationQueue())) {
      // Optimization is queued. Return to check later.
      return NULL;
    }
  }

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(isolate, *caller_code);

  // Check whether we ended up with usable optimized code.
  if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      if (result->is_turbofanned()) {
        // TurboFanned OSR code cannot be installed into the function.
        // But the function is obviously hot, so optimize it next time.
        function->ReplaceCode(
            isolate->builtins()->builtin(Builtins::kCompileOptimized));
      } else {
        // Crankshafted OSR code can be installed into the function.
        function->ReplaceCode(*result);
      }
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  return NULL;
}


RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  // First check if this is a real stack overflow.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed()) {
    SealHandleScope shs(isolate);
    return isolate->StackOverflow();
  }

  isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
  return (function->IsOptimized()) ? function->code()
                                   : function->shared()->code();
}


bool CodeGenerationFromStringsAllowed(Isolate* isolate,
                                      Handle<Context> context) {
  DCHECK(context->allow_code_gen_from_strings()->IsFalse());
  // Check with callback if set.
  AllowCodeGenerationFromStringsCallback callback =
      isolate->allow_code_gen_callback();
  if (callback == NULL) {
    // No callback set and code generation disallowed.
    return false;
  } else {
    // Callback set. Let it decide if code generation is allowed.
    VMState<EXTERNAL> state(isolate);
    return callback(v8::Utils::ToLocal(context));
  }
}
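
// Embedder-side sketch (illustrative; the callback name AllowCodeGen is
// made up, the public API entry point is real for this vintage of V8):
//
//   bool AllowCodeGen(v8::Local<v8::Context> context) {
//     return false;  // Reject eval() / new Function() everywhere.
//   }
//   ...
//   v8::V8::SetAllowCodeGenerationFromStringsCallback(AllowCodeGen);
//
// If no callback is installed and the context disallows code generation
// from strings, Runtime_CompileString below throws an EvalError.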

RUNTIME_FUNCTION(Runtime_CompileString) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 3);
  CONVERT_ARG_HANDLE_CHECKED(String, source, 0);
  CONVERT_BOOLEAN_ARG_CHECKED(function_literal_only, 1);
  CONVERT_SMI_ARG_CHECKED(source_offset, 2);

  // Extract native context.
  Handle<Context> context(isolate->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, context)) {
    Handle<Object> error_message =
        context->ErrorMessageForCodeGenerationFromStrings();
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewEvalError("code_gen_from_strings",
                              HandleVector<Object>(&error_message, 1)));
  }

  // Compile source string in the native context.
  ParseRestriction restriction = function_literal_only
                                     ? ONLY_SINGLE_FUNCTION_LITERAL
                                     : NO_PARSE_RESTRICTION;
  Handle<SharedFunctionInfo> outer_info(context->closure()->shared(), isolate);
  Handle<JSFunction> fun;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, fun,
      Compiler::GetFunctionFromEval(source, outer_info, context, SLOPPY,
                                    restriction, RelocInfo::kNoPosition));
  if (function_literal_only) {
    // The actual body is wrapped, which shifts line numbers.
    Handle<Script> script(Script::cast(fun->shared()->script()), isolate);
    script->set_line_offset(Smi::FromInt(-source_offset));
  }
  return *fun;
}