// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/api.h"
#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/code-stub-assembler.h"
#include "src/heap/heap-inl.h"
#include "src/ic/accessor-assembler.h"
#include "src/macro-assembler.h"
#include "src/objects/debug-objects.h"
#include "src/objects/shared-function-info.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

template <typename T>
using TNode = compiler::TNode<T>;

// -----------------------------------------------------------------------------
// Interrupt and stack checks.

void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kInterrupt);
}

void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard);
}

// -----------------------------------------------------------------------------
// TurboFan support builtins.

TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);

  // Load the {object}'s elements.
  Node* source = LoadObjectField(object, JSObject::kElementsOffset);
  Node* target = CloneFixedArray(source, ExtractFixedArrayFlag::kFixedArrays);
  StoreObjectField(object, JSObject::kElementsOffset, target);
  Return(target);
}
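
// The two GrowFast*Elements builtins below grow the elements backing store of
// {object} in place so that {key} becomes a valid index; whenever the capacity
// cannot be grown here, they tail-call the %GrowArrayElements runtime function
// instead (the deferred {runtime} label).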

TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);

  Label runtime(this, Label::kDeferred);
  Node* elements = LoadElements(object);
  elements = TryGrowElementsCapacity(object, elements, PACKED_DOUBLE_ELEMENTS,
                                     key, &runtime);
  Return(elements);

  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, context, object, key);
}

TF_BUILTIN(GrowFastSmiOrObjectElements, CodeStubAssembler) {
  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);

  Label runtime(this, Label::kDeferred);
  Node* elements = LoadElements(object);
  elements =
      TryGrowElementsCapacity(object, elements, PACKED_ELEMENTS, key, &runtime);
  Return(elements);

  BIND(&runtime);
  TailCallRuntime(Runtime::kGrowArrayElements, context, object, key);
}
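
// NewArgumentsElements allocates the FixedArray that backs an arguments
// object: it copies {length} parameters from the given stack {frame} and
// pre-fills the first {mapped_count} slots with the hole so they can serve as
// the unmapped part of mapped (aliased) arguments. Allocation happens in new
// space when the length is small enough; otherwise it defers to the runtime.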

TF_BUILTIN(NewArgumentsElements, CodeStubAssembler) {
  Node* frame = Parameter(Descriptor::kFrame);
  TNode<IntPtrT> length = SmiToIntPtr(Parameter(Descriptor::kLength));
  TNode<IntPtrT> mapped_count =
      SmiToIntPtr(Parameter(Descriptor::kMappedCount));

  // Check if we can allocate in new space.
  ElementsKind kind = PACKED_ELEMENTS;
  int max_elements = FixedArray::GetMaxLengthForNewSpaceAllocation(kind);
  Label if_newspace(this), if_oldspace(this, Label::kDeferred);
  Branch(IntPtrLessThan(length, IntPtrConstant(max_elements)), &if_newspace,
         &if_oldspace);

  BIND(&if_newspace);
  {
    // Prefer EmptyFixedArray in case of non-positive {length} (the {length}
    // can be negative here for rest parameters).
    Label if_empty(this), if_notempty(this);
    Branch(IntPtrLessThanOrEqual(length, IntPtrConstant(0)), &if_empty,
           &if_notempty);

    BIND(&if_empty);
    Return(EmptyFixedArrayConstant());

    BIND(&if_notempty);
    {
      // Allocate a FixedArray in new space.
      Node* result = AllocateFixedArray(kind, length);

      // The elements might be used to back mapped arguments. In that case fill
      // the mapped elements (i.e. the first {mapped_count}) with the hole, but
      // make sure not to overshoot the {length} if some arguments are missing.
      TNode<IntPtrT> number_of_holes = IntPtrMin(mapped_count, length);
      Node* the_hole = TheHoleConstant();

      // Fill the first elements up to {number_of_holes} with the hole.
      VARIABLE(var_index, MachineType::PointerRepresentation());
      Label loop1(this, &var_index), done_loop1(this);
      var_index.Bind(IntPtrConstant(0));
      Goto(&loop1);
      BIND(&loop1);
      {
        // Load the current {index}.
        Node* index = var_index.value();

        // Check if we are done.
        GotoIf(WordEqual(index, number_of_holes), &done_loop1);

        // Store the hole into the {result}.
        StoreFixedArrayElement(result, index, the_hole, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
        Goto(&loop1);
      }
      BIND(&done_loop1);

      // Compute the effective {offset} into the {frame}.
      Node* offset = IntPtrAdd(length, IntPtrConstant(1));

      // Copy the parameters from {frame} (starting at {offset}) to {result}.
      Label loop2(this, &var_index), done_loop2(this);
      Goto(&loop2);
      BIND(&loop2);
      {
        // Load the current {index}.
        Node* index = var_index.value();

        // Check if we are done.
        GotoIf(WordEqual(index, length), &done_loop2);

        // Load the parameter at the given {index}.
        Node* value = Load(MachineType::AnyTagged(), frame,
                           TimesPointerSize(IntPtrSub(offset, index)));

        // Store the {value} into the {result}.
        StoreFixedArrayElement(result, index, value, SKIP_WRITE_BARRIER);

        // Continue with next {index}.
        var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
        Goto(&loop2);
      }
      BIND(&done_loop2);

      Return(result);
    }
  }

  BIND(&if_oldspace);
  {
    // Allocate in old space (or large object space).
    TailCallRuntime(Runtime::kNewArgumentsElements, NoContextConstant(),
                    BitcastWordToTagged(frame), SmiFromIntPtr(length),
                    SmiFromIntPtr(mapped_count));
  }
}

TF_BUILTIN(ReturnReceiver, CodeStubAssembler) {
  Return(Parameter(Descriptor::kReceiver));
}
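
// DebugBreakTrampoline is installed as the code of a JSFunction whose shared
// function info has the break-at-entry flag set: it calls into the runtime to
// report the debug break and then tail-calls the actual code held by the
// SharedFunctionInfo, forwarding the original arguments unchanged.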

TF_BUILTIN(DebugBreakTrampoline, CodeStubAssembler) {
  Label tailcall_to_shared(this);
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kJSNewTarget));
  TNode<Int32T> arg_count =
      UncheckedCast<Int32T>(Parameter(Descriptor::kJSActualArgumentsCount));
  TNode<JSFunction> function = CAST(LoadFromFrame(
      StandardFrameConstants::kFunctionOffset, MachineType::TaggedPointer()));

  // Check break-at-entry flag on the debug info.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  TNode<Object> maybe_debug_info =
      LoadObjectField(shared, SharedFunctionInfo::kDebugInfoOffset);
  GotoIf(TaggedIsSmi(maybe_debug_info), &tailcall_to_shared);

  {
    TNode<DebugInfo> debug_info = CAST(maybe_debug_info);
    TNode<Smi> flags =
        CAST(LoadObjectField(debug_info, DebugInfo::kFlagsOffset));
    GotoIfNot(SmiToInt32(SmiAnd(flags, SmiConstant(DebugInfo::kBreakAtEntry))),
              &tailcall_to_shared);

    CallRuntime(Runtime::kDebugBreakAtEntry, context, function);
    Goto(&tailcall_to_shared);
  }

  BIND(&tailcall_to_shared);
  // Tail call into code object on the SharedFunctionInfo.
  TNode<Code> code = GetSharedFunctionInfoCode(shared);
  TailCallJSCode(code, context, function, new_target, arg_count);
}
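
// RecordWriteCodeStubAssembler bundles the helpers used by the RecordWrite
// builtin below: querying the incremental-marking flag, testing page flags,
// reading mark bits, and appending slots to the store buffer. Together these
// implement the out-of-line part of the generational and incremental write
// barriers.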

class RecordWriteCodeStubAssembler : public CodeStubAssembler {
 public:
  explicit RecordWriteCodeStubAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  Node* IsMarking() {
    Node* is_marking_addr = ExternalConstant(
        ExternalReference::heap_is_marking_flag_address(this->isolate()));
    return Load(MachineType::Uint8(), is_marking_addr);
  }

  Node* IsPageFlagSet(Node* object, int mask) {
    Node* page = WordAnd(object, IntPtrConstant(~Page::kPageAlignmentMask));
    Node* flags = Load(MachineType::Pointer(), page,
                       IntPtrConstant(MemoryChunk::kFlagsOffset));
    return WordNotEqual(WordAnd(flags, IntPtrConstant(mask)),
                        IntPtrConstant(0));
  }

  void GotoIfNotBlack(Node* object, Label* not_black) {
    Label exit(this);
    Label* black = &exit;

    DCHECK_EQ(strcmp(Marking::kBlackBitPattern, "11"), 0);

    Node* cell;
    Node* mask;

    GetMarkBit(object, &cell, &mask);
    mask = TruncateIntPtrToInt32(mask);

    Node* bits = Load(MachineType::Int32(), cell);
    Node* bit_0 = Word32And(bits, mask);

    GotoIf(Word32Equal(bit_0, Int32Constant(0)), not_black);

    mask = Word32Shl(mask, Int32Constant(1));

    Label word_boundary(this), in_word(this);

    // If mask becomes zero, we know mask was `1 << 31`, i.e., the bit is on a
    // word boundary. Otherwise, the bit is within the word.
    Branch(Word32Equal(mask, Int32Constant(0)), &word_boundary, &in_word);

    BIND(&word_boundary);
    {
      Node* bit_1 = Word32And(
          Load(MachineType::Int32(), IntPtrAdd(cell, IntPtrConstant(4))),
          Int32Constant(1));
      Branch(Word32Equal(bit_1, Int32Constant(0)), not_black, black);
    }

    BIND(&in_word);
    {
      Branch(Word32Equal(Word32And(bits, mask), Int32Constant(0)), not_black,
             black);
    }

    BIND(&exit);
  }

  Node* IsWhite(Node* object) {
    DCHECK_EQ(strcmp(Marking::kWhiteBitPattern, "00"), 0);
    Node* cell;
    Node* mask;
    GetMarkBit(object, &cell, &mask);
    mask = TruncateIntPtrToInt32(mask);
    // Non-white has 1 for the first bit, so we only need to check for the
    // first bit.
    return Word32Equal(Word32And(Load(MachineType::Int32(), cell), mask),
                       Int32Constant(0));
  }

  void GetMarkBit(Node* object, Node** cell, Node** mask) {
    Node* page = WordAnd(object, IntPtrConstant(~Page::kPageAlignmentMask));

    {
      // Temp variable to calculate cell offset in bitmap.
      Node* r0;
      int shift = Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 -
                  Bitmap::kBytesPerCellLog2;
      r0 = WordShr(object, IntPtrConstant(shift));
      r0 = WordAnd(r0, IntPtrConstant((Page::kPageAlignmentMask >> shift) &
                                      ~(Bitmap::kBytesPerCell - 1)));
      *cell = IntPtrAdd(IntPtrAdd(page, r0),
                        IntPtrConstant(MemoryChunk::kHeaderSize));
    }
    {
      // Temp variable to calculate bit offset in cell.
      Node* r1;
      r1 = WordShr(object, IntPtrConstant(kPointerSizeLog2));
      r1 = WordAnd(r1, IntPtrConstant((1 << Bitmap::kBitsPerCellLog2) - 1));
      // It seems that the least significant byte (e.g. cl) is used
      // automatically for the shift count, so no manual masking is needed.
      // Uncomment the following line otherwise.
      // WordAnd(r1, IntPtrConstant((1 << kBitsPerByte) - 1)));
      *mask = WordShl(IntPtrConstant(1), r1);
    }
  }

  Node* ShouldSkipFPRegs(Node* mode) {
    return WordEqual(mode, SmiConstant(kDontSaveFPRegs));
  }

  Node* ShouldEmitRememberSet(Node* remembered_set) {
    return WordEqual(remembered_set, SmiConstant(EMIT_REMEMBERED_SET));
  }

  void CallCFunction1WithCallerSavedRegistersMode(MachineType return_type,
                                                  MachineType arg0_type,
                                                  Node* function, Node* arg0,
                                                  Node* mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunction1WithCallerSavedRegisters(return_type, arg0_type, function,
                                             arg0, kDontSaveFPRegs);
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunction1WithCallerSavedRegisters(return_type, arg0_type, function,
                                             arg0, kSaveFPRegs);
      Goto(next);
    }
  }

  void CallCFunction3WithCallerSavedRegistersMode(
      MachineType return_type, MachineType arg0_type, MachineType arg1_type,
      MachineType arg2_type, Node* function, Node* arg0, Node* arg1, Node* arg2,
      Node* mode, Label* next) {
    Label dont_save_fp(this), save_fp(this);
    Branch(ShouldSkipFPRegs(mode), &dont_save_fp, &save_fp);
    BIND(&dont_save_fp);
    {
      CallCFunction3WithCallerSavedRegisters(return_type, arg0_type, arg1_type,
                                             arg2_type, function, arg0, arg1,
                                             arg2, kDontSaveFPRegs);
      Goto(next);
    }

    BIND(&save_fp);
    {
      CallCFunction3WithCallerSavedRegisters(return_type, arg0_type, arg1_type,
                                             arg2_type, function, arg0, arg1,
                                             arg2, kSaveFPRegs);
      Goto(next);
    }
  }

  void InsertToStoreBufferAndGoto(Node* isolate, Node* slot, Node* mode,
                                  Label* next) {
    Node* store_buffer_top_addr =
        ExternalConstant(ExternalReference::store_buffer_top(this->isolate()));
    Node* store_buffer_top =
        Load(MachineType::Pointer(), store_buffer_top_addr);
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), store_buffer_top,
                        slot);
    Node* new_store_buffer_top =
        IntPtrAdd(store_buffer_top, IntPtrConstant(kPointerSize));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(),
                        store_buffer_top_addr, new_store_buffer_top);

    Node* test = WordAnd(new_store_buffer_top,
                         IntPtrConstant(StoreBuffer::kStoreBufferMask));

    Label overflow(this);
    Branch(WordEqual(test, IntPtrConstant(0)), &overflow, next);

    BIND(&overflow);
    {
      Node* function =
          ExternalConstant(ExternalReference::store_buffer_overflow_function());
      CallCFunction1WithCallerSavedRegistersMode(MachineType::Int32(),
                                                 MachineType::Pointer(),
                                                 function, isolate, mode, next);
    }
  }
};
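
// RecordWrite is the out-of-line write barrier: given an {object}, the {slot}
// inside it that was just written, and the requested remembered-set and FP
// mode, it records the write either in the store buffer (old-to-new pointer)
// or with the incremental marker, and returns true. Compiled code only calls
// it after the inline page-flag checks have indicated that recording might be
// necessary.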

TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
  Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
  Node* slot = Parameter(Descriptor::kSlot);
  Node* isolate = Parameter(Descriptor::kIsolate);
  Node* remembered_set = Parameter(Descriptor::kRememberedSet);
  Node* fp_mode = Parameter(Descriptor::kFPMode);

  Node* value = Load(MachineType::Pointer(), slot);

  Label generational_wb(this);
  Label incremental_wb(this);
  Label exit(this);

  Branch(ShouldEmitRememberSet(remembered_set), &generational_wb,
         &incremental_wb);

  BIND(&generational_wb);
  {
    Label test_old_to_new_flags(this);
    Label store_buffer_exit(this), store_buffer_incremental_wb(this);
    // When incremental marking is not on, we skip cross generation pointer
    // checking here, because there are checks for
    // `kPointersFromHereAreInterestingMask` and
    // `kPointersToHereAreInterestingMask` in
    // `src/compiler/<arch>/code-generator-<arch>.cc` before calling this stub,
    // which serve as the cross generation check.
    Branch(IsMarking(), &test_old_to_new_flags, &store_buffer_exit);

    BIND(&test_old_to_new_flags);
    {
      // TODO(albertnetymk): Try to cache the page flag for value and object,
      // instead of calling IsPageFlagSet each time.
      Node* value_in_new_space =
          IsPageFlagSet(value, MemoryChunk::kIsInNewSpaceMask);
      GotoIfNot(value_in_new_space, &incremental_wb);

      Node* object_in_new_space =
          IsPageFlagSet(object, MemoryChunk::kIsInNewSpaceMask);
      GotoIf(object_in_new_space, &incremental_wb);

      Goto(&store_buffer_incremental_wb);
    }

    BIND(&store_buffer_exit);
    { InsertToStoreBufferAndGoto(isolate, slot, fp_mode, &exit); }

    BIND(&store_buffer_incremental_wb);
    { InsertToStoreBufferAndGoto(isolate, slot, fp_mode, &incremental_wb); }
  }

  BIND(&incremental_wb);
  {
    Label call_incremental_wb(this);

#ifndef V8_CONCURRENT_MARKING
    GotoIfNotBlack(object, &exit);
#endif

    // There are two cases in which we need to call the incremental write
    // barrier:
    // 1) value_is_white
    GotoIf(IsWhite(value), &call_incremental_wb);

    // 2) is_compacting && value_in_EC && obj_isnt_skip
    // is_compacting = true when is_marking = true
    GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),
              &exit);
    GotoIf(
        IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
        &exit);

    Goto(&call_incremental_wb);

    BIND(&call_incremental_wb);
    {
      Node* function = ExternalConstant(
          ExternalReference::incremental_marking_record_write_function());
      CallCFunction3WithCallerSavedRegistersMode(
          MachineType::Int32(), MachineType::Pointer(), MachineType::Pointer(),
          MachineType::Pointer(), function, object, slot, isolate, fp_mode,
          &exit);
    }
  }

  BIND(&exit);
  Return(TrueConstant());
}
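
// DeletePropertyBaseAssembler implements the in-CSA path of the 'delete'
// operator for dictionary-mode receivers: the property entry is overwritten
// with the hole, the element counts are updated, and the backing
// NameDictionary is shrunk through the runtime when it becomes sparse enough.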

class DeletePropertyBaseAssembler : public AccessorAssembler {
 public:
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
      : AccessorAssembler(state) {}

  void DeleteDictionaryProperty(TNode<Object> receiver,
                                TNode<NameDictionary> properties,
                                TNode<Name> name, TNode<Context> context,
                                Label* dont_delete, Label* notfound) {
    TVARIABLE(IntPtrT, var_name_index);
    Label dictionary_found(this, &var_name_index);
    NameDictionaryLookup<NameDictionary>(properties, name, &dictionary_found,
                                         &var_name_index, notfound);

    BIND(&dictionary_found);
    TNode<IntPtrT> key_index = var_name_index.value();
    TNode<Uint32T> details =
        LoadDetailsByKeyIndex<NameDictionary>(properties, key_index);
    GotoIf(IsSetWord32(details, PropertyDetails::kAttributesDontDeleteMask),
           dont_delete);
    // Overwrite the entry itself (see NameDictionary::SetEntry).
    TNode<HeapObject> filler = TheHoleConstant();
    DCHECK(Heap::RootIsImmortalImmovable(Heap::kTheHoleValueRootIndex));
    StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
    StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
                                         SKIP_WRITE_BARRIER);
    StoreDetailsByKeyIndex<NameDictionary>(properties, key_index,
                                           SmiConstant(0));

    // Update bookkeeping information (see NameDictionary::ElementRemoved).
    TNode<Smi> nof = GetNumberOfElements<NameDictionary>(properties);
    TNode<Smi> new_nof = SmiSub(nof, SmiConstant(1));
    SetNumberOfElements<NameDictionary>(properties, new_nof);
    TNode<Smi> num_deleted =
        GetNumberOfDeletedElements<NameDictionary>(properties);
    TNode<Smi> new_deleted = SmiAdd(num_deleted, SmiConstant(1));
    SetNumberOfDeletedElements<NameDictionary>(properties, new_deleted);

    // Shrink the dictionary if necessary (see NameDictionary::Shrink).
    Label shrinking_done(this);
    TNode<Smi> capacity = GetCapacity<NameDictionary>(properties);
    GotoIf(SmiGreaterThan(new_nof, SmiShr(capacity, 2)), &shrinking_done);
    GotoIf(SmiLessThan(new_nof, SmiConstant(16)), &shrinking_done);
    CallRuntime(Runtime::kShrinkPropertyDictionary, context, receiver);
    Goto(&shrinking_done);
    BIND(&shrinking_done);

    Return(TrueConstant());
  }
};
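
// DeleteProperty implements the language-visible 'delete obj[key]' operation.
// It handles the dictionary-properties case above directly and defers
// everything else (integer indices, fast-mode receivers, exotic receivers,
// and non-sloppy-mode failures) to the %DeleteProperty runtime function.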

TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
  TNode<Object> receiver = CAST(Parameter(Descriptor::kObject));
  TNode<Object> key = CAST(Parameter(Descriptor::kKey));
  TNode<Smi> language_mode = CAST(Parameter(Descriptor::kLanguageMode));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));

  VARIABLE(var_index, MachineType::PointerRepresentation());
  VARIABLE(var_unique, MachineRepresentation::kTagged, key);
  Label if_index(this), if_unique_name(this), if_notunique(this),
      if_notfound(this), slow(this);

  GotoIf(TaggedIsSmi(receiver), &slow);
  TNode<Map> receiver_map = LoadMap(CAST(receiver));
  TNode<Int32T> instance_type = LoadMapInstanceType(receiver_map);
  GotoIf(IsCustomElementsReceiverInstanceType(instance_type), &slow);
  TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
            &if_notunique);

  BIND(&if_index);
  {
    Comment("integer index");
    Goto(&slow);  // TODO(jkummerow): Implement more smarts here.
  }

  BIND(&if_unique_name);
  {
    Comment("key is unique name");
    TNode<Name> unique = CAST(var_unique.value());
    CheckForAssociatedProtector(unique, &slow);

    Label dictionary(this), dont_delete(this);
    GotoIf(IsDictionaryMap(receiver_map), &dictionary);

    // Fast properties need to clear recorded slots, which can only be done
    // in C++.
    Goto(&slow);

    BIND(&dictionary);
    {
      InvalidateValidityCellIfPrototype(receiver_map);

      TNode<NameDictionary> properties =
          CAST(LoadSlowProperties(CAST(receiver)));
      DeleteDictionaryProperty(receiver, properties, unique, context,
                               &dont_delete, &if_notfound);
    }

    BIND(&dont_delete);
    {
      STATIC_ASSERT(LanguageModeSize == 2);
      GotoIf(SmiNotEqual(language_mode, SmiConstant(LanguageMode::kSloppy)),
             &slow);
      Return(FalseConstant());
    }
  }

  BIND(&if_notunique);
  {
    // If the string was not found in the string table, then no object can
    // have a property with that name.
    TryInternalizeString(key, &if_index, &var_index, &if_unique_name,
                         &var_unique, &if_notfound, &slow);
  }

  BIND(&if_notfound);
  Return(TrueConstant());

  BIND(&slow);
  {
    TailCallRuntime(Runtime::kDeleteProperty, context, receiver, key,
                    language_mode);
  }
}
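
// ForInEnumerate and ForInFilter support for-in iteration: the former returns
// either the receiver map (when the enum cache can be used), an empty fixed
// array (nothing to enumerate), or defers to the runtime; the latter
// re-checks that a key is still present on the object before it is handed to
// the loop body, returning the key or undefined.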

TF_BUILTIN(ForInEnumerate, CodeStubAssembler) {
  Node* receiver = Parameter(Descriptor::kReceiver);
  Node* context = Parameter(Descriptor::kContext);

  Label if_empty(this), if_runtime(this, Label::kDeferred);
  Node* receiver_map = CheckEnumCache(receiver, &if_empty, &if_runtime);
  Return(receiver_map);

  BIND(&if_empty);
  Return(EmptyFixedArrayConstant());

  BIND(&if_runtime);
  TailCallRuntime(Runtime::kForInEnumerate, context, receiver);
}

TF_BUILTIN(ForInFilter, CodeStubAssembler) {
  Node* key = Parameter(Descriptor::kKey);
  Node* object = Parameter(Descriptor::kObject);
  Node* context = Parameter(Descriptor::kContext);

  CSA_ASSERT(this, IsString(key));

  Label if_true(this), if_false(this);
  TNode<Oddball> result = HasProperty(object, key, context, kForInHasProperty);
  Branch(IsTrue(result), &if_true, &if_false);

  BIND(&if_true);
  Return(key);

  BIND(&if_false);
  Return(UndefinedConstant());
}

TF_BUILTIN(SameValue, CodeStubAssembler) {
  Node* lhs = Parameter(Descriptor::kLeft);
  Node* rhs = Parameter(Descriptor::kRight);

  Label if_true(this), if_false(this);
  BranchIfSameValue(lhs, rhs, &if_true, &if_false);

  BIND(&if_true);
  Return(TrueConstant());

  BIND(&if_false);
  Return(FalseConstant());
}
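
// InternalBuiltinsAssembler collects the helpers used by the microtask
// builtins below: accessors for the isolate-level pending-microtask count and
// the microtask queue (a FixedArray root), the current context, and the
// HandleScopeImplementer bookkeeping needed when entering and leaving a
// microtask context, plus small wrappers around the pending/scheduled
// exception slots.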

class InternalBuiltinsAssembler : public CodeStubAssembler {
 public:
  explicit InternalBuiltinsAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  TNode<IntPtrT> GetPendingMicrotaskCount();
  void SetPendingMicrotaskCount(TNode<IntPtrT> count);

  TNode<FixedArray> GetMicrotaskQueue();
  void SetMicrotaskQueue(TNode<FixedArray> queue);

  TNode<Context> GetCurrentContext();
  void SetCurrentContext(TNode<Context> context);

  void EnterMicrotaskContext(TNode<Context> context);
  void LeaveMicrotaskContext();

  void RunPromiseHook(Runtime::FunctionId id, TNode<Context> context,
                      SloppyTNode<HeapObject> promise_or_capability);

  TNode<Object> GetPendingException() {
    auto ref = ExternalReference::Create(kPendingExceptionAddress, isolate());
    return TNode<Object>::UncheckedCast(
        Load(MachineType::AnyTagged(), ExternalConstant(ref)));
  }
  void ClearPendingException() {
    auto ref = ExternalReference::Create(kPendingExceptionAddress, isolate());
    StoreNoWriteBarrier(MachineRepresentation::kTagged, ExternalConstant(ref),
                        TheHoleConstant());
  }

  TNode<Object> GetScheduledException() {
    auto ref = ExternalReference::scheduled_exception_address(isolate());
    return TNode<Object>::UncheckedCast(
        Load(MachineType::AnyTagged(), ExternalConstant(ref)));
  }
  void ClearScheduledException() {
    auto ref = ExternalReference::scheduled_exception_address(isolate());
    StoreNoWriteBarrier(MachineRepresentation::kTagged, ExternalConstant(ref),
                        TheHoleConstant());
  }
};

TNode<IntPtrT> InternalBuiltinsAssembler::GetPendingMicrotaskCount() {
  auto ref = ExternalReference::pending_microtask_count_address(isolate());
  if (kIntSize == 8) {
    return TNode<IntPtrT>::UncheckedCast(
        Load(MachineType::Int64(), ExternalConstant(ref)));
  } else {
    Node* const value = Load(MachineType::Int32(), ExternalConstant(ref));
    return ChangeInt32ToIntPtr(value);
  }
}

void InternalBuiltinsAssembler::SetPendingMicrotaskCount(TNode<IntPtrT> count) {
  auto ref = ExternalReference::pending_microtask_count_address(isolate());
  auto rep = kIntSize == 8 ? MachineRepresentation::kWord64
                           : MachineRepresentation::kWord32;
  if (kIntSize == 4 && kPointerSize == 8) {
    Node* const truncated_count =
        TruncateInt64ToInt32(TNode<Int64T>::UncheckedCast(count));
    StoreNoWriteBarrier(rep, ExternalConstant(ref), truncated_count);
  } else {
    StoreNoWriteBarrier(rep, ExternalConstant(ref), count);
  }
}

TNode<FixedArray> InternalBuiltinsAssembler::GetMicrotaskQueue() {
  return TNode<FixedArray>::UncheckedCast(
      LoadRoot(Heap::kMicrotaskQueueRootIndex));
}

void InternalBuiltinsAssembler::SetMicrotaskQueue(TNode<FixedArray> queue) {
  StoreRoot(Heap::kMicrotaskQueueRootIndex, queue);
}

TNode<Context> InternalBuiltinsAssembler::GetCurrentContext() {
  auto ref = ExternalReference::Create(kContextAddress, isolate());
  return TNode<Context>::UncheckedCast(
      Load(MachineType::AnyTagged(), ExternalConstant(ref)));
}

void InternalBuiltinsAssembler::SetCurrentContext(TNode<Context> context) {
  auto ref = ExternalReference::Create(kContextAddress, isolate());
  StoreNoWriteBarrier(MachineRepresentation::kTagged, ExternalConstant(ref),
                      context);
}

void InternalBuiltinsAssembler::EnterMicrotaskContext(
    TNode<Context> microtask_context) {
  auto ref = ExternalReference::handle_scope_implementer_address(isolate());
  Node* const hsi = Load(MachineType::Pointer(), ExternalConstant(ref));
  StoreNoWriteBarrier(
      MachineType::PointerRepresentation(), hsi,
      IntPtrConstant(HandleScopeImplementerOffsets::kMicrotaskContext),
      BitcastTaggedToWord(microtask_context));

  // Load mirrored std::vector length from
  // HandleScopeImplementer::entered_contexts_count_
  auto type = kSizetSize == 8 ? MachineType::Uint64() : MachineType::Uint32();
  Node* entered_contexts_length = Load(
      type, hsi,
      IntPtrConstant(HandleScopeImplementerOffsets::kEnteredContextsCount));

  auto rep = kSizetSize == 8 ? MachineRepresentation::kWord64
                             : MachineRepresentation::kWord32;

  StoreNoWriteBarrier(
      rep, hsi,
      IntPtrConstant(
          HandleScopeImplementerOffsets::kEnteredContextCountDuringMicrotasks),
      entered_contexts_length);
}

void InternalBuiltinsAssembler::LeaveMicrotaskContext() {
  auto ref = ExternalReference::handle_scope_implementer_address(isolate());

  Node* const hsi = Load(MachineType::Pointer(), ExternalConstant(ref));
  StoreNoWriteBarrier(
      MachineType::PointerRepresentation(), hsi,
      IntPtrConstant(HandleScopeImplementerOffsets::kMicrotaskContext),
      IntPtrConstant(0));
  if (kSizetSize == 4) {
    StoreNoWriteBarrier(
        MachineRepresentation::kWord32, hsi,
        IntPtrConstant(HandleScopeImplementerOffsets::
                           kEnteredContextCountDuringMicrotasks),
        Int32Constant(0));
  } else {
    StoreNoWriteBarrier(
        MachineRepresentation::kWord64, hsi,
        IntPtrConstant(HandleScopeImplementerOffsets::
                           kEnteredContextCountDuringMicrotasks),
        Int64Constant(0));
  }
}

void InternalBuiltinsAssembler::RunPromiseHook(
    Runtime::FunctionId id, TNode<Context> context,
    SloppyTNode<HeapObject> promise_or_capability) {
  Label hook(this, Label::kDeferred), done_hook(this);
  GotoIf(IsDebugActive(), &hook);
  Branch(IsPromiseHookEnabledOrHasAsyncEventDelegate(), &hook, &done_hook);
  BIND(&hook);
  {
    // Get to the underlying JSPromise instance.
    Node* const promise = Select<HeapObject>(
        IsJSPromise(promise_or_capability),
        [=] { return promise_or_capability; },
        [=] {
          return CAST(LoadObjectField(promise_or_capability,
                                      PromiseCapability::kPromiseOffset));
        });
    CallRuntime(id, context, promise);
    Goto(&done_hook);
  }
  BIND(&done_hook);
}
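
// EnqueueMicrotask appends {microtask} to the isolate's microtask queue. When
// the queue is full, it is reallocated with double the capacity (at least 8
// entries): in new space when the new length still fits a regular FixedArray,
// and in large object space otherwise. The pending count is bumped afterwards.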

TF_BUILTIN(EnqueueMicrotask, InternalBuiltinsAssembler) {
  Node* microtask = Parameter(Descriptor::kMicrotask);

  TNode<IntPtrT> num_tasks = GetPendingMicrotaskCount();
  TNode<IntPtrT> new_num_tasks = IntPtrAdd(num_tasks, IntPtrConstant(1));
  TNode<FixedArray> queue = GetMicrotaskQueue();
  TNode<IntPtrT> queue_length = LoadAndUntagFixedArrayBaseLength(queue);

  Label if_append(this), if_grow(this), done(this);
  Branch(WordEqual(num_tasks, queue_length), &if_grow, &if_append);

  BIND(&if_grow);
  {
    // Determine the new queue length and check if we need to allocate
    // in large object space (instead of just going to new space, where
    // we also know that we don't need any write barriers for setting
    // up the new queue object).
    Label if_newspace(this), if_lospace(this, Label::kDeferred);
    TNode<IntPtrT> new_queue_length =
        IntPtrMax(IntPtrConstant(8), IntPtrAdd(num_tasks, num_tasks));
    Branch(IntPtrLessThanOrEqual(new_queue_length,
                                 IntPtrConstant(FixedArray::kMaxRegularLength)),
           &if_newspace, &if_lospace);

    BIND(&if_newspace);
    {
      // This is the likely case where the new queue fits into new space,
      // and thus we don't need any write barriers for initializing it.
      TNode<FixedArray> new_queue =
          AllocateFixedArray(PACKED_ELEMENTS, new_queue_length);
      CopyFixedArrayElements(PACKED_ELEMENTS, queue, new_queue, num_tasks,
                             SKIP_WRITE_BARRIER);
      StoreFixedArrayElement(new_queue, num_tasks, microtask,
                             SKIP_WRITE_BARRIER);
      FillFixedArrayWithValue(PACKED_ELEMENTS, new_queue, new_num_tasks,
                              new_queue_length, Heap::kUndefinedValueRootIndex);
      SetMicrotaskQueue(new_queue);
      Goto(&done);
    }

    BIND(&if_lospace);
    {
      // The fallback case where the new queue ends up in large object space.
      TNode<FixedArray> new_queue = AllocateFixedArray(
          PACKED_ELEMENTS, new_queue_length, INTPTR_PARAMETERS,
          AllocationFlag::kAllowLargeObjectAllocation);
      CopyFixedArrayElements(PACKED_ELEMENTS, queue, new_queue, num_tasks);
      StoreFixedArrayElement(new_queue, num_tasks, microtask);
      FillFixedArrayWithValue(PACKED_ELEMENTS, new_queue, new_num_tasks,
                              new_queue_length, Heap::kUndefinedValueRootIndex);
      SetMicrotaskQueue(new_queue);
      Goto(&done);
    }
  }

  BIND(&if_append);
  {
    StoreFixedArrayElement(queue, num_tasks, microtask);
    Goto(&done);
  }

  BIND(&done);
  SetPendingMicrotaskCount(new_num_tasks);
  Return(UndefinedConstant());
}
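
// RunMicrotasks drains the microtask queue: it detaches the current queue,
// runs each task by dispatching on its instance type (plain callables, API
// callbacks, promise reaction jobs, and promise-resolve-thenable jobs), and
// then loops back to pick up any microtasks that were enqueued while running.
// Each task runs in its own (native) context, and exceptions are reported via
// Runtime::kReportMessage rather than propagated.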

TF_BUILTIN(RunMicrotasks, InternalBuiltinsAssembler) {
  // Load the current context from the isolate.
  TNode<Context> current_context = GetCurrentContext();

  Label init_queue_loop(this);
  Goto(&init_queue_loop);
  BIND(&init_queue_loop);
  {
    TVARIABLE(IntPtrT, index, IntPtrConstant(0));
    Label loop(this, &index), loop_next(this);

    TNode<IntPtrT> num_tasks = GetPendingMicrotaskCount();
    ReturnIf(IntPtrEqual(num_tasks, IntPtrConstant(0)), UndefinedConstant());

    TNode<FixedArray> queue = GetMicrotaskQueue();

    CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
                         LoadAndUntagFixedArrayBaseLength(queue), num_tasks));
    CSA_ASSERT(this, IntPtrGreaterThan(num_tasks, IntPtrConstant(0)));

    SetPendingMicrotaskCount(IntPtrConstant(0));
    SetMicrotaskQueue(EmptyFixedArrayConstant());

    Goto(&loop);
    BIND(&loop);
    {
      TNode<HeapObject> microtask =
          CAST(LoadFixedArrayElement(queue, index.value()));
      index = IntPtrAdd(index.value(), IntPtrConstant(1));

      CSA_ASSERT(this, TaggedIsNotSmi(microtask));

      TNode<Map> microtask_map = LoadMap(microtask);
      TNode<Int32T> microtask_type = LoadMapInstanceType(microtask_map);

      VARIABLE(var_exception, MachineRepresentation::kTagged,
               TheHoleConstant());
      Label if_exception(this, Label::kDeferred);
      Label is_callable(this), is_callback(this),
          is_promise_fulfill_reaction_job(this),
          is_promise_reject_reaction_job(this),
          is_promise_resolve_thenable_job(this),
          is_unreachable(this, Label::kDeferred);

      int32_t case_values[] = {CALLABLE_TASK_TYPE, CALLBACK_TASK_TYPE,
                               PROMISE_FULFILL_REACTION_JOB_TASK_TYPE,
                               PROMISE_REJECT_REACTION_JOB_TASK_TYPE,
                               PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE};
      Label* case_labels[] = {
          &is_callable, &is_callback, &is_promise_fulfill_reaction_job,
          &is_promise_reject_reaction_job, &is_promise_resolve_thenable_job};
      static_assert(arraysize(case_values) == arraysize(case_labels), "");
      Switch(microtask_type, &is_unreachable, case_values, case_labels,
             arraysize(case_labels));

      BIND(&is_callable);
      {
        // Enter the context of the {microtask}.
        TNode<Context> microtask_context =
            LoadObjectField<Context>(microtask, CallableTask::kContextOffset);
        TNode<Context> native_context = LoadNativeContext(microtask_context);

        CSA_ASSERT(this, IsNativeContext(native_context));
        EnterMicrotaskContext(microtask_context);
        SetCurrentContext(native_context);

        TNode<JSReceiver> callable = LoadObjectField<JSReceiver>(
            microtask, CallableTask::kCallableOffset);
        Node* const result = CallJS(
            CodeFactory::Call(isolate(), ConvertReceiverMode::kNullOrUndefined),
            microtask_context, callable, UndefinedConstant());
        GotoIfException(result, &if_exception, &var_exception);
        LeaveMicrotaskContext();
        SetCurrentContext(current_context);
        Goto(&loop_next);
      }

      BIND(&is_callback);
      {
        Node* const microtask_callback =
            LoadObjectField(microtask, CallbackTask::kCallbackOffset);
        Node* const microtask_data =
            LoadObjectField(microtask, CallbackTask::kDataOffset);

        // If this turns out to become a bottleneck because of the calls
        // to C++ via CEntry, we can choose to speed them up using a
        // similar mechanism that we use for the CallApiFunction stub,
        // except that calling the MicrotaskCallback is even easier, since
        // it doesn't accept any tagged parameters, doesn't return a value
        // and ignores exceptions.
        //
        // But from our current measurements it doesn't seem to be a
        // serious performance problem, even if the microtask is full
        // of CallHandlerTasks (which is not a realistic use case anyways).
        Node* const result =
            CallRuntime(Runtime::kRunMicrotaskCallback, current_context,
                        microtask_callback, microtask_data);
        GotoIfException(result, &if_exception, &var_exception);
        Goto(&loop_next);
      }

      BIND(&is_promise_resolve_thenable_job);
      {
        // Enter the context of the {microtask}.
        TNode<Context> microtask_context = LoadObjectField<Context>(
            microtask, PromiseResolveThenableJobTask::kContextOffset);
        TNode<Context> native_context = LoadNativeContext(microtask_context);
        CSA_ASSERT(this, IsNativeContext(native_context));
        EnterMicrotaskContext(microtask_context);
        SetCurrentContext(native_context);

        Node* const promise_to_resolve = LoadObjectField(
            microtask, PromiseResolveThenableJobTask::kPromiseToResolveOffset);
        Node* const then = LoadObjectField(
            microtask, PromiseResolveThenableJobTask::kThenOffset);
        Node* const thenable = LoadObjectField(
            microtask, PromiseResolveThenableJobTask::kThenableOffset);

        Node* const result =
            CallBuiltin(Builtins::kPromiseResolveThenableJob, native_context,
                        promise_to_resolve, thenable, then);
        GotoIfException(result, &if_exception, &var_exception);
        LeaveMicrotaskContext();
        SetCurrentContext(current_context);
        Goto(&loop_next);
      }

      BIND(&is_promise_fulfill_reaction_job);
      {
        // Enter the context of the {microtask}.
        TNode<Context> microtask_context = LoadObjectField<Context>(
            microtask, PromiseReactionJobTask::kContextOffset);
        TNode<Context> native_context = LoadNativeContext(microtask_context);
        CSA_ASSERT(this, IsNativeContext(native_context));
        EnterMicrotaskContext(microtask_context);
        SetCurrentContext(native_context);

        Node* const argument =
            LoadObjectField(microtask, PromiseReactionJobTask::kArgumentOffset);
        Node* const handler =
            LoadObjectField(microtask, PromiseReactionJobTask::kHandlerOffset);
        Node* const promise_or_capability = LoadObjectField(
            microtask, PromiseReactionJobTask::kPromiseOrCapabilityOffset);

        // Run the promise before/debug hook if enabled.
        RunPromiseHook(Runtime::kPromiseHookBefore, microtask_context,
                       promise_or_capability);

        Node* const result =
            CallBuiltin(Builtins::kPromiseFulfillReactionJob, microtask_context,
                        argument, handler, promise_or_capability);
        GotoIfException(result, &if_exception, &var_exception);

        // Run the promise after/debug hook if enabled.
        RunPromiseHook(Runtime::kPromiseHookAfter, microtask_context,
                       promise_or_capability);

        LeaveMicrotaskContext();
        SetCurrentContext(current_context);
        Goto(&loop_next);
      }

      BIND(&is_promise_reject_reaction_job);
      {
        // Enter the context of the {microtask}.
        TNode<Context> microtask_context = LoadObjectField<Context>(
            microtask, PromiseReactionJobTask::kContextOffset);
        TNode<Context> native_context = LoadNativeContext(microtask_context);
        CSA_ASSERT(this, IsNativeContext(native_context));
        EnterMicrotaskContext(microtask_context);
        SetCurrentContext(native_context);

        Node* const argument =
            LoadObjectField(microtask, PromiseReactionJobTask::kArgumentOffset);
        Node* const handler =
            LoadObjectField(microtask, PromiseReactionJobTask::kHandlerOffset);
        Node* const promise_or_capability = LoadObjectField(
            microtask, PromiseReactionJobTask::kPromiseOrCapabilityOffset);

        // Run the promise before/debug hook if enabled.
        RunPromiseHook(Runtime::kPromiseHookBefore, microtask_context,
                       promise_or_capability);

        Node* const result =
            CallBuiltin(Builtins::kPromiseRejectReactionJob, microtask_context,
                        argument, handler, promise_or_capability);
        GotoIfException(result, &if_exception, &var_exception);

        // Run the promise after/debug hook if enabled.
        RunPromiseHook(Runtime::kPromiseHookAfter, microtask_context,
                       promise_or_capability);

        LeaveMicrotaskContext();
        SetCurrentContext(current_context);
        Goto(&loop_next);
      }

      BIND(&is_unreachable);
      Unreachable();

      BIND(&if_exception);
      {
        // Report unhandled exceptions from microtasks.
        CallRuntime(Runtime::kReportMessage, current_context,
                    var_exception.value());
        LeaveMicrotaskContext();
        SetCurrentContext(current_context);
        Goto(&loop_next);
      }

      BIND(&loop_next);
      Branch(IntPtrLessThan(index.value(), num_tasks), &loop, &init_queue_loop);
    }
  }
}
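
// AllocateInNewSpace and AllocateInOldSpace forward an allocation request of
// kRequestedSize bytes to the corresponding runtime allocation functions;
// they serve as the out-of-line path when code needs the allocator but cannot
// allocate inline.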

TF_BUILTIN(AllocateInNewSpace, CodeStubAssembler) {
  TNode<Int32T> requested_size =
      UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));

  TailCallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
                  SmiFromInt32(requested_size));
}

TF_BUILTIN(AllocateInOldSpace, CodeStubAssembler) {
  TNode<Int32T> requested_size =
      UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));

  int flags = AllocateTargetSpace::encode(OLD_SPACE);
  TailCallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                  SmiFromInt32(requested_size), SmiConstant(flags));
}

TF_BUILTIN(Abort, CodeStubAssembler) {
  TNode<Smi> message_id = CAST(Parameter(Descriptor::kMessageOrMessageId));
  TailCallRuntime(Runtime::kAbort, NoContextConstant(), message_id);
}

TF_BUILTIN(AbortJS, CodeStubAssembler) {
  TNode<String> message = CAST(Parameter(Descriptor::kMessageOrMessageId));
  TailCallRuntime(Runtime::kAbortJS, NoContextConstant(), message);
}
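
// The Generate_CEntry_* builtins below are the CEntry stub in all of its
// pre-instantiated configurations. Each name encodes the configuration that
// is passed on to Generate_CEntry: the number of return values
// (Return1/Return2), whether FP registers are saved across the call, whether
// the argv pointer is taken from the stack or a register, and whether a
// builtin exit frame is used.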

void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return1_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvOnStack, true);
}

void Builtins::
    Generate_CEntry_Return1_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kDontSaveFPRegs, kArgvInRegister, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return1_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 1, kSaveFPRegs, kArgvOnStack, true);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return2_DontSaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvOnStack, true);
}

void Builtins::
    Generate_CEntry_Return2_DontSaveFPRegs_ArgvInRegister_NoBuiltinExit(
        MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kDontSaveFPRegs, kArgvInRegister, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, false);
}

void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  Generate_CEntry(masm, 2, kSaveFPRegs, kArgvOnStack, true);
}

void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  // CallApiGetterStub only exists as a stub to avoid duplicating code between
  // here and code-stubs-<arch>.cc. For example, see CallApiFunctionAndReturn.
  // Here we abuse the instantiated stub to generate code.
  CallApiGetterStub stub(masm->isolate());
  stub.Generate(masm);
}

void Builtins::Generate_CallApiCallback_Argc0(MacroAssembler* masm) {
  // The common variants of CallApiCallbackStub (i.e. all that are embedded
  // into the snapshot) are generated as builtins. The rest remain available
  // as code stubs. Here we abuse the instantiated stub to generate code and
  // avoid duplication.
  const int kArgc = 0;
  CallApiCallbackStub stub(masm->isolate(), kArgc);
  stub.Generate(masm);
}

void Builtins::Generate_CallApiCallback_Argc1(MacroAssembler* masm) {
  // The common variants of CallApiCallbackStub (i.e. all that are embedded
  // into the snapshot) are generated as builtins. The rest remain available
  // as code stubs. Here we abuse the instantiated stub to generate code and
  // avoid duplication.
  const int kArgc = 1;
  CallApiCallbackStub stub(masm->isolate(), kArgc);
  stub.Generate(masm);
}

// ES6 [[Get]] operation.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
  Label call_runtime(this, Label::kDeferred), return_undefined(this), end(this);

  Node* object = Parameter(Descriptor::kObject);
  Node* key = Parameter(Descriptor::kKey);
  Node* context = Parameter(Descriptor::kContext);
  VARIABLE(var_result, MachineRepresentation::kTagged);

  CodeStubAssembler::LookupInHolder lookup_property_in_holder =
      [=, &var_result, &end](Node* receiver, Node* holder, Node* holder_map,
                             Node* holder_instance_type, Node* unique_name,
                             Label* next_holder, Label* if_bailout) {
        VARIABLE(var_value, MachineRepresentation::kTagged);
        Label if_found(this);
        TryGetOwnProperty(context, receiver, holder, holder_map,
                          holder_instance_type, unique_name, &if_found,
                          &var_value, next_holder, if_bailout);
        BIND(&if_found);
        {
          var_result.Bind(var_value.value());
          Goto(&end);
        }
      };

  CodeStubAssembler::LookupInHolder lookup_element_in_holder =
      [=](Node* receiver, Node* holder, Node* holder_map,
          Node* holder_instance_type, Node* index, Label* next_holder,
          Label* if_bailout) {
        // Not supported yet.
        Use(next_holder);
        Goto(if_bailout);
      };

  TryPrototypeChainLookup(object, key, lookup_property_in_holder,
                          lookup_element_in_holder, &return_undefined,
                          &call_runtime);

  BIND(&return_undefined);
  {
    var_result.Bind(UndefinedConstant());
    Goto(&end);
  }

  BIND(&call_runtime);
  {
    var_result.Bind(CallRuntime(Runtime::kGetProperty, context, object, key));
    Goto(&end);
  }

  BIND(&end);
  Return(var_result.value());
}

}  // namespace internal
}  // namespace v8