PPC: [runtime] Fix ES6 9.2.1 [[Call]] when encountering a classConstructor.

Port ab84025977

Also:
- Fix big-endian compiler hints BYTE_OFFSET macro.
- Clean up PPC code access to compiler hints -- which required some new
  SharedFunctionInfo fields to encapsulate kCompilerHintsSmiTagSize.

Original commit message:
    The current implementation of classes throws the TypeError at the wrong
    point, after activating a new context when directly calling a class
    constructor. According to the spec, the TypeError has to be thrown
    in the caller context.

R=bmeurer@chromium.org, cbruni@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com, dstence@us.ibm.com
LOG=N
BUG=v8:4428

Review URL: https://codereview.chromium.org/1423713014

Cr-Commit-Position: refs/heads/master@{#31831}
mbrandy 2015-11-05 06:51:37 -08:00 committed by Commit bot
parent 8ad6168d19
commit 6413ef4e63
4 changed files with 71 additions and 69 deletions
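
A note on the ordering the original commit message describes: per ES6 9.2.1 step 2, [[Call]] on a class constructor must throw before the callee's context is activated, so the TypeError surfaces in the caller's context. A minimal C++ model of that ordering (illustrative names only, not V8 code):

#include <iostream>
#include <stdexcept>
#include <string>

// Illustrative stand-ins, not V8 types.
struct Function {
  bool is_class_constructor;
  std::string context_name;
};

static std::string g_active_context = "caller";

void CallModel(const Function& f) {
  // Correct placement: test while the caller's context is still active.
  if (f.is_class_constructor) {
    throw std::runtime_error("TypeError raised in context: " + g_active_context);
  }
  g_active_context = f.context_name;  // only now enter the callee context
  // ... convert the receiver and invoke the code ...
}

int main() {
  try {
    CallModel({/*is_class_constructor=*/true, "C"});
  } catch (const std::exception& e) {
    std::cout << e.what() << "\n";  // TypeError raised in context: caller
  }
}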

src/ppc/lithium-codegen-ppc.cc

@@ -3460,28 +3460,13 @@ void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
   if (!instr->hydrogen()->known_function()) {
     // Do not transform the receiver to object for strict mode
-    // functions.
+    // functions or builtins.
     __ LoadP(scratch,
              FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
     __ lwz(scratch,
            FieldMemOperand(scratch, SharedFunctionInfo::kCompilerHintsOffset));
-    __ TestBit(scratch,
-#if V8_TARGET_ARCH_PPC64
-               SharedFunctionInfo::kStrictModeFunction,
-#else
-               SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
-#endif
-               r0);
-    __ bne(&result_in_receiver, cr0);
-
-    // Do not transform the receiver to object for builtins.
-    __ TestBit(scratch,
-#if V8_TARGET_ARCH_PPC64
-               SharedFunctionInfo::kNative,
-#else
-               SharedFunctionInfo::kNative + kSmiTagSize,
-#endif
-               r0);
+    __ andi(r0, scratch, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
+                                 (1 << SharedFunctionInfo::kNativeBit)));
     __ bne(&result_in_receiver, cr0);
   }
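
The rewritten sequence above folds two single-bit TestBit checks, each needing an #if for the 64-bit versus Smi-tagged 32-bit bit position, into a single andi against a combined mask. A minimal C++ sketch of the resulting test, assuming hypothetical bit positions (the real values are the new SharedFunctionInfo constants, not the numbers used here):

#include <cstdint>
#include <cstdio>

// Hypothetical positions; V8's kStrictModeBit/kNativeBit already fold in
// the Smi-tag shift, so no #if is needed at the use site.
constexpr int kStrictModeBit = 2;
constexpr int kNativeBit = 4;

// One AND against the combined mask replaces two separate bit tests: the
// receiver is left untouched if either hint bit is set.
bool SkipReceiverTransform(uint32_t compiler_hints) {
  constexpr uint32_t kMask = (1u << kStrictModeBit) | (1u << kNativeBit);
  return (compiler_hints & kMask) != 0;  // andi. r0, scratch, kMask; bne cr0
}

int main() {
  std::printf("%d %d\n",
              SkipReceiverTransform(1u << kNativeBit),  // prints 1
              SkipReceiverTransform(0));                // prints 0
}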

src/objects.h

@@ -7105,19 +7105,26 @@ class SharedFunctionInfo: public HeapObject {
                 SharedFunctionInfo::kCompilerHintsSize * kBitsPerByte);

 public:
+  // Constants for optimizing codegen for strict mode function and
+  // native tests when using integer-width instructions.
+  static const int kStrictModeBit =
+      kStrictModeFunction + kCompilerHintsSmiTagSize;
+  static const int kStrongModeBit =
+      kStrongModeFunction + kCompilerHintsSmiTagSize;
+  static const int kNativeBit = kNative + kCompilerHintsSmiTagSize;
+  static const int kBoundBit = kBoundFunction + kCompilerHintsSmiTagSize;
+
+  static const int kClassConstructorBits =
+      FunctionKind::kClassConstructor
+      << (kFunctionKind + kCompilerHintsSmiTagSize);
+
   // Constants for optimizing codegen for strict mode function and
   // native tests.
   // Allows to use byte-width instructions.
-  static const int kStrictModeBitWithinByte =
-      (kStrictModeFunction + kCompilerHintsSmiTagSize) % kBitsPerByte;
-  static const int kStrongModeBitWithinByte =
-      (kStrongModeFunction + kCompilerHintsSmiTagSize) % kBitsPerByte;
-  static const int kNativeBitWithinByte =
-      (kNative + kCompilerHintsSmiTagSize) % kBitsPerByte;
-  static const int kBoundBitWithinByte =
-      (kBoundFunction + kCompilerHintsSmiTagSize) % kBitsPerByte;
+  static const int kStrictModeBitWithinByte = kStrictModeBit % kBitsPerByte;
+  static const int kStrongModeBitWithinByte = kStrongModeBit % kBitsPerByte;
+  static const int kNativeBitWithinByte = kNativeBit % kBitsPerByte;
+  static const int kBoundBitWithinByte = kBoundBit % kBitsPerByte;

   static const int kClassConstructorBitsWithinByte =
       FunctionKind::kClassConstructor << kCompilerHintsSmiTagSize;
@@ -7128,7 +7135,7 @@ class SharedFunctionInfo: public HeapObject {
       kCompilerHintsOffset + \
       (compiler_hint + kCompilerHintsSmiTagSize) / kBitsPerByte
 #elif defined(V8_TARGET_BIG_ENDIAN)
-#define BYTE_OFFSET(compiler_hint) \
+#define BYTE_OFFSET(compiler_hint)                  \
   kCompilerHintsOffset + (kCompilerHintsSize - 1) - \
       ((compiler_hint + kCompilerHintsSmiTagSize) / kBitsPerByte)
 #else
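
The point of the new constants: on 32-bit targets the compiler-hints word is stored as a Smi, so every logical hint index is shifted by the tag size, while 64-bit targets read the hints as an untagged 32-bit value (kCompilerHintsSmiTagSize is 0 there). The sketch below works through the arithmetic with illustrative values only; the last line mirrors the big-endian BYTE_OFFSET macro, which counts bytes from the far end of the field:

#include <cstdio>

constexpr int kBitsPerByte = 8;
constexpr int kCompilerHintsSize = 4;        // hints occupy a 32-bit field
constexpr int kCompilerHintsSmiTagSize = 1;  // 1 on 32-bit (Smi tag), 0 on 64-bit
constexpr int kCompilerHintsOffset = 0;      // illustrative field offset
constexpr int kStrictModeFunction = 2;       // illustrative logical hint index

// Integer-width access: shift past the Smi tag once, here, instead of
// repeating "+ kSmiTagSize" under #if at every use site.
constexpr int kStrictModeBit = kStrictModeFunction + kCompilerHintsSmiTagSize;

// Byte-width access: the same bit expressed within its byte...
constexpr int kStrictModeBitWithinByte = kStrictModeBit % kBitsPerByte;
// ...plus the byte's offset, counted from the far end on big-endian.
constexpr int kStrictModeByteOffsetBE =
    kCompilerHintsOffset + (kCompilerHintsSize - 1) -
    kStrictModeBit / kBitsPerByte;

int main() {
  std::printf("bit=%d withinByte=%d byteOffsetBE=%d\n", kStrictModeBit,
              kStrictModeBitWithinByte, kStrictModeByteOffsetBE);
}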

src/ppc/builtins-ppc.cc

@@ -1596,19 +1596,28 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm) {
   Label convert, convert_global_proxy, convert_to_object, done_convert;
   __ AssertFunction(r4);

-  // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal
-  // slot is "classConstructor".
+  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
+  __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
+  {
+    Label non_class_constructor;
+    // Check whether the current function is a classConstructor.
+    __ TestBitMask(r6, SharedFunctionInfo::kClassConstructorBits, r0);
+    __ beq(&non_class_constructor, cr0);
+    // Step: 2, If we call a classConstructor Function throw a TypeError.
+    {
+      FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
+      __ CallRuntime(Runtime::kThrowConstructorNonCallableError, 0);
+    }
+    __ bind(&non_class_constructor);
+  }
+
   // Enter the context of the function; ToObject has to run in the function
   // context, and we also need to take the global proxy from the function
   // context in case of conversion.
-  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
-                SharedFunctionInfo::kStrictModeByteOffset);
   __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
-  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
   // We need to convert the receiver for non-native sloppy mode functions.
-  __ lbz(r6, FieldMemOperand(r5, SharedFunctionInfo::kNativeByteOffset));
-  __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
-                          (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
+  __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
+                          (1 << SharedFunctionInfo::kNativeBit)));
   __ bne(&done_convert, cr0);
   {
     __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
@@ -1866,13 +1875,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
     Label no_strong_error;
     __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
     __ lwz(r8, FieldMemOperand(r7, SharedFunctionInfo::kCompilerHintsOffset));
-    __ TestBit(r8,
-#if V8_TARGET_ARCH_PPC64
-               SharedFunctionInfo::kStrongModeFunction,
-#else
-               SharedFunctionInfo::kStrongModeFunction + kSmiTagSize,
-#endif
-               r0);
+    __ TestBit(r8, SharedFunctionInfo::kStrongModeBit, r0);
     __ beq(&no_strong_error, cr0);

     // What we really care about is the required number of arguments.
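
kClassConstructorBits, tested with TestBitMask in Generate_CallFunction above, is a multi-bit mask: FunctionKind's class-constructor kind is itself a combination of constructor-kind bits, shifted to the function-kind field's position inside the hints word. TestBitMask therefore answers "is any of these bits set", which the beq on cr0 branches on. A C++ sketch with illustrative values (the real bit assignments live in FunctionKind and SharedFunctionInfo):

#include <cstdint>
#include <cstdio>

// Illustrative values: the class-constructor kinds occupy a few bits of
// the FunctionKind field, which is then shifted into the hints word.
constexpr uint32_t kBaseConstructor = 1u << 0;
constexpr uint32_t kSubclassConstructor = 1u << 1;
constexpr uint32_t kClassConstructor = kBaseConstructor | kSubclassConstructor;
constexpr int kFunctionKindShift = 8;  // illustrative field position
constexpr uint32_t kClassConstructorBits = kClassConstructor
                                           << kFunctionKindShift;

// TestBitMask semantics: AND the hints with the mask and branch on the
// result; any set bit means "this function is a class constructor".
bool IsClassConstructor(uint32_t compiler_hints) {
  return (compiler_hints & kClassConstructorBits) != 0;
}

int main() {
  std::printf("%d\n", IsClassConstructor(kSubclassConstructor
                                         << kFunctionKindShift));  // prints 1
}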

src/ppc/code-stubs-ppc.cc

@@ -1430,13 +1430,7 @@ void InstanceOfStub::Generate(MacroAssembler* masm) {
            FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
   __ lwz(scratch, FieldMemOperand(shared_info,
                                   SharedFunctionInfo::kCompilerHintsOffset));
-  __ TestBit(scratch,
-#if V8_TARGET_ARCH_PPC64
-             SharedFunctionInfo::kBoundFunction,
-#else
-             SharedFunctionInfo::kBoundFunction + kSmiTagSize,
-#endif
-             r0);
+  __ TestBit(scratch, SharedFunctionInfo::kBoundBit, r0);
   __ bne(&slow_case, cr0);

   // Get the "prototype" (or initial map) of the {function}.
@@ -2536,26 +2530,14 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
+  // ----------- S t a t e -------------
+  //  -- r4 : the function to call
+  //  -- r6 : the function's shared function info
+  // -----------------------------------
   // Do not transform the receiver for strict mode functions and natives.
-  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
   __ lwz(r7, FieldMemOperand(r6, SharedFunctionInfo::kCompilerHintsOffset));
-  __ TestBit(r7,
-#if V8_TARGET_ARCH_PPC64
-             SharedFunctionInfo::kStrictModeFunction,
-#else
-             SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
-#endif
-             r0);
-  __ bne(cont, cr0);
-
-  // Do not transform the receiver for native.
-  __ TestBit(r7,
-#if V8_TARGET_ARCH_PPC64
-             SharedFunctionInfo::kNative,
-#else
-             SharedFunctionInfo::kNative + kSmiTagSize,
-#endif
-             r0);
+  __ andi(r0, r7, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
+                          (1 << SharedFunctionInfo::kNativeBit)));
   __ bne(cont, cr0);
 }
@@ -2581,6 +2563,24 @@ static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
 }

+static void EmitClassConstructorCallCheck(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- r4 : the function to call
+  //  -- r6 : the function's shared function info
+  // -----------------------------------
+  // ClassConstructor Check: ES6 section 9.2.1 [[Call]]
+  Label non_class_constructor;
+  // Check whether the current function is a classConstructor.
+  __ lwz(r7, FieldMemOperand(r6, SharedFunctionInfo::kCompilerHintsOffset));
+  __ TestBitMask(r7, SharedFunctionInfo::kClassConstructorBits, r0);
+  __ beq(&non_class_constructor, cr0);
+  // If we call a classConstructor Function throw a TypeError
+  // indirectly via the CallFunction builtin.
+  __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
+  __ bind(&non_class_constructor);
+}
+
 static void CallFunctionNoFeedback(MacroAssembler* masm, int argc,
                                    bool needs_checks, bool call_as_method) {
   // r4 : the function to call
@@ -2596,6 +2596,9 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, int argc,
     __ bne(&slow);
   }

+  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
+  EmitClassConstructorCallCheck(masm);
+
   // Fast-case: Invoke the function now.
   // r4: pushed function
   ParameterCount actual(argc);
@@ -2769,6 +2772,10 @@ void CallICStub::Generate(MacroAssembler* masm) {
   __ StoreP(r6, FieldMemOperand(r9, count_offset), r0);

   __ bind(&have_js_function);
+
+  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
+  EmitClassConstructorCallCheck(masm);
+
   if (CallAsMethod()) {
     EmitContinueIfStrictOrNative(masm, &cont);

     // Compute the receiver in sloppy mode.
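
Note the division of labor in this file: EmitClassConstructorCallCheck does not throw on its own; it tail-jumps to the CallFunction builtin, whose class-constructor check (see builtins-ppc.cc above) performs the actual runtime call, so the throwing sequence is not duplicated in every stub. The same idea in miniature, with illustrative names:

#include <cstdio>

// The generic builtin owns the throwing sequence.
void CallFunctionBuiltin(bool is_class_constructor) {
  if (is_class_constructor) {
    std::puts("TypeError: class constructor invoked without 'new'");
    return;  // the real builtin enters the runtime and never returns
  }
  std::puts("invoke");
}

// IC fast paths only detect the bad case and forward to the builtin.
void CallICFastPath(bool is_class_constructor) {
  if (is_class_constructor) {
    CallFunctionBuiltin(is_class_constructor);  // tail-call in the stub
    return;
  }
  std::puts("invoke with feedback");
}

int main() { CallICFastPath(true); }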