Add Smi::kZero and replace all Smi::FromInt(0) calls

BUG=

Committed: https://crrev.com/7db0ecdec3cf330766575cb7973b983f3f1e3020
Review-Url: https://codereview.chromium.org/2381843002
Cr-Original-Commit-Position: refs/heads/master@{#40080}
Cr-Commit-Position: refs/heads/master@{#40087}
jgruber 2016-10-07 06:05:07 -07:00 committed by Commit bot
parent ec132e05ec
commit 9ef4c3af25
116 changed files with 460 additions and 482 deletions
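The replacement is mechanical throughout: every occurrence of Smi::FromInt(0) becomes the new named constant Smi::kZero. The declaration itself lands in src/objects.h, which is not among the hunks shown below; as orientation, here is a hypothetical, minimal C++ sketch of what the constant amounts to, assuming V8's usual Smi pointer-tagging scheme (value shifted left past the one-bit tag, tag bit 0 meaning Smi). It is consistent with the DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero) assertions in the diff, but it is an illustration, not the real header.

#include <cstdint>

// Hypothetical stand-in for V8's Smi (illustration only, not objects.h).
// A Smi packs a small integer into a tagged pointer: value << 1, tag bit 0.
class Smi {
 public:
  static Smi* FromInt(intptr_t value) {
    return reinterpret_cast<Smi*>(static_cast<uintptr_t>(value) << 1);
  }
  // The new named constant; defined below as FromInt(0).
  static Smi* const kZero;
};

// FromInt(0) tags to the all-zero bit pattern, i.e. the null pointer,
// which is why hunks below can compare it against static_cast<Smi*>(0).
Smi* const Smi::kZero = Smi::FromInt(0);

int main() { return Smi::kZero == Smi::FromInt(0) ? 0 : 1; }

A named constant reads better at call sites and makes the zero Smi easy to grep for; the hunks below are the corresponding mechanical rewrite across the tree.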

View File

@ -738,7 +738,7 @@ void Accessors::FunctionLengthGetter(
Handle<JSFunction>::cast(Utils::OpenHandle(*info.Holder()));
Handle<Object> result;
if (!JSFunction::GetLength(isolate, function).ToHandle(&result)) {
- result = handle(Smi::FromInt(0), isolate);
+ result = handle(Smi::kZero, isolate);
isolate->OptionalRescheduleException(false);
}
@ -1072,7 +1072,7 @@ void Accessors::BoundFunctionLengthGetter(
Handle<JSFunction> target(JSFunction::cast(function->bound_target_function()),
isolate);
if (!JSFunction::GetLength(isolate, target).ToHandle(&target_length)) {
- target_length = handle(Smi::FromInt(0), isolate);
+ target_length = handle(Smi::kZero, isolate);
isolate->OptionalRescheduleException(false);
return;
}

View File

@ -1061,7 +1061,7 @@ void Template::Set(v8::Local<Name> name, v8::Local<Data> value,
auto value_obj = Utils::OpenHandle(*value);
CHECK(!value_obj->IsJSReceiver() || value_obj->IsTemplateInfo());
if (value_obj->IsObjectTemplateInfo()) {
- templ->set_serial_number(i::Smi::FromInt(0));
+ templ->set_serial_number(i::Smi::kZero);
if (templ->IsFunctionTemplateInfo()) {
i::Handle<i::FunctionTemplateInfo>::cast(templ)->set_do_not_cache(true);
}
@ -1395,7 +1395,7 @@ static Local<ObjectTemplate> ObjectTemplateNew(
obj->set_serial_number(i::Smi::FromInt(next_serial_number));
if (!constructor.IsEmpty())
obj->set_constructor(*Utils::OpenHandle(*constructor));
- obj->set_data(i::Smi::FromInt(0));
+ obj->set_data(i::Smi::kZero);
return Utils::ToLocal(obj);
}

View File

@ -28,7 +28,7 @@ class Consts {
template <typename T> inline T ToCData(v8::internal::Object* obj) {
STATIC_ASSERT(sizeof(T) == sizeof(v8::internal::Address));
- if (obj == v8::internal::Smi::FromInt(0)) return nullptr;
+ if (obj == v8::internal::Smi::kZero) return nullptr;
return reinterpret_cast<T>(
reinterpret_cast<intptr_t>(
v8::internal::Foreign::cast(obj)->foreign_address()));
@ -39,7 +39,7 @@ template <typename T>
inline v8::internal::Handle<v8::internal::Object> FromCData(
v8::internal::Isolate* isolate, T obj) {
STATIC_ASSERT(sizeof(T) == sizeof(v8::internal::Address));
- if (obj == nullptr) return handle(v8::internal::Smi::FromInt(0), isolate);
+ if (obj == nullptr) return handle(v8::internal::Smi::kZero, isolate);
return isolate->factory()->NewForeign(
reinterpret_cast<v8::internal::Address>(reinterpret_cast<intptr_t>(obj)));
}

View File

@ -544,7 +544,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// If either is a Smi (we know that not both are), then they can only
// be strictly equal if the other is a HeapNumber.
STATIC_ASSERT(kSmiTag == 0);
- DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
+ DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
__ and_(r2, lhs, Operand(rhs));
__ JumpIfNotSmi(r2, &not_smis);
// One operand is a smi. EmitSmiNonsmiComparison generates code that can:
@ -4216,7 +4216,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
- __ cmp(r6, Operand(Smi::FromInt(0)));
+ __ cmp(r6, Operand(Smi::kZero));
__ mov(r9, Operand::Zero(), LeaveCC, eq);
__ mov(r9, Operand(r6, LSL, 1), LeaveCC, ne);
__ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
@ -4273,7 +4273,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
// r6 = mapped parameter count (tagged)
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
- __ cmp(r6, Operand(Smi::FromInt(0)));
+ __ cmp(r6, Operand(Smi::kZero));
// Move backing store address to r1, because it is
// expected there when filling in the unmapped arguments.
__ mov(r1, r4, LeaveCC, eq);
@ -4321,7 +4321,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
__ str(ip, MemOperand(r1, r0));
__ add(r9, r9, Operand(Smi::FromInt(1)));
__ bind(&parameters_test);
- __ cmp(r5, Operand(Smi::FromInt(0)));
+ __ cmp(r5, Operand(Smi::kZero));
__ b(ne, &parameters_loop);
// Restore r0 = new object (tagged) and r5 = argument count (tagged).
@ -4881,7 +4881,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ Push(scratch, scratch);
__ mov(scratch, Operand(ExternalReference::isolate_address(isolate())));
__ Push(scratch, holder);
- __ Push(Smi::FromInt(0)); // should_throw_on_error -> false
+ __ Push(Smi::kZero); // should_throw_on_error -> false
__ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
__ push(scratch);
// v8::PropertyCallbackInfo::args_ array and name handle.

View File

@ -3653,7 +3653,7 @@ void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
cmp(index, ip);
Check(lt, kIndexIsTooLarge);
- cmp(index, Operand(Smi::FromInt(0)));
+ cmp(index, Operand(Smi::kZero));
Check(ge, kIndexIsNegative);
SmiUntag(index, index);
@ -3943,7 +3943,7 @@ void MacroAssembler::CheckEnumCache(Label* call_runtime) {
// For all objects but the receiver, check that the cache is empty.
EnumLength(r3, r1);
- cmp(r3, Operand(Smi::FromInt(0)));
+ cmp(r3, Operand(Smi::kZero));
b(ne, call_runtime);
bind(&start);

View File

@ -4271,7 +4271,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ LoadRoot(x1, Heap::kEmptyFixedArrayRootIndex);
__ Str(x1, FieldMemOperand(x0, JSArray::kPropertiesOffset));
__ Str(x1, FieldMemOperand(x0, JSArray::kElementsOffset));
- __ Mov(x1, Smi::FromInt(0));
+ __ Mov(x1, Smi::kZero);
__ Str(x1, FieldMemOperand(x0, JSArray::kLengthOffset));
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ Ret();
@ -5202,7 +5202,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ Mov(scratch2, Operand(ExternalReference::isolate_address(isolate())));
__ Ldr(scratch3, FieldMemOperand(callback, AccessorInfo::kDataOffset));
__ Push(scratch3, scratch, scratch, scratch2, holder);
- __ Push(Smi::FromInt(0)); // should_throw_on_error -> false
+ __ Push(Smi::kZero); // should_throw_on_error -> false
__ Ldr(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
__ Push(scratch);

View File

@ -3830,7 +3830,7 @@ void MacroAssembler::EmitSeqStringSetCharCheck(
Cmp(index, index_type == kIndexIsSmi ? scratch : Operand::UntagSmi(scratch));
Check(lt, kIndexIsTooLarge);
- DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
+ DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
Cmp(index, 0);
Check(ge, kIndexIsNegative);
}

View File

@ -612,7 +612,7 @@ void ArrayLiteral::BuildConstantElements(Isolate* isolate) {
}
if (boilerplate_value->IsUninitialized(isolate)) {
- boilerplate_value = handle(Smi::FromInt(0), isolate);
+ boilerplate_value = handle(Smi::kZero, isolate);
is_simple = false;
}

View File

@ -315,7 +315,7 @@ Handle<ScopeInfo> ScopeInfo::CreateForWithScope(
int index = kVariablePartIndex;
DCHECK_EQ(index, scope_info->ParameterNamesIndex());
DCHECK_EQ(index, scope_info->StackLocalFirstSlotIndex());
- scope_info->set(index++, Smi::FromInt(0));
+ scope_info->set(index++, Smi::kZero);
DCHECK_EQ(index, scope_info->StackLocalNamesIndex());
DCHECK_EQ(index, scope_info->ReceiverInfoIndex());
DCHECK_EQ(index, scope_info->FunctionNameInfoIndex());

View File

@ -1318,7 +1318,7 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object,
// Create the %NumberPrototype%
Handle<JSValue> prototype =
Handle<JSValue>::cast(factory->NewJSObject(number_fun, TENURED));
- prototype->set_value(Smi::FromInt(0));
+ prototype->set_value(Smi::kZero);
Accessors::FunctionSetPrototype(number_fun, prototype).Assert();
// Install the "constructor" property on the {prototype}.
@ -4215,7 +4215,7 @@ Genesis::Genesis(Isolate* isolate,
isolate->counters()->contexts_created_from_scratch()->Increment();
// Re-initialize the counter because it got incremented during snapshot
// creation.
- isolate->native_context()->set_errors_thrown(Smi::FromInt(0));
+ isolate->native_context()->set_errors_thrown(Smi::kZero);
}
// Install experimental natives. Do not include them into the

View File

@ -260,7 +260,7 @@ void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
// 2b. No arguments, return +0.
__ bind(&no_arguments);
- __ Move(r0, Smi::FromInt(0));
+ __ Move(r0, Smi::kZero);
__ Ret(1);
}
@ -288,7 +288,7 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
__ b(&done);
__ bind(&no_arguments);
- __ Move(r2, Smi::FromInt(0));
+ __ Move(r2, Smi::kZero);
__ bind(&done);
}
@ -1326,7 +1326,7 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
- DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
+ DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
__ Move(r2, masm->isolate()->builtins()->InterpreterEntryTrampoline());
__ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value() +
Code::kHeaderSize - kHeapObjectTag));
@ -1843,7 +1843,7 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
// If the code object is null, just return to the caller.
Label skip;
- __ cmp(r0, Operand(Smi::FromInt(0)));
+ __ cmp(r0, Operand(Smi::kZero));
__ b(ne, &skip);
__ Ret();
@ -2757,7 +2757,7 @@ void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// -----------------------------------
__ SmiTag(r1);
__ Push(r1);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
@ -2770,7 +2770,7 @@ void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
__ SmiTag(r1);
__ Move(r2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(r1, r2);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
@ -2781,7 +2781,7 @@ void Builtins::Generate_Abort(MacroAssembler* masm) {
// -- lr : return address
// -----------------------------------
__ Push(r1);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAbort);
}

View File

@ -278,7 +278,7 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
__ B(&done);
__ Bind(&no_arguments);
- __ Mov(x2, Smi::FromInt(0));
+ __ Mov(x2, Smi::kZero);
__ Bind(&done);
}
@ -1337,7 +1337,7 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
- DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
+ DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
__ LoadObject(x1, masm->isolate()->builtins()->InterpreterEntryTrampoline());
__ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value() +
Code::kHeaderSize - kHeapObjectTag));
@ -1847,7 +1847,7 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
// If the code object is null, just return to the caller.
Label skip;
- __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
+ __ CompareAndBranch(x0, Smi::kZero, ne, &skip);
__ Ret();
__ Bind(&skip);
@ -2831,7 +2831,7 @@ void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// -----------------------------------
__ SmiTag(x1);
__ Push(x1);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
@ -2845,7 +2845,7 @@ void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
__ SmiTag(x1);
__ Move(x2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(x1, x2);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
@ -2858,7 +2858,7 @@ void Builtins::Generate_Abort(MacroAssembler* masm) {
// -----------------------------------
MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
__ Push(x1);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAbort);
}

View File

@ -1084,7 +1084,7 @@ Object* Slow_ArrayConcat(BuiltinArguments* args, Handle<Object> species,
storage = SeededNumberDictionary::New(isolate, at_least_space_for);
} else {
DCHECK(species->IsConstructor());
- Handle<Object> length(Smi::FromInt(0), isolate);
+ Handle<Object> length(Smi::kZero, isolate);
Handle<Object> storage_object;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, storage_object,

View File

@ -390,7 +390,7 @@ void Builtins::Generate_ToLength(CodeStubAssembler* assembler) {
assembler->Return(assembler->NumberConstant(kMaxSafeInteger));
assembler->Bind(&return_zero);
- assembler->Return(assembler->SmiConstant(Smi::FromInt(0)));
+ assembler->Return(assembler->SmiConstant(Smi::kZero));
}
}

View File

@ -88,7 +88,7 @@ BUILTIN(DataViewConstructor_ConstructStub) {
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
JSObject::New(target, new_target));
for (int i = 0; i < ArrayBufferView::kInternalFieldCount; ++i) {
- Handle<JSDataView>::cast(result)->SetInternalField(i, Smi::FromInt(0));
+ Handle<JSDataView>::cast(result)->SetInternalField(i, Smi::kZero);
}
// 12. Set O's [[ViewedArrayBuffer]] internal slot to buffer.

View File

@ -198,7 +198,7 @@ Object* DoFunctionBind(Isolate* isolate, BuiltinArguments args) {
if (!target->IsJSFunction() ||
length_lookup.state() != LookupIterator::ACCESSOR ||
!length_lookup.GetAccessors()->IsAccessorInfo()) {
- Handle<Object> length(Smi::FromInt(0), isolate);
+ Handle<Object> length(Smi::kZero, isolate);
Maybe<PropertyAttributes> attributes =
JSReceiver::GetPropertyAttributes(&length_lookup);
if (!attributes.IsJust()) return isolate->heap()->exception();

View File

@ -316,7 +316,7 @@ void Builtins::Generate_MathFround(CodeStubAssembler* assembler) {
BUILTIN(MathHypot) {
HandleScope scope(isolate);
int const length = args.length() - 1;
- if (length == 0) return Smi::FromInt(0);
+ if (length == 0) return Smi::kZero;
DCHECK_LT(0, length);
double max = 0;
bool one_arg_is_nan = false;
@ -345,7 +345,7 @@ BUILTIN(MathHypot) {
}
if (max == 0) {
- return Smi::FromInt(0);
+ return Smi::kZero;
}
DCHECK_GT(max, 0);

View File

@ -322,7 +322,7 @@ void Builtins::Generate_RegExpPrototypeExec(CodeStubAssembler* a) {
Node* const null = a->NullConstant();
Node* const int_zero = a->IntPtrConstant(0);
- Node* const smi_zero = a->SmiConstant(Smi::FromInt(0));
+ Node* const smi_zero = a->SmiConstant(Smi::kZero);
// Ensure {receiver} is a JSRegExp.
Node* const regexp_map = a->ThrowIfNotInstanceType(

View File

@ -805,13 +805,13 @@ BUILTIN(StringPrototypeLocaleCompare) {
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
isolate, str2, Object::ToString(isolate, args.at<Object>(1)));
- if (str1.is_identical_to(str2)) return Smi::FromInt(0); // Equal.
+ if (str1.is_identical_to(str2)) return Smi::kZero; // Equal.
int str1_length = str1->length();
int str2_length = str2->length();
// Decide trivial cases without flattening.
if (str1_length == 0) {
- if (str2_length == 0) return Smi::FromInt(0); // Equal.
+ if (str2_length == 0) return Smi::kZero; // Equal.
return Smi::FromInt(-str2_length);
} else {
if (str2_length == 0) return Smi::FromInt(str1_length);
@ -891,7 +891,7 @@ void Builtins::Generate_StringPrototypeSubstr(CodeStubAssembler* a) {
Node* const length = a->Parameter(2);
Node* const context = a->Parameter(5);
- Node* const zero = a->SmiConstant(Smi::FromInt(0));
+ Node* const zero = a->SmiConstant(Smi::kZero);
// Check that {receiver} is coercible to Object and convert it to a String.
Node* const string =
@ -1030,7 +1030,7 @@ compiler::Node* ToSmiBetweenZeroAnd(CodeStubAssembler* a,
a->Bind(&if_isoutofbounds);
{
- Node* const zero = a->SmiConstant(Smi::FromInt(0));
+ Node* const zero = a->SmiConstant(Smi::kZero);
var_result.Bind(a->Select(a->SmiLessThan(value_int, zero), zero, limit));
a->Goto(&out);
}
@ -1042,7 +1042,7 @@ compiler::Node* ToSmiBetweenZeroAnd(CodeStubAssembler* a,
a->Assert(a->WordEqual(a->LoadMap(value_int), a->HeapNumberMapConstant()));
Node* const float_zero = a->Float64Constant(0.);
- Node* const smi_zero = a->SmiConstant(Smi::FromInt(0));
+ Node* const smi_zero = a->SmiConstant(Smi::kZero);
Node* const value_float = a->LoadHeapNumberValue(value_int);
var_result.Bind(a->Select(a->Float64LessThan(value_float, float_zero),
smi_zero, limit));
@ -1175,7 +1175,7 @@ void Builtins::Generate_StringPrototypeIterator(CodeStubAssembler* assembler) {
Heap::kEmptyFixedArrayRootIndex);
assembler->StoreObjectFieldNoWriteBarrier(
iterator, JSStringIterator::kStringOffset, string);
- Node* index = assembler->SmiConstant(Smi::FromInt(0));
+ Node* index = assembler->SmiConstant(Smi::kZero);
assembler->StoreObjectFieldNoWriteBarrier(
iterator, JSStringIterator::kNextIndexOffset, index);
assembler->Return(iterator);

View File

@ -473,7 +473,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ mov(eax, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
{
Label done_loop, loop;
- __ Move(ecx, Smi::FromInt(0));
+ __ Move(ecx, Smi::kZero);
__ bind(&loop);
__ cmp(ecx, FieldOperand(eax, FixedArray::kLengthOffset));
__ j(equal, &done_loop, Label::kNear);
@ -1010,7 +1010,7 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
- DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
+ DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
__ LoadHeapObject(ebx,
masm->isolate()->builtins()->InterpreterEntryTrampoline());
__ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
@ -1923,7 +1923,7 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
__ jmp(&done, Label::kNear);
__ bind(&no_arguments);
- __ Move(ebx, Smi::FromInt(0));
+ __ Move(ebx, Smi::kZero);
__ bind(&done);
}
@ -2821,7 +2821,7 @@ void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
__ PopReturnAddressTo(ecx);
__ Push(edx);
__ PushReturnAddressFrom(ecx);
- __ Move(esi, Smi::FromInt(0));
+ __ Move(esi, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
@ -2836,7 +2836,7 @@ void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
__ Push(edx);
__ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ PushReturnAddressFrom(ecx);
- __ Move(esi, Smi::FromInt(0));
+ __ Move(esi, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
@ -2849,7 +2849,7 @@ void Builtins::Generate_Abort(MacroAssembler* masm) {
__ PopReturnAddressTo(ecx);
__ Push(edx);
__ PushReturnAddressFrom(ecx);
- __ Move(esi, Smi::FromInt(0));
+ __ Move(esi, Smi::kZero);
__ TailCallRuntime(Runtime::kAbort);
}

View File

@ -266,7 +266,7 @@ void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
// 2b. No arguments, return +0.
__ bind(&no_arguments);
- __ Move(v0, Smi::FromInt(0));
+ __ Move(v0, Smi::kZero);
__ DropAndRet(1);
}
@ -295,7 +295,7 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ lw(a0, MemOperand(at));
__ jmp(&done);
__ bind(&no_arguments);
- __ Move(a0, Smi::FromInt(0));
+ __ Move(a0, Smi::kZero);
__ bind(&done);
}
@ -1325,7 +1325,7 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
- DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
+ DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
__ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
__ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
Code::kHeaderSize - kHeapObjectTag));
@ -1842,7 +1842,7 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
}
// If the code object is null, just return to the caller.
- __ Ret(eq, v0, Operand(Smi::FromInt(0)));
+ __ Ret(eq, v0, Operand(Smi::kZero));
// Drop any potential handler frame that is be sitting on top of the actual
// JavaScript frame. This is the case then OSR is triggered from bytecode.
@ -2824,7 +2824,7 @@ void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// -----------------------------------
__ SmiTag(a0);
__ Push(a0);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
@ -2837,7 +2837,7 @@ void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
__ SmiTag(a0);
__ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(a0, a1);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
@ -2848,7 +2848,7 @@ void Builtins::Generate_Abort(MacroAssembler* masm) {
// -- ra : return address
// -----------------------------------
__ Push(a0);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAbort);
}

View File

@ -264,7 +264,7 @@ void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
// 2b. No arguments, return +0.
__ bind(&no_arguments);
- __ Move(v0, Smi::FromInt(0));
+ __ Move(v0, Smi::kZero);
__ DropAndRet(1);
}
@ -293,7 +293,7 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ ld(a0, MemOperand(at));
__ jmp(&done);
__ bind(&no_arguments);
- __ Move(a0, Smi::FromInt(0));
+ __ Move(a0, Smi::kZero);
__ bind(&done);
}
@ -1317,7 +1317,7 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
- DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
+ DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
__ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
__ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
Code::kHeaderSize - kHeapObjectTag));
@ -1836,7 +1836,7 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
}
// If the code object is null, just return to the caller.
- __ Ret(eq, v0, Operand(Smi::FromInt(0)));
+ __ Ret(eq, v0, Operand(Smi::kZero));
// Drop any potential handler frame that is be sitting on top of the actual
// JavaScript frame. This is the case then OSR is triggered from bytecode.
@ -2817,7 +2817,7 @@ void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// -----------------------------------
__ SmiTag(a0);
__ Push(a0);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
@ -2830,7 +2830,7 @@ void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
__ SmiTag(a0);
__ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(a0, a1);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
@ -2841,7 +2841,7 @@ void Builtins::Generate_Abort(MacroAssembler* masm) {
// -- ra : return address
// -----------------------------------
__ Push(a0);
- __ Move(cp, Smi::FromInt(0));
+ __ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAbort);
}

View File

@ -267,7 +267,7 @@ void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
// 2b. No arguments, return +0.
__ bind(&no_arguments);
- __ LoadSmiLiteral(r3, Smi::FromInt(0));
+ __ LoadSmiLiteral(r3, Smi::kZero);
__ Ret(1);
}
@ -297,7 +297,7 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ LoadPX(r5, MemOperand(sp, r5));
__ b(&done);
__ bind(&no_arguments);
- __ LoadSmiLiteral(r5, Smi::FromInt(0));
+ __ LoadSmiLiteral(r5, Smi::kZero);
__ bind(&done);
}
@ -1353,7 +1353,7 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
- DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
+ DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
__ Move(r5, masm->isolate()->builtins()->InterpreterEntryTrampoline());
__ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value() +
Code::kHeaderSize - kHeapObjectTag));
@ -1888,7 +1888,7 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
// If the code object is null, just return to the caller.
Label skip;
- __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
+ __ CmpSmiLiteral(r3, Smi::kZero, r0);
__ bne(&skip);
__ Ret();
@ -2854,7 +2854,7 @@ void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// -----------------------------------
__ SmiTag(r4);
__ Push(r4);
- __ LoadSmiLiteral(cp, Smi::FromInt(0));
+ __ LoadSmiLiteral(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
@ -2867,7 +2867,7 @@ void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
__ SmiTag(r4);
__ LoadSmiLiteral(r5, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(r4, r5);
- __ LoadSmiLiteral(cp, Smi::FromInt(0));
+ __ LoadSmiLiteral(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
@ -2878,7 +2878,7 @@ void Builtins::Generate_Abort(MacroAssembler* masm) {
// -- lr : return address
// -----------------------------------
__ push(r4);
- __ LoadSmiLiteral(cp, Smi::FromInt(0));
+ __ LoadSmiLiteral(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAbort);
}

View File

@ -267,7 +267,7 @@ void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
// 2b. No arguments, return +0.
__ bind(&no_arguments);
- __ LoadSmiLiteral(r2, Smi::FromInt(0));
+ __ LoadSmiLiteral(r2, Smi::kZero);
__ Ret(1);
}
@ -296,7 +296,7 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ LoadP(r4, MemOperand(sp, r4));
__ b(&done);
__ bind(&no_arguments);
- __ LoadSmiLiteral(r4, Smi::FromInt(0));
+ __ LoadSmiLiteral(r4, Smi::kZero);
__ bind(&done);
}
@ -1357,7 +1357,7 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
- DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
+ DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
__ Move(r4, masm->isolate()->builtins()->InterpreterEntryTrampoline());
__ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset->value() +
Code::kHeaderSize - kHeapObjectTag));
@ -1895,7 +1895,7 @@ static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
// If the code object is null, just return to the caller.
Label skip;
- __ CmpSmiLiteral(r2, Smi::FromInt(0), r0);
+ __ CmpSmiLiteral(r2, Smi::kZero, r0);
__ bne(&skip);
__ Ret();
@ -2865,7 +2865,7 @@ void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
// -----------------------------------
__ SmiTag(r3);
__ Push(r3);
- __ LoadSmiLiteral(cp, Smi::FromInt(0));
+ __ LoadSmiLiteral(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
@ -2878,7 +2878,7 @@ void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
__ SmiTag(r3);
__ LoadSmiLiteral(r4, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(r3, r4);
- __ LoadSmiLiteral(cp, Smi::FromInt(0));
+ __ LoadSmiLiteral(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
@ -2889,7 +2889,7 @@ void Builtins::Generate_Abort(MacroAssembler* masm) {
// -- lr : return address
// -----------------------------------
__ push(r3);
- __ LoadSmiLiteral(cp, Smi::FromInt(0));
+ __ LoadSmiLiteral(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAbort);
}

View File

@ -658,7 +658,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// it is present) and load it into kInterpreterBytecodeArrayRegister.
__ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
Label load_debug_bytecode_array, bytecode_array_loaded;
- DCHECK_EQ(Smi::FromInt(0), DebugInfo::uninitialized());
+ DCHECK_EQ(Smi::kZero, DebugInfo::uninitialized());
__ cmpp(FieldOperand(rax, SharedFunctionInfo::kDebugInfoOffset),
Immediate(0));
__ j(not_equal, &load_debug_bytecode_array);
@ -986,7 +986,7 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
- DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
+ DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
__ Move(rbx, masm->isolate()->builtins()->InterpreterEntryTrampoline());
__ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value() +
Code::kHeaderSize - kHeapObjectTag));
@ -1888,7 +1888,7 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ movp(rbx, args.GetArgumentOperand(1));
__ jmp(&done, Label::kNear);
__ bind(&no_arguments);
- __ Move(rbx, Smi::FromInt(0));
+ __ Move(rbx, Smi::kZero);
__ bind(&done);
}
@ -2141,7 +2141,7 @@ void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
__ PopReturnAddressTo(rcx);
__ Push(rdx);
__ PushReturnAddressFrom(rcx);
- __ Move(rsi, Smi::FromInt(0));
+ __ Move(rsi, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
@ -2156,7 +2156,7 @@ void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
__ Push(rdx);
__ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ PushReturnAddressFrom(rcx);
- __ Move(rsi, Smi::FromInt(0));
+ __ Move(rsi, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
@ -2169,7 +2169,7 @@ void Builtins::Generate_Abort(MacroAssembler* masm) {
__ PopReturnAddressTo(rcx);
__ Push(rdx);
__ PushReturnAddressFrom(rcx);
- __ Move(rsi, Smi::FromInt(0));
+ __ Move(rsi, Smi::kZero);
__ TailCallRuntime(Runtime::kAbort);
}

View File

@ -474,7 +474,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ mov(eax, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
{
Label done_loop, loop;
- __ Move(ecx, Smi::FromInt(0));
+ __ Move(ecx, Smi::kZero);
__ bind(&loop);
__ cmp(ecx, FieldOperand(eax, FixedArray::kLengthOffset));
__ j(equal, &done_loop, Label::kNear);
@ -1011,7 +1011,7 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
- DCHECK_NE(interpreter_entry_return_pc_offset, Smi::FromInt(0));
+ DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
__ LoadHeapObject(ebx,
masm->isolate()->builtins()->InterpreterEntryTrampoline());
__ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
@ -1956,7 +1956,7 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
__ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
__ jmp(&done, Label::kNear);
__ bind(&no_arguments);
- __ Move(ebx, Smi::FromInt(0));
+ __ Move(ebx, Smi::kZero);
__ bind(&done);
}
@ -2861,7 +2861,7 @@ void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
__ PopReturnAddressTo(ecx);
__ Push(edx);
__ PushReturnAddressFrom(ecx);
- __ Move(esi, Smi::FromInt(0));
+ __ Move(esi, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
@ -2876,7 +2876,7 @@ void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
__ Push(edx);
__ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ PushReturnAddressFrom(ecx);
- __ Move(esi, Smi::FromInt(0));
+ __ Move(esi, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
@ -2889,7 +2889,7 @@ void Builtins::Generate_Abort(MacroAssembler* masm) {
__ PopReturnAddressTo(ecx);
__ Push(edx);
__ PushReturnAddressFrom(ecx);
- __ Move(esi, Smi::FromInt(0));
+ __ Move(esi, Smi::kZero);
__ TailCallRuntime(Runtime::kAbort);
}

View File

@ -38,9 +38,7 @@ void CodeStubAssembler::Assert(Node* condition) {
#endif
}
- Node* CodeStubAssembler::NoContextConstant() {
- return SmiConstant(Smi::FromInt(0));
- }
+ Node* CodeStubAssembler::NoContextConstant() { return SmiConstant(Smi::kZero); }
#define HEAP_CONSTANT_ACCESSOR(rootName, name) \
Node* CodeStubAssembler::name##Constant() { \
@ -614,7 +612,7 @@ Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
Bind(&runtime_call);
// AllocateInTargetSpace does not use the context.
- Node* context = SmiConstant(Smi::FromInt(0));
+ Node* context = SmiConstant(Smi::kZero);
Node* runtime_result;
if (flags & kPretenured) {
@ -1677,8 +1675,7 @@ void CodeStubAssembler::CopyStringCharacters(compiler::Node* from_string,
// Nothing to do for zero characters.
- GotoIf(SmiLessThanOrEqual(character_count, SmiConstant(Smi::FromInt(0))),
- &out);
+ GotoIf(SmiLessThanOrEqual(character_count, SmiConstant(Smi::kZero)), &out);
// Calculate offsets into the strings.
@ -2734,7 +2731,7 @@ Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
GotoIf(SmiAbove(substr_length, string_length), &runtime);
// Equal length - check if {from, to} == {0, str.length}.
- GotoIf(SmiAbove(from, SmiConstant(Smi::FromInt(0))), &runtime);
+ GotoIf(SmiAbove(from, SmiConstant(Smi::kZero)), &runtime);
// Return the original string (substr_length == string_length).
@ -3134,7 +3131,7 @@ Node* CodeStubAssembler::ToInteger(Node* context, Node* input,
}
Bind(&return_zero);
- var_arg.Bind(SmiConstant(Smi::FromInt(0)));
+ var_arg.Bind(SmiConstant(Smi::kZero));
Goto(&out);
}
@ -5726,7 +5723,7 @@ void CodeStubAssembler::CheckEnumCache(Node* receiver, Label* use_cache,
// For all objects but the receiver, check that the cache is empty.
current_map.Bind(LoadMap(current_js_object.value()));
Node* enum_length = EnumLength(current_map.value());
- Node* zero_constant = SmiConstant(Smi::FromInt(0));
+ Node* zero_constant = SmiConstant(Smi::kZero);
BranchIf(WordEqual(enum_length, zero_constant), &loop, use_runtime);
}
}

View File

@ -22,7 +22,7 @@ namespace internal {
RUNTIME_FUNCTION(UnexpectedStubMiss) {
FATAL("Unexpected deopt of a stub");
- return Smi::FromInt(0);
+ return Smi::kZero;
}
CodeStubDescriptor::CodeStubDescriptor(CodeStub* stub)
@ -2528,7 +2528,7 @@ compiler::Node* FastCloneShallowArrayStub::Generate(
allocation_site =
allocation_site_mode == TRACK_ALLOCATION_SITE ? allocation_site : nullptr;
- Node* zero = assembler->SmiConstant(Smi::FromInt(0));
+ Node* zero = assembler->SmiConstant(Smi::kZero);
assembler->GotoIf(assembler->SmiEqual(capacity, zero), &zero_capacity);
Node* elements_map = assembler->LoadMap(boilerplate_elements);
@ -2835,7 +2835,7 @@ void ArrayNoArgumentConstructorStub::GenerateAssembly(
Node* array = assembler->AllocateJSArray(
elements_kind(), array_map,
assembler->IntPtrConstant(JSArray::kPreallocatedArrayElements),
- assembler->SmiConstant(Smi::FromInt(0)), allocation_site);
+ assembler->SmiConstant(Smi::kZero), allocation_site);
assembler->Return(array);
}
@ -2848,7 +2848,7 @@ void InternalArrayNoArgumentConstructorStub::GenerateAssembly(
Node* array = assembler->AllocateJSArray(
elements_kind(), array_map,
assembler->IntPtrConstant(JSArray::kPreallocatedArrayElements),
- assembler->SmiConstant(Smi::FromInt(0)), nullptr);
+ assembler->SmiConstant(Smi::kZero), nullptr);
assembler->Return(array);
}
@ -2876,7 +2876,7 @@ void SingleArgumentConstructorCommon(CodeStubAssembler* assembler,
if (IsFastPackedElementsKind(elements_kind)) {
Label abort(assembler, Label::kDeferred);
assembler->Branch(
- assembler->SmiEqual(size, assembler->SmiConstant(Smi::FromInt(0))),
+ assembler->SmiEqual(size, assembler->SmiConstant(Smi::kZero)),
&small_smi_size, &abort);
assembler->Bind(&abort);

View File

@ -1308,7 +1308,7 @@ MaybeHandle<JSArray> Compiler::CompileForLiveEdit(Handle<Script> script) {
// generated shared function infos, clear the script's list temporarily
// and restore it at the end of this method.
Handle<Object> old_function_infos(script->shared_function_infos(), isolate);
- script->set_shared_function_infos(Smi::FromInt(0));
+ script->set_shared_function_infos(Smi::kZero);
// Start a compilation.
Zone zone(isolate->allocator());

View File

@ -535,7 +535,7 @@ void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
if (info->has_shared_info()) {
data->SetSharedFunctionInfo(*info->shared_info());
} else {
- data->SetSharedFunctionInfo(Smi::FromInt(0));
+ data->SetSharedFunctionInfo(Smi::kZero);
}
Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
@ -565,7 +565,7 @@ void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
CHECK(deoptimization_states_[i]);
data->SetTranslationIndex(
i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
- data->SetArgumentsStackHeight(i, Smi::FromInt(0));
+ data->SetArgumentsStackHeight(i, Smi::kZero);
data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
}

View File

@ -5021,7 +5021,7 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
// contained in the register pointer map.
- __ mov(result, Operand(Smi::FromInt(0)));
+ __ mov(result, Operand(Smi::kZero));
PushSafepointRegistersScope scope(this);
if (instr->size()->IsRegister()) {
@ -5355,7 +5355,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register result = ToRegister(instr->result());
Label load_cache, done;
__ EnumLength(result, map);
- __ cmp(result, Operand(Smi::FromInt(0)));
+ __ cmp(result, Operand(Smi::kZero));
__ b(ne, &load_cache);
__ mov(result, Operand(isolate()->factory()->empty_fixed_array()));
__ jmp(&done);

View File

@ -1433,7 +1433,7 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
// contained in the register pointer map.
- __ Mov(ToRegister(instr->result()), Smi::FromInt(0));
+ __ Mov(ToRegister(instr->result()), Smi::kZero);
PushSafepointRegistersScope scope(this);
LoadContextFromDeferred(instr->context());
@ -1743,7 +1743,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
EmitBranch(instr, eq);
} else if (type.IsSmi()) {
DCHECK(!info()->IsStub());
- EmitCompareAndBranch(instr, ne, value, Smi::FromInt(0));
+ EmitCompareAndBranch(instr, ne, value, Smi::kZero);
} else if (type.IsJSArray()) {
DCHECK(!info()->IsStub());
EmitGoto(instr->TrueDestination(chunk()));
@ -1786,7 +1786,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
if (expected.Contains(ToBooleanICStub::SMI)) {
// Smis: 0 -> false, all other -> true.
- DCHECK(Smi::FromInt(0) == 0);
+ DCHECK(Smi::kZero == 0);
__ Cbz(value, false_label);
__ JumpIfSmi(value, true_label);
} else if (expected.NeedsMap()) {
@ -5620,7 +5620,7 @@ void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
index, reinterpret_cast<uint64_t>(Smi::FromInt(1)), deferred->entry());
__ Mov(index, Operand(index, ASR, 1));
- __ Cmp(index, Smi::FromInt(0));
+ __ Cmp(index, Smi::kZero);
__ B(lt, &out_of_object);
STATIC_ASSERT(kPointerSizeLog2 > kSmiTagSize);

View File

@ -10229,8 +10229,8 @@ void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
HValue* byte_offset;
bool is_zero_byte_offset;
- if (arguments->at(kByteOffsetArg)->IsLiteral()
- && Smi::FromInt(0) ==
+ if (arguments->at(kByteOffsetArg)->IsLiteral() &&
+ Smi::kZero ==
*static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
byte_offset = Add<HConstant>(static_cast<int32_t>(0));
is_zero_byte_offset = true;

View File

@ -4815,7 +4815,7 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
// contained in the register pointer map.
- __ Move(result, Immediate(Smi::FromInt(0)));
+ __ Move(result, Immediate(Smi::kZero));
PushSafepointRegistersScope scope(this);
if (instr->size()->IsRegister()) {
@ -5119,7 +5119,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register result = ToRegister(instr->result());
Label load_cache, done;
__ EnumLength(result, map);
- __ cmp(result, Immediate(Smi::FromInt(0)));
+ __ cmp(result, Immediate(Smi::kZero));
__ j(not_equal, &load_cache, Label::kNear);
__ mov(result, isolate()->factory()->empty_fixed_array());
__ jmp(&done, Label::kNear);

View File

@ -328,9 +328,9 @@ void LCodeGenBase::PopulateDeoptimizationData(Handle<Code> code) {
AllowDeferredHandleDereference allow_handle_dereference;
data->SetSharedFunctionInfo(*info_->shared_info());
} else {
- data->SetSharedFunctionInfo(Smi::FromInt(0));
+ data->SetSharedFunctionInfo(Smi::kZero);
}
- data->SetWeakCellCache(Smi::FromInt(0));
+ data->SetWeakCellCache(Smi::kZero);
Handle<FixedArray> literals =
factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);

View File

@ -5370,7 +5370,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register result = ToRegister(instr->result());
Label load_cache, done;
__ EnumLength(result, map);
- __ Branch(&load_cache, ne, result, Operand(Smi::FromInt(0)));
+ __ Branch(&load_cache, ne, result, Operand(Smi::kZero));
__ li(result, Operand(isolate()->factory()->empty_fixed_array()));
__ jmp(&done);

View File

@ -5579,7 +5579,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register result = ToRegister(instr->result());
Label load_cache, done;
__ EnumLength(result, map);
- __ Branch(&load_cache, ne, result, Operand(Smi::FromInt(0)));
+ __ Branch(&load_cache, ne, result, Operand(Smi::kZero));
__ li(result, Operand(isolate()->factory()->empty_fixed_array()));
__ jmp(&done);

View File

@ -5282,7 +5282,7 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
// contained in the register pointer map.
- __ LoadSmiLiteral(result, Smi::FromInt(0));
+ __ LoadSmiLiteral(result, Smi::kZero);
PushSafepointRegistersScope scope(this);
if (instr->size()->IsRegister()) {
@ -5615,7 +5615,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register result = ToRegister(instr->result());
Label load_cache, done;
__ EnumLength(result, map);
- __ CmpSmiLiteral(result, Smi::FromInt(0), r0);
+ __ CmpSmiLiteral(result, Smi::kZero, r0);
__ bne(&load_cache);
__ mov(result, Operand(isolate()->factory()->empty_fixed_array()));
__ b(&done);

View File

@ -5194,7 +5194,7 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
// contained in the register pointer map.
- __ LoadSmiLiteral(result, Smi::FromInt(0));
+ __ LoadSmiLiteral(result, Smi::kZero);
PushSafepointRegistersScope scope(this);
if (instr->size()->IsRegister()) {
@ -5512,7 +5512,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register result = ToRegister(instr->result());
Label load_cache, done;
__ EnumLength(result, map);
- __ CmpSmiLiteral(result, Smi::FromInt(0), r0);
+ __ CmpSmiLiteral(result, Smi::kZero, r0);
__ bne(&load_cache, Label::kNear);
__ mov(result, Operand(isolate()->factory()->empty_fixed_array()));
__ b(&done, Label::kNear);

View File

@ -1972,7 +1972,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
EmitBranch(instr, equal);
} else if (type.IsSmi()) {
DCHECK(!info()->IsStub());
- __ SmiCompare(reg, Smi::FromInt(0));
+ __ SmiCompare(reg, Smi::kZero);
EmitBranch(instr, not_equal);
} else if (type.IsJSArray()) {
DCHECK(!info()->IsStub());
@ -2014,7 +2014,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
if (expected.Contains(ToBooleanICStub::SMI)) {
// Smis: 0 -> false, all other -> true.
- __ Cmp(reg, Smi::FromInt(0));
+ __ Cmp(reg, Smi::kZero);
__ j(equal, instr->FalseLabel(chunk_));
__ JumpIfSmi(reg, instr->TrueLabel(chunk_));
} else if (expected.NeedsMap()) {
@ -4161,7 +4161,7 @@ void LCodeGen::DoDeferredMaybeGrowElements(LMaybeGrowElements* instr) {
// result register contain a valid pointer because it is already
// contained in the register pointer map.
Register result = rax;
- __ Move(result, Smi::FromInt(0));
+ __ Move(result, Smi::kZero);
// We have to call a stub.
{
@ -4530,7 +4530,7 @@ void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
// result register contain a valid pointer because it is already
// contained in the register pointer map.
Register reg = ToRegister(instr->result());
- __ Move(reg, Smi::FromInt(0));
+ __ Move(reg, Smi::kZero);
{
PushSafepointRegistersScope scope(this);
@ -5094,7 +5094,7 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
// contained in the register pointer map.
- __ Move(result, Smi::FromInt(0));
+ __ Move(result, Smi::kZero);
PushSafepointRegistersScope scope(this);
if (instr->size()->IsRegister()) {
@ -5402,7 +5402,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register result = ToRegister(instr->result());
Label load_cache, done;
__ EnumLength(result, map);
- __ Cmp(result, Smi::FromInt(0));
+ __ Cmp(result, Smi::kZero);
__ j(not_equal, &load_cache, Label::kNear);
__ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex);
__ jmp(&done, Label::kNear);

View File

@ -5299,7 +5299,7 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
// TODO(3095996): Get rid of this. For now, we need to make the
// result register contain a valid pointer because it is already
// contained in the register pointer map.
- __ Move(result, Immediate(Smi::FromInt(0)));
+ __ Move(result, Immediate(Smi::kZero));
PushSafepointRegistersScope scope(this);
if (instr->size()->IsRegister()) {
@ -5605,7 +5605,7 @@ void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
Register result = ToRegister(instr->result());
Label load_cache, done;
__ EnumLength(result, map);
- __ cmp(result, Immediate(Smi::FromInt(0)));
+ __ cmp(result, Immediate(Smi::kZero));
__ j(not_equal, &load_cache, Label::kNear);
__ mov(result, isolate()->factory()->empty_fixed_array());
__ jmp(&done, Label::kNear);

View File

@ -25,7 +25,7 @@ static const char kDaysInMonths[] =
void DateCache::ResetDateCache() {
static const int kMaxStamp = Smi::kMaxValue;
if (stamp_->value() >= kMaxStamp) {
- stamp_ = Smi::FromInt(0);
+ stamp_ = Smi::kZero;
} else {
stamp_ = Smi::FromInt(stamp_->value() + 1);
}

View File

@ -569,11 +569,11 @@ class Debug {
}
void clear_suspended_generator() {
- thread_local_.suspended_generator_ = Smi::FromInt(0);
+ thread_local_.suspended_generator_ = Smi::kZero;
}
bool has_suspended_generator() const {
- return thread_local_.suspended_generator_ != Smi::FromInt(0);
+ return thread_local_.suspended_generator_ != Smi::kZero;
}
void OnException(Handle<Object> exception, Handle<Object> promise);

View File

@ -1426,7 +1426,7 @@ static const char* DropFrames(Vector<StackFrame*> frames, int top_frame_index,
for (Address a = unused_stack_top;
a < unused_stack_bottom;
a += kPointerSize) {
- Memory::Object_at(a) = Smi::FromInt(0);
+ Memory::Object_at(a) = Smi::kZero;
}
return NULL;

View File

@ -1024,7 +1024,7 @@ void Deoptimizer::DoComputeJSFrame(TranslatedFrame* translated_frame,
// and will be materialized by {Runtime_NotifyDeoptimized}. For additional
// safety we use Smi(0) instead of the potential {arguments_marker} here.
if (is_topmost) {
- intptr_t context_value = reinterpret_cast<intptr_t>(Smi::FromInt(0));
+ intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
Register context_reg = JavaScriptFrame::context_register();
output_frame->SetRegister(context_reg.code(), context_value);
}
@ -1292,7 +1292,7 @@ void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
// and will be materialized by {Runtime_NotifyDeoptimized}. For additional
// safety we use Smi(0) instead of the potential {arguments_marker} here.
if (is_topmost) {
- intptr_t context_value = reinterpret_cast<intptr_t>(Smi::FromInt(0));
+ intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
Register context_reg = JavaScriptFrame::context_register();
output_frame->SetRegister(context_reg.code(), context_value);
}
@ -1659,7 +1659,7 @@ void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
// and will be materialized by {Runtime_NotifyDeoptimized}. For additional
// safety we use Smi(0) instead of the potential {arguments_marker} here.
if (is_topmost) {
- intptr_t context_value = reinterpret_cast<intptr_t>(Smi::FromInt(0));
+ intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
Register context_reg = JavaScriptFrame::context_register();
output_frame->SetRegister(context_reg.code(), context_value);
}
@ -1843,7 +1843,7 @@ void Deoptimizer::DoComputeAccessorStubFrame(TranslatedFrame* translated_frame,
// and will be materialized by {Runtime_NotifyDeoptimized}. For additional
// safety we use Smi(0) instead of the potential {arguments_marker} here.
if (is_topmost) {
- intptr_t context_value = reinterpret_cast<intptr_t>(Smi::FromInt(0));
+ intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
Register context_reg = JavaScriptFrame::context_register();
output_frame->SetRegister(context_reg.code(), context_value);
}

View File

@ -1970,7 +1970,7 @@ class FastElementsAccessor : public ElementsAccessorBase<Subclass, KindTraits> {
if (new_length == 0) {
receiver->set_elements(heap->empty_fixed_array());
- receiver->set_length(Smi::FromInt(0));
+ receiver->set_length(Smi::kZero);
return isolate->factory()->NewJSArrayWithElements(
backing_store, KindTraits::Kind, delete_count);
}

View File

@ -97,7 +97,7 @@ Handle<PrototypeInfo> Factory::NewPrototypeInfo() {
Handle<PrototypeInfo>::cast(NewStruct(PROTOTYPE_INFO_TYPE));
result->set_prototype_users(WeakFixedArray::Empty());
result->set_registry_slot(PrototypeInfo::UNREGISTERED);
- result->set_validity_cell(Smi::FromInt(0));
+ result->set_validity_cell(Smi::kZero);
result->set_bit_field(0);
return result;
}
@ -179,7 +179,7 @@ Handle<FrameArray> Factory::NewFrameArray(int number_of_frames,
DCHECK_LE(0, number_of_frames);
Handle<FixedArray> result =
NewFixedArrayWithHoles(FrameArray::LengthFor(number_of_frames));
- result->set(FrameArray::kFrameCountIndex, Smi::FromInt(0));
+ result->set(FrameArray::kFrameCountIndex, Smi::kZero);
return Handle<FrameArray>::cast(result);
}
@ -779,7 +779,7 @@ Handle<Context> Factory::NewNativeContext() {
array->set_map_no_write_barrier(*native_context_map());
Handle<Context> context = Handle<Context>::cast(array);
context->set_native_context(*context);
- context->set_errors_thrown(Smi::FromInt(0));
+ context->set_errors_thrown(Smi::kZero);
Handle<WeakCell> weak_cell = NewWeakCell(context);
context->set_self_weak_cell(*weak_cell);
DCHECK(context->IsNativeContext());
@ -970,7 +970,7 @@ Handle<Script> Factory::NewScript(Handle<String> source) {
script->set_line_ends(heap->undefined_value());
script->set_eval_from_shared(heap->undefined_value());
script->set_eval_from_position(0);
- script->set_shared_function_infos(Smi::FromInt(0));
+ script->set_shared_function_infos(Smi::kZero);
script->set_flags(0);
heap->set_script_list(*WeakFixedArray::Add(script_list(), script));
@ -1504,7 +1504,7 @@ Handle<Code> Factory::NewCode(const CodeDesc& desc,
// The code object has not been fully initialized yet. We rely on the
// fact that no allocation will happen from this point on.
DisallowHeapAllocation no_gc;
- code->set_gc_metadata(Smi::FromInt(0));
+ code->set_gc_metadata(Smi::kZero);
code->set_ic_age(isolate()->heap()->global_ic_age());
code->set_instruction_size(desc.instr_size);
code->set_relocation_info(*reloc_info);
@ -1514,7 +1514,7 @@ Handle<Code> Factory::NewCode(const CodeDesc& desc,
code->set_raw_kind_specific_flags2(0);
code->set_is_crankshafted(crankshafted);
code->set_deoptimization_data(*empty_fixed_array(), SKIP_WRITE_BARRIER);
- code->set_raw_type_feedback_info(Smi::FromInt(0));
+ code->set_raw_type_feedback_info(Smi::kZero);
code->set_next_code_link(*undefined_value(), SKIP_WRITE_BARRIER);
code->set_handler_table(*empty_fixed_array(), SKIP_WRITE_BARRIER);
code->set_source_position_table(*empty_byte_array(), SKIP_WRITE_BARRIER);
@ -1706,7 +1706,7 @@ void Factory::NewJSArrayStorage(Handle<JSArray> array,
DCHECK(capacity >= length);
if (capacity == 0) {
- array->set_length(Smi::FromInt(0));
+ array->set_length(Smi::kZero);
array->set_elements(*empty_fixed_array());
return;
}
@ -2012,7 +2012,7 @@ Handle<JSTypedArray> Factory::NewJSTypedArray(ElementsKind elements_kind,
CHECK(number_of_elements <= static_cast<size_t>(Smi::kMaxValue));
size_t byte_length = number_of_elements * element_size;
- obj->set_byte_offset(Smi::FromInt(0));
+ obj->set_byte_offset(Smi::kZero);
i::Handle<i::Object> byte_length_object =
NewNumberFromSize(byte_length, pretenure);
obj->set_byte_length(*byte_length_object);
@ -2459,7 +2459,7 @@ void Factory::SetRegExpIrregexpData(Handle<JSRegExp> regexp,
store->set(JSRegExp::kIrregexpUC16CodeIndex, uninitialized);
store->set(JSRegExp::kIrregexpLatin1CodeSavedIndex, uninitialized);
store->set(JSRegExp::kIrregexpUC16CodeSavedIndex, uninitialized);
- store->set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::FromInt(0));
+ store->set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::kZero);
store->set(JSRegExp::kIrregexpCaptureCountIndex,
Smi::FromInt(capture_count));
store->set(JSRegExp::kIrregexpCaptureNameMapIndex, uninitialized);

View File

@ -13,7 +13,7 @@ namespace internal {
// static
FieldType* FieldType::None() {
- // Do not Smi::FromInt(0) here or for Any(), as that may translate
+ // Do not Smi::kZero here or for Any(), as that may translate
// as `nullptr` which is not a valid value for `this`.
return reinterpret_cast<FieldType*>(Smi::FromInt(2));
}

View File

@ -355,11 +355,7 @@ void FullCodeGenerator::Generate() {
masm()->CheckConstPool(true, false);
}
- void FullCodeGenerator::ClearAccumulator() {
- __ mov(r0, Operand(Smi::FromInt(0)));
- }
+ void FullCodeGenerator::ClearAccumulator() { __ mov(r0, Operand(Smi::kZero)); }
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
__ mov(r2, Operand(profiling_counter_));
@ -1055,7 +1051,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(&use_cache);
__ EnumLength(r1, r0);
- __ cmp(r1, Operand(Smi::FromInt(0)));
+ __ cmp(r1, Operand(Smi::kZero));
__ b(eq, &no_descriptors);
__ LoadInstanceDescriptors(r0, r2);
@ -1064,7 +1060,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// Set up the four remaining stack slots.
__ push(r0); // Map.
- __ mov(r0, Operand(Smi::FromInt(0)));
+ __ mov(r0, Operand(Smi::kZero));
// Push enumeration cache, enumeration cache length (as smi) and zero.
__ Push(r2, r1, r0);
__ jmp(&loop);
@ -1081,7 +1077,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
__ Push(r1); // Fixed array length (as smi).
PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
- __ mov(r0, Operand(Smi::FromInt(0)));
+ __ mov(r0, Operand(Smi::kZero));
__ Push(r0); // Initial index.
// Generate code for doing the condition check.
@ -1927,7 +1923,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ mov(right, Operand(scratch1), LeaveCC, ne);
__ b(ne, &done);
__ add(scratch2, right, Operand(left), SetCC);
- __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
+ __ mov(right, Operand(Smi::kZero), LeaveCC, pl);
__ b(mi, &stub_call);
break;
}
@ -3075,7 +3071,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
- __ mov(ip, Operand(Smi::FromInt(0)));
+ __ mov(ip, Operand(Smi::kZero));
PushOperand(ip);
}
switch (assign_type) {

View File

@ -357,11 +357,7 @@ void FullCodeGenerator::Generate() {
masm()->CheckConstPool(true, false);
}
- void FullCodeGenerator::ClearAccumulator() {
- __ Mov(x0, Smi::FromInt(0));
- }
+ void FullCodeGenerator::ClearAccumulator() { __ Mov(x0, Smi::kZero); }
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
__ Mov(x2, Operand(profiling_counter_));

View File

@ -746,7 +746,7 @@ void FullCodeGenerator::VisitSuperCallReference(SuperCallReference* super) {
void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
- context()->Plug(handle(Smi::FromInt(0), isolate()));
+ context()->Plug(handle(Smi::kZero, isolate()));
}

View File

@ -343,7 +343,7 @@ void FullCodeGenerator::Generate() {
void FullCodeGenerator::ClearAccumulator() {
- __ Move(eax, Immediate(Smi::FromInt(0)));
+ __ Move(eax, Immediate(Smi::kZero));
}
@ -986,7 +986,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(&use_cache);
__ EnumLength(edx, eax);
- __ cmp(edx, Immediate(Smi::FromInt(0)));
+ __ cmp(edx, Immediate(Smi::kZero));
__ j(equal, &no_descriptors);
__ LoadInstanceDescriptors(eax, ecx);
@ -997,7 +997,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ push(eax); // Map.
__ push(ecx); // Enumeration cache.
__ push(edx); // Number of valid entries for the map in the enum cache.
- __ push(Immediate(Smi::FromInt(0))); // Initial index.
+ __ push(Immediate(Smi::kZero)); // Initial index.
__ jmp(&loop);
__ bind(&no_descriptors);
@ -1012,7 +1012,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
__ push(eax); // Fixed array length (as smi).
PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
__ push(Immediate(Smi::FromInt(0))); // Initial index.
__ push(Immediate(Smi::kZero)); // Initial index.
// Generate code for doing the condition check.
__ bind(&loop);
@ -2971,7 +2971,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
PushOperand(Smi::FromInt(0));
PushOperand(Smi::kZero);
}
switch (assign_type) {
case NAMED_PROPERTY: {

View File

@ -362,7 +362,7 @@ void FullCodeGenerator::Generate() {
void FullCodeGenerator::ClearAccumulator() {
DCHECK(Smi::FromInt(0) == 0);
DCHECK(Smi::kZero == 0);
__ mov(v0, zero_reg);
}
@ -1051,14 +1051,14 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(&use_cache);
__ EnumLength(a1, v0);
__ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
__ Branch(&no_descriptors, eq, a1, Operand(Smi::kZero));
__ LoadInstanceDescriptors(v0, a2);
__ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
__ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
// Set up the four remaining stack slots.
__ li(a0, Operand(Smi::FromInt(0)));
__ li(a0, Operand(Smi::kZero));
// Push map, enumeration cache, enumeration cache length (as smi) and zero.
__ Push(v0, a2, a1, a0);
__ jmp(&loop);
@ -1075,7 +1075,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
__ Push(a1); // Fixed array length (as smi).
PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
__ li(a0, Operand(Smi::FromInt(0)));
__ li(a0, Operand(Smi::kZero));
__ Push(a0); // Initial index.
// Generate code for doing the condition check.
@ -1930,7 +1930,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ Branch(&done, ne, v0, Operand(zero_reg));
__ Addu(scratch2, right, left);
__ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
DCHECK(Smi::FromInt(0) == 0);
DCHECK(Smi::kZero == 0);
__ mov(v0, zero_reg);
break;
}
@ -3082,7 +3082,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
__ li(at, Operand(Smi::FromInt(0)));
__ li(at, Operand(Smi::kZero));
PushOperand(at);
}
switch (assign_type) {

View File

@ -361,7 +361,7 @@ void FullCodeGenerator::Generate() {
void FullCodeGenerator::ClearAccumulator() {
DCHECK(Smi::FromInt(0) == 0);
DCHECK(Smi::kZero == 0);
__ mov(v0, zero_reg);
}
@ -1051,14 +1051,14 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(&use_cache);
__ EnumLength(a1, v0);
__ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
__ Branch(&no_descriptors, eq, a1, Operand(Smi::kZero));
__ LoadInstanceDescriptors(v0, a2);
__ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
__ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
// Set up the four remaining stack slots.
__ li(a0, Operand(Smi::FromInt(0)));
__ li(a0, Operand(Smi::kZero));
// Push map, enumeration cache, enumeration cache length (as smi) and zero.
__ Push(v0, a2, a1, a0);
__ jmp(&loop);
@ -1075,7 +1075,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
__ Push(a1); // Fixed array length (as smi).
PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
__ li(a0, Operand(Smi::FromInt(0)));
__ li(a0, Operand(Smi::kZero));
__ Push(a0); // Initial index.
// Generate code for doing the condition check.
@ -1930,7 +1930,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
__ Daddu(scratch2, right, left);
__ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
DCHECK(Smi::FromInt(0) == 0);
DCHECK(Smi::kZero == 0);
__ mov(v0, zero_reg);
break;
}
@ -3082,7 +3082,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
__ li(at, Operand(Smi::FromInt(0)));
__ li(at, Operand(Smi::kZero));
PushOperand(at);
}
switch (assign_type) {

View File

@ -362,7 +362,7 @@ void FullCodeGenerator::Generate() {
void FullCodeGenerator::ClearAccumulator() {
__ LoadSmiLiteral(r3, Smi::FromInt(0));
__ LoadSmiLiteral(r3, Smi::kZero);
}
@ -1015,7 +1015,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(&use_cache);
__ EnumLength(r4, r3);
__ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
__ CmpSmiLiteral(r4, Smi::kZero, r0);
__ beq(&no_descriptors);
__ LoadInstanceDescriptors(r3, r5);
@ -1025,7 +1025,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// Set up the four remaining stack slots.
__ push(r3); // Map.
__ LoadSmiLiteral(r3, Smi::FromInt(0));
__ LoadSmiLiteral(r3, Smi::kZero);
// Push enumeration cache, enumeration cache length (as smi) and zero.
__ Push(r5, r4, r3);
__ b(&loop);
@ -1042,7 +1042,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
__ Push(r4); // Fixed array length (as smi).
PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
__ LoadSmiLiteral(r3, Smi::FromInt(0));
__ LoadSmiLiteral(r3, Smi::kZero);
__ Push(r3); // Initial index.
// Generate code for doing the condition check.
@ -1936,7 +1936,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ add(scratch2, right, left);
__ cmpi(scratch2, Operand::Zero());
__ blt(&stub_call);
__ LoadSmiLiteral(right, Smi::FromInt(0));
__ LoadSmiLiteral(right, Smi::kZero);
break;
}
case Token::BIT_OR:
@ -3074,7 +3074,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
__ LoadSmiLiteral(ip, Smi::FromInt(0));
__ LoadSmiLiteral(ip, Smi::kZero);
PushOperand(ip);
}
switch (assign_type) {

View File

@ -363,7 +363,7 @@ void FullCodeGenerator::Generate() {
}
void FullCodeGenerator::ClearAccumulator() {
__ LoadSmiLiteral(r2, Smi::FromInt(0));
__ LoadSmiLiteral(r2, Smi::kZero);
}
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
@ -984,7 +984,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(&use_cache);
__ EnumLength(r3, r2);
__ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
__ CmpSmiLiteral(r3, Smi::kZero, r0);
__ beq(&no_descriptors, Label::kNear);
__ LoadInstanceDescriptors(r2, r4);
@ -994,7 +994,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// Set up the four remaining stack slots.
__ push(r2); // Map.
__ LoadSmiLiteral(r2, Smi::FromInt(0));
__ LoadSmiLiteral(r2, Smi::kZero);
// Push enumeration cache, enumeration cache length (as smi) and zero.
__ Push(r4, r3, r2);
__ b(&loop);
@ -1011,7 +1011,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ LoadP(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
__ Push(r3); // Fixed array length (as smi).
PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
__ LoadSmiLiteral(r2, Smi::FromInt(0));
__ LoadSmiLiteral(r2, Smi::kZero);
__ Push(r2); // Initial index.
// Generate code for doing the condition check.
@ -1896,7 +1896,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ AddP(scratch2, right, left);
__ CmpP(scratch2, Operand::Zero());
__ blt(&stub_call);
__ LoadSmiLiteral(right, Smi::FromInt(0));
__ LoadSmiLiteral(right, Smi::kZero);
break;
}
case Token::BIT_OR:
@ -2995,7 +2995,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
__ LoadSmiLiteral(ip, Smi::FromInt(0));
__ LoadSmiLiteral(ip, Smi::kZero);
PushOperand(ip);
}
switch (assign_type) {

View File

@ -1010,7 +1010,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Label no_descriptors;
__ EnumLength(rdx, rax);
__ Cmp(rdx, Smi::FromInt(0));
__ Cmp(rdx, Smi::kZero);
__ j(equal, &no_descriptors);
__ LoadInstanceDescriptors(rax, rcx);
@ -1021,7 +1021,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ Push(rax); // Map.
__ Push(rcx); // Enumeration cache.
__ Push(rdx); // Number of valid entries for the map in the enum cache.
__ Push(Smi::FromInt(0)); // Initial index.
__ Push(Smi::kZero); // Initial index.
__ jmp(&loop);
__ bind(&no_descriptors);
@ -1037,7 +1037,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
__ Push(rax); // Fixed array length (as smi).
PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
__ Push(Smi::FromInt(0)); // Initial index.
__ Push(Smi::kZero); // Initial index.
// Generate code for doing the condition check.
__ bind(&loop);
@ -2962,7 +2962,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
PushOperand(Smi::FromInt(0));
PushOperand(Smi::kZero);
}
switch (assign_type) {
case NAMED_PROPERTY: {

View File

@ -340,7 +340,7 @@ void FullCodeGenerator::Generate() {
void FullCodeGenerator::ClearAccumulator() {
__ Move(eax, Immediate(Smi::FromInt(0)));
__ Move(eax, Immediate(Smi::kZero));
}
@ -978,7 +978,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(&use_cache);
__ EnumLength(edx, eax);
__ cmp(edx, Immediate(Smi::FromInt(0)));
__ cmp(edx, Immediate(Smi::kZero));
__ j(equal, &no_descriptors);
__ LoadInstanceDescriptors(eax, ecx);
@ -989,7 +989,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ push(eax); // Map.
__ push(ecx); // Enumeration cache.
__ push(edx); // Number of valid entries for the map in the enum cache.
__ push(Immediate(Smi::FromInt(0))); // Initial index.
__ push(Immediate(Smi::kZero)); // Initial index.
__ jmp(&loop);
__ bind(&no_descriptors);
@ -1004,7 +1004,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
__ push(eax); // Fixed array length (as smi).
PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
__ push(Immediate(Smi::FromInt(0))); // Initial index.
__ push(Immediate(Smi::kZero)); // Initial index.
// Generate code for doing the condition check.
__ bind(&loop);
@ -2963,7 +2963,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
} else {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
PushOperand(Smi::FromInt(0));
PushOperand(Smi::kZero);
}
switch (assign_type) {
case NAMED_PROPERTY: {

View File

@ -742,9 +742,7 @@ void Heap::ExternalStringTable::ShrinkNewStrings(int position) {
#endif
}
void Heap::ClearInstanceofCache() {
set_instanceof_cache_function(Smi::FromInt(0));
}
void Heap::ClearInstanceofCache() { set_instanceof_cache_function(Smi::kZero); }
Oddball* Heap::ToBoolean(bool condition) {
return condition ? true_value() : false_value();
@ -752,8 +750,8 @@ Oddball* Heap::ToBoolean(bool condition) {
void Heap::CompletelyClearInstanceofCache() {
set_instanceof_cache_map(Smi::FromInt(0));
set_instanceof_cache_function(Smi::FromInt(0));
set_instanceof_cache_map(Smi::kZero);
set_instanceof_cache_function(Smi::kZero);
}
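
Both clears rely on these root slots never legitimately holding a smi, so the zero smi doubles as the "no cached result" marker. A hedged sketch of the pattern, with invented mock fields standing in for the heap roots:

struct Object {};
Object* const kCacheSentinel = nullptr;  // Plays the role of Smi::kZero.

struct InstanceofCacheMock {
  Object* function = kCacheSentinel;
  Object* map = kCacheSentinel;
  Object* answer = kCacheSentinel;

  // Invalidating the function slot alone forces the next lookup to miss,
  // because a hit requires both the function and the map to match.
  void Clear() { function = kCacheSentinel; }
  void CompletelyClear() {
    map = kCacheSentinel;
    function = kCacheSentinel;
  }
  bool Hit(Object* fn, Object* mp) const {
    return fn != kCacheSentinel && fn == function && mp == map;
  }
};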
@ -776,27 +774,27 @@ int Heap::NextScriptId() {
}
void Heap::SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::kZero);
set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
}
void Heap::SetConstructStubDeoptPCOffset(int pc_offset) {
DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
DCHECK(construct_stub_deopt_pc_offset() == Smi::kZero);
set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}
void Heap::SetGetterStubDeoptPCOffset(int pc_offset) {
DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
DCHECK(getter_stub_deopt_pc_offset() == Smi::kZero);
set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}
void Heap::SetSetterStubDeoptPCOffset(int pc_offset) {
DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
DCHECK(setter_stub_deopt_pc_offset() == Smi::kZero);
set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}
void Heap::SetInterpreterEntryReturnPCOffset(int pc_offset) {
DCHECK(interpreter_entry_return_pc_offset() == Smi::FromInt(0));
DCHECK(interpreter_entry_return_pc_offset() == Smi::kZero);
set_interpreter_entry_return_pc_offset(Smi::FromInt(pc_offset));
}

View File

@ -170,10 +170,10 @@ Heap::Heap()
memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
set_native_contexts_list(NULL);
set_allocation_sites_list(Smi::FromInt(0));
set_encountered_weak_collections(Smi::FromInt(0));
set_encountered_weak_cells(Smi::FromInt(0));
set_encountered_transition_arrays(Smi::FromInt(0));
set_allocation_sites_list(Smi::kZero);
set_encountered_weak_collections(Smi::kZero);
set_encountered_weak_cells(Smi::kZero);
set_encountered_transition_arrays(Smi::kZero);
// Put a dummy entry in the remembered pages so we can find the list in
// the minidump even if there are no real unmapped pages.
RememberUnmappedPage(NULL, false);
@ -742,7 +742,7 @@ void Heap::PreprocessStackTraces() {
}
// We must not compact the weak fixed list here, as we may be in the middle
// of writing to it when the GC was triggered. Instead, we reset the root value.
set_weak_stack_trace_list(Smi::FromInt(0));
set_weak_stack_trace_list(Smi::kZero);
}
@ -2061,7 +2061,7 @@ AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
Map::OwnsDescriptors::encode(true) |
Map::ConstructionCounter::encode(Map::kNoSlackTracking);
reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);
reinterpret_cast<Map*>(result)->set_weak_cell_cache(Smi::FromInt(0));
reinterpret_cast<Map*>(result)->set_weak_cell_cache(Smi::kZero);
return result;
}
@ -2085,8 +2085,8 @@ AllocationResult Heap::AllocateMap(InstanceType instance_type,
map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
map->set_dependent_code(DependentCode::cast(empty_fixed_array()),
SKIP_WRITE_BARRIER);
map->set_weak_cell_cache(Smi::FromInt(0));
map->set_raw_transitions(Smi::FromInt(0));
map->set_weak_cell_cache(Smi::kZero);
map->set_raw_transitions(Smi::kZero);
map->set_unused_property_fields(0);
map->set_instance_descriptors(empty_descriptor_array());
if (FLAG_unbox_double_fields) {
@ -2158,7 +2158,7 @@ namespace {
void FinalizePartialMap(Heap* heap, Map* map) {
map->set_code_cache(heap->empty_fixed_array());
map->set_dependent_code(DependentCode::cast(heap->empty_fixed_array()));
map->set_raw_transitions(Smi::FromInt(0));
map->set_raw_transitions(Smi::kZero);
map->set_instance_descriptors(heap->empty_descriptor_array());
if (FLAG_unbox_double_fields) {
map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
@ -2493,7 +2493,7 @@ AllocationResult Heap::AllocatePropertyCell() {
PropertyCell* cell = PropertyCell::cast(result);
cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
SKIP_WRITE_BARRIER);
cell->set_property_details(PropertyDetails(Smi::FromInt(0)));
cell->set_property_details(PropertyDetails(Smi::kZero));
cell->set_value(the_hole_value());
return result;
}
@ -2623,8 +2623,7 @@ void Heap::CreateInitialObjects() {
// Initialize the null_value.
Oddball::Initialize(isolate(), factory->null_value(), "null",
handle(Smi::FromInt(0), isolate()), "object",
Oddball::kNull);
handle(Smi::kZero, isolate()), "object", Oddball::kNull);
// Initialize the_hole_value.
Oddball::Initialize(isolate(), factory->the_hole_value(), "hole",
@ -2638,7 +2637,7 @@ void Heap::CreateInitialObjects() {
// Initialize the false_value.
Oddball::Initialize(isolate(), factory->false_value(), "false",
handle(Smi::FromInt(0), isolate()), "boolean",
handle(Smi::kZero, isolate()), "boolean",
Oddball::kFalse);
set_uninitialized_value(
@ -2684,9 +2683,9 @@ void Heap::CreateInitialObjects() {
// expanding the dictionary during bootstrapping.
set_code_stubs(*UnseededNumberDictionary::New(isolate(), 128));
set_instanceof_cache_function(Smi::FromInt(0));
set_instanceof_cache_map(Smi::FromInt(0));
set_instanceof_cache_answer(Smi::FromInt(0));
set_instanceof_cache_function(Smi::kZero);
set_instanceof_cache_map(Smi::kZero);
set_instanceof_cache_answer(Smi::kZero);
{
HandleScope scope(isolate());
@ -2755,7 +2754,7 @@ void Heap::CreateInitialObjects() {
set_undefined_cell(*factory->NewCell(factory->undefined_value()));
// The symbol registry is initialized lazily.
set_symbol_registry(Smi::FromInt(0));
set_symbol_registry(Smi::kZero);
// Microtask queue uses the empty fixed array as a sentinel for "empty".
// Number of queued microtasks stored in Isolate::pending_microtask_count().
@ -2803,7 +2802,7 @@ void Heap::CreateInitialObjects() {
empty_type_feedback_vector->set(TypeFeedbackVector::kMetadataIndex,
empty_fixed_array());
empty_type_feedback_vector->set(TypeFeedbackVector::kInvocationCountIndex,
Smi::FromInt(0));
Smi::kZero);
set_empty_type_feedback_vector(*empty_type_feedback_vector);
// We use a canonical empty LiteralsArray for all functions that neither
@ -2839,7 +2838,7 @@ void Heap::CreateInitialObjects() {
ArrayList::cast(*(factory->NewFixedArray(16, TENURED))));
weak_new_space_object_to_code_list()->SetLength(0);
set_script_list(Smi::FromInt(0));
set_script_list(Smi::kZero);
Handle<SeededNumberDictionary> slow_element_dictionary =
SeededNumberDictionary::New(isolate(), 0, TENURED);
@ -2850,7 +2849,7 @@ void Heap::CreateInitialObjects() {
// Handling of script id generation is in Heap::NextScriptId().
set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId));
set_next_template_serial_number(Smi::FromInt(0));
set_next_template_serial_number(Smi::kZero);
// Allocate the empty script.
Handle<Script> script = factory->NewScript(factory->empty_string());
@ -2883,9 +2882,9 @@ void Heap::CreateInitialObjects() {
set_serialized_templates(empty_fixed_array());
set_weak_stack_trace_list(Smi::FromInt(0));
set_weak_stack_trace_list(Smi::kZero);
set_noscript_shared_function_infos(Smi::FromInt(0));
set_noscript_shared_function_infos(Smi::kZero);
// Initialize keyed lookup cache.
isolate_->keyed_lookup_cache()->Clear();
@ -3287,7 +3286,7 @@ AllocationResult Heap::AllocateFixedTypedArrayWithExternalPointer(
result->set_map_no_write_barrier(MapForFixedTypedArray(array_type));
FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(result);
elements->set_base_pointer(Smi::FromInt(0), SKIP_WRITE_BARRIER);
elements->set_base_pointer(Smi::kZero, SKIP_WRITE_BARRIER);
elements->set_external_pointer(external_pointer, SKIP_WRITE_BARRIER);
elements->set_length(length);
return elements;
@ -3371,7 +3370,7 @@ AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
DCHECK(!memory_allocator()->code_range()->valid() ||
memory_allocator()->code_range()->contains(code->address()) ||
object_size <= code_space()->AreaSize());
code->set_gc_metadata(Smi::FromInt(0));
code->set_gc_metadata(Smi::kZero);
code->set_ic_age(global_ic_age_);
return code;
}
@ -3468,7 +3467,7 @@ void Heap::InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
// TODO(1240798): Initialize the object's body using valid initial values
// according to the object's initial map. For example, if the map's
// instance type is JS_ARRAY_TYPE, the length field should be initialized
// to a number (e.g. Smi::FromInt(0)) and the elements initialized to a
// to a number (e.g. Smi::kZero) and the elements initialized to a
// fixed array (e.g. Heap::empty_fixed_array()). Currently, the object
// verification code has to cope with (temporarily) invalid objects. See
// for example, JSArray::JSArrayVerify.
@ -5548,8 +5547,8 @@ void Heap::SetStackLimits() {
}
void Heap::ClearStackLimits() {
roots_[kStackLimitRootIndex] = Smi::FromInt(0);
roots_[kRealStackLimitRootIndex] = Smi::FromInt(0);
roots_[kStackLimitRootIndex] = Smi::kZero;
roots_[kRealStackLimitRootIndex] = Smi::kZero;
}
void Heap::PrintAlloctionsHash() {

View File

@ -628,9 +628,9 @@ void IncrementalMarking::ProcessWeakCells() {
Object* the_hole_value = heap()->the_hole_value();
Object* weak_cell_obj = heap()->encountered_weak_cells();
Object* weak_cell_head = Smi::FromInt(0);
Object* weak_cell_head = Smi::kZero;
WeakCell* prev_weak_cell_obj = NULL;
while (weak_cell_obj != Smi::FromInt(0)) {
while (weak_cell_obj != Smi::kZero) {
WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
// We do not insert cleared weak cells into the list, so the value
// cannot be a Smi here.
@ -648,7 +648,7 @@ void IncrementalMarking::ProcessWeakCells() {
weak_cell_obj = weak_cell->next();
weak_cell->clear_next(the_hole_value);
} else {
if (weak_cell_head == Smi::FromInt(0)) {
if (weak_cell_head == Smi::kZero) {
weak_cell_head = weak_cell;
}
prev_weak_cell_obj = weak_cell;

View File

@ -2480,7 +2480,7 @@ void MarkCompactCollector::ClearSimpleMapTransitions(
Object* non_live_map_list) {
Object* the_hole_value = heap()->the_hole_value();
Object* weak_cell_obj = non_live_map_list;
while (weak_cell_obj != Smi::FromInt(0)) {
while (weak_cell_obj != Smi::kZero) {
WeakCell* weak_cell = WeakCell::cast(weak_cell_obj);
Map* map = Map::cast(weak_cell->value());
DCHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(map)));
@ -2504,7 +2504,7 @@ void MarkCompactCollector::ClearSimpleMapTransition(Map* map,
// A previously existing simple transition (stored in a WeakCell) is going
// to be cleared. Clear the useless cell pointer, and take ownership
// of the descriptor array.
map->set_raw_transitions(Smi::FromInt(0));
map->set_raw_transitions(Smi::kZero);
int number_of_own_descriptors = map->NumberOfOwnDescriptors();
DescriptorArray* descriptors = map->instance_descriptors();
if (descriptors == dead_transition->instance_descriptors() &&
@ -2519,7 +2519,7 @@ void MarkCompactCollector::ClearSimpleMapTransition(Map* map,
void MarkCompactCollector::ClearFullMapTransitions() {
HeapObject* undefined = heap()->undefined_value();
Object* obj = heap()->encountered_transition_arrays();
while (obj != Smi::FromInt(0)) {
while (obj != Smi::kZero) {
TransitionArray* array = TransitionArray::cast(obj);
int num_transitions = array->number_of_entries();
DCHECK_EQ(TransitionArray::NumberOfTransitions(array), num_transitions);
@ -2539,7 +2539,7 @@ void MarkCompactCollector::ClearFullMapTransitions() {
obj = array->next_link();
array->set_next_link(undefined, SKIP_WRITE_BARRIER);
}
heap()->set_encountered_transition_arrays(Smi::FromInt(0));
heap()->set_encountered_transition_arrays(Smi::kZero);
}
@ -2643,7 +2643,7 @@ void MarkCompactCollector::TrimEnumCache(Map* map,
void MarkCompactCollector::ProcessWeakCollections() {
Object* weak_collection_obj = heap()->encountered_weak_collections();
while (weak_collection_obj != Smi::FromInt(0)) {
while (weak_collection_obj != Smi::kZero) {
JSWeakCollection* weak_collection =
reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
DCHECK(MarkCompactCollector::IsMarked(weak_collection));
@ -2669,7 +2669,7 @@ void MarkCompactCollector::ProcessWeakCollections() {
void MarkCompactCollector::ClearWeakCollections() {
TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_COLLECTIONS);
Object* weak_collection_obj = heap()->encountered_weak_collections();
while (weak_collection_obj != Smi::FromInt(0)) {
while (weak_collection_obj != Smi::kZero) {
JSWeakCollection* weak_collection =
reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
DCHECK(MarkCompactCollector::IsMarked(weak_collection));
@ -2685,19 +2685,19 @@ void MarkCompactCollector::ClearWeakCollections() {
weak_collection_obj = weak_collection->next();
weak_collection->set_next(heap()->undefined_value());
}
heap()->set_encountered_weak_collections(Smi::FromInt(0));
heap()->set_encountered_weak_collections(Smi::kZero);
}
void MarkCompactCollector::AbortWeakCollections() {
Object* weak_collection_obj = heap()->encountered_weak_collections();
while (weak_collection_obj != Smi::FromInt(0)) {
while (weak_collection_obj != Smi::kZero) {
JSWeakCollection* weak_collection =
reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
weak_collection_obj = weak_collection->next();
weak_collection->set_next(heap()->undefined_value());
}
heap()->set_encountered_weak_collections(Smi::FromInt(0));
heap()->set_encountered_weak_collections(Smi::kZero);
}
@ -2709,8 +2709,8 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
Object* the_hole_value = heap->the_hole_value();
DependentCode* dependent_code_head =
DependentCode::cast(heap->empty_fixed_array());
Object* non_live_map_head = Smi::FromInt(0);
while (weak_cell_obj != Smi::FromInt(0)) {
Object* non_live_map_head = Smi::kZero;
while (weak_cell_obj != Smi::kZero) {
WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
Object* next_weak_cell = weak_cell->next();
bool clear_value = true;
@ -2770,7 +2770,7 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
}
weak_cell_obj = next_weak_cell;
}
heap->set_encountered_weak_cells(Smi::FromInt(0));
heap->set_encountered_weak_cells(Smi::kZero);
*non_live_map_list = non_live_map_head;
*dependent_code_list = dependent_code_head;
}
@ -2779,24 +2779,24 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
void MarkCompactCollector::AbortWeakCells() {
Object* the_hole_value = heap()->the_hole_value();
Object* weak_cell_obj = heap()->encountered_weak_cells();
while (weak_cell_obj != Smi::FromInt(0)) {
while (weak_cell_obj != Smi::kZero) {
WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
weak_cell_obj = weak_cell->next();
weak_cell->clear_next(the_hole_value);
}
heap()->set_encountered_weak_cells(Smi::FromInt(0));
heap()->set_encountered_weak_cells(Smi::kZero);
}
void MarkCompactCollector::AbortTransitionArrays() {
HeapObject* undefined = heap()->undefined_value();
Object* obj = heap()->encountered_transition_arrays();
while (obj != Smi::FromInt(0)) {
while (obj != Smi::kZero) {
TransitionArray* array = TransitionArray::cast(obj);
obj = array->next_link();
array->set_next_link(undefined, SKIP_WRITE_BARRIER);
}
heap()->set_encountered_transition_arrays(Smi::FromInt(0));
heap()->set_encountered_transition_arrays(Smi::kZero);
}
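
All the loops above share one shape: a singly linked list threaded through the objects themselves (weak cells, transition arrays, weak collections), with the zero smi as the end-of-list marker so an empty list needs no heap allocation. A minimal sketch of that traversal-and-unlink pattern, with invented mock types in place of the V8 classes:

struct Object {};
Object* const kZeroSentinel = nullptr;  // Plays the role of Smi::kZero.

struct WeakCellMock : Object {
  Object* next = kZeroSentinel;
  bool cleared = false;
};

// Walk the list, drop cleared cells, and return the new head, mirroring
// the ClearWeakCells/ProcessWeakCells loops above.
Object* CompactList(Object* head) {
  Object* new_head = kZeroSentinel;
  WeakCellMock* prev = nullptr;
  for (Object* obj = head; obj != kZeroSentinel;) {
    WeakCellMock* cell = static_cast<WeakCellMock*>(obj);
    obj = cell->next;
    if (cell->cleared) continue;  // Unlink cleared cells.
    if (new_head == kZeroSentinel) new_head = cell;
    if (prev != nullptr) prev->next = cell;
    prev = cell;
  }
  if (prev != nullptr) prev->next = kZeroSentinel;
  return new_head;
}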
void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo,

View File

@ -1277,7 +1277,7 @@ Object* PagedSpace::FindObject(Address addr) {
// Note: this function can only be called on iterable spaces.
DCHECK(!heap()->mark_compact_collector()->in_use());
if (!Contains(addr)) return Smi::FromInt(0); // Signaling not found.
if (!Contains(addr)) return Smi::kZero; // Signaling not found.
Page* p = Page::FromAddress(addr);
HeapObjectIterator it(p);
@ -1288,7 +1288,7 @@ Object* PagedSpace::FindObject(Address addr) {
}
UNREACHABLE();
return Smi::FromInt(0);
return Smi::kZero;
}
void PagedSpace::ShrinkImmortalImmovablePages() {
@ -2995,7 +2995,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
// We only need to do this in debug builds or if verify_heap is on.
reinterpret_cast<Object**>(object->address())[0] =
heap()->fixed_array_map();
reinterpret_cast<Object**>(object->address())[1] = Smi::FromInt(0);
reinterpret_cast<Object**>(object->address())[1] = Smi::kZero;
}
heap()->StartIncrementalMarkingIfAllocationLimitIsReached(Heap::kNoGCFlags,
@ -3024,7 +3024,7 @@ Object* LargeObjectSpace::FindObject(Address a) {
if (page != NULL) {
return page->GetObject();
}
return Smi::FromInt(0); // Signaling not found.
return Smi::kZero; // Signaling not found.
}

View File

@ -1062,7 +1062,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// If either is a Smi (we know that not both are), then they can only
// be equal if the other is a HeapNumber. If so, use the slow case.
STATIC_ASSERT(kSmiTag == 0);
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
__ mov(ecx, Immediate(kSmiTagMask));
__ and_(ecx, eax);
__ test(ecx, edx);
@ -3487,7 +3487,7 @@ static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
// - esp[12] -- value
// - receiver, key, handler in registers.
Register counter = key;
__ mov(counter, Immediate(Smi::FromInt(0)));
__ mov(counter, Immediate(Smi::kZero));
__ bind(&next_loop);
__ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
FixedArray::kHeaderSize));
@ -4210,8 +4210,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ mov(ecx, isolate()->factory()->empty_fixed_array());
__ mov(FieldOperand(eax, JSArray::kPropertiesOffset), ecx);
__ mov(FieldOperand(eax, JSArray::kElementsOffset), ecx);
__ mov(FieldOperand(eax, JSArray::kLengthOffset),
Immediate(Smi::FromInt(0)));
__ mov(FieldOperand(eax, JSArray::kLengthOffset), Immediate(Smi::kZero));
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
__ Ret();
@ -4252,7 +4251,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
{
Label loop, done_loop;
__ Move(ecx, Smi::FromInt(0));
__ Move(ecx, Smi::kZero);
__ bind(&loop);
__ cmp(ecx, eax);
__ j(equal, &done_loop, Label::kNear);
@ -4641,7 +4640,7 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
__ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
{
Label loop, done_loop;
__ Move(ecx, Smi::FromInt(0));
__ Move(ecx, Smi::kZero);
__ bind(&loop);
__ cmp(ecx, eax);
__ j(equal, &done_loop, Label::kNear);
@ -5160,7 +5159,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ push(Immediate(ExternalReference::isolate_address(isolate())));
__ push(holder);
__ push(Immediate(Smi::FromInt(0))); // should_throw_on_error -> false
__ push(Immediate(Smi::kZero)); // should_throw_on_error -> false
__ push(FieldOperand(callback, AccessorInfo::kNameOffset));
__ push(scratch); // Restore return address.

View File

@ -3090,7 +3090,7 @@ void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
cmp(index, FieldOperand(string, String::kLengthOffset));
Check(less, kIndexIsTooLarge);
cmp(index, Immediate(Smi::FromInt(0)));
cmp(index, Immediate(Smi::kZero));
Check(greater_equal, kIndexIsNegative);
// Restore the index
@ -3343,7 +3343,7 @@ void MacroAssembler::CheckEnumCache(Label* call_runtime) {
// For all objects but the receiver, check that the cache is empty.
EnumLength(edx, ebx);
cmp(edx, Immediate(Smi::FromInt(0)));
cmp(edx, Immediate(Smi::kZero));
j(not_equal, call_runtime);
bind(&start);

View File

@ -181,7 +181,7 @@ static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
// Out of bounds. Check the prototype chain to see if we can just return
// 'undefined'.
__ Cmp(key, Operand(Smi::FromInt(0)));
__ Cmp(key, Operand(Smi::kZero));
__ B(lt, slow); // Negative keys can't take the fast OOB path.
__ Bind(&check_prototypes);
__ Ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));

View File

@ -198,7 +198,7 @@ static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
// Out-of-bounds. Check the prototype chain to see if we can just return
// 'undefined'.
__ SmiCompare(key, Smi::FromInt(0));
__ SmiCompare(key, Smi::kZero);
__ j(less, slow); // Negative keys can't take the fast OOB path.
__ bind(&check_prototypes);
__ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset));

View File

@ -1206,7 +1206,7 @@ void BytecodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// Set up loop counter
Register index = register_allocator()->NewRegister();
builder()->LoadLiteral(Smi::FromInt(0));
builder()->LoadLiteral(Smi::kZero);
builder()->StoreAccumulatorInRegister(index);
// The loop

View File

@ -2400,7 +2400,7 @@ void Interpreter::DoForInPrepare(InterpreterAssembler* assembler) {
Node* object_reg = __ BytecodeOperandReg(0);
Node* receiver = __ LoadRegister(object_reg);
Node* context = __ GetContext();
Node* const zero_smi = __ SmiConstant(Smi::FromInt(0));
Node* const zero_smi = __ SmiConstant(Smi::kZero);
Label nothing_to_iterate(assembler, Label::kDeferred),
use_enum_cache(assembler), use_runtime(assembler, Label::kDeferred);

View File

@ -3208,7 +3208,7 @@ void Isolate::AddDetachedContext(Handle<Context> context) {
Handle<FixedArray> detached_contexts = factory()->detached_contexts();
int length = detached_contexts->length();
detached_contexts = factory()->CopyFixedArrayAndGrow(detached_contexts, 2);
detached_contexts->set(length, Smi::FromInt(0));
detached_contexts->set(length, Smi::kZero);
detached_contexts->set(length + 1, *cell);
heap()->set_detached_contexts(*detached_contexts);
}

View File

@ -780,7 +780,7 @@ Maybe<bool> KeyAccumulator::CollectOwnJSProxyKeys(Handle<JSReceiver> receiver,
target_keys->get(i));
nonconfigurable_keys_length++;
// The key was moved, null it out in the original list.
target_keys->set(i, Smi::FromInt(0));
target_keys->set(i, Smi::kZero);
} else {
// 14c. Else,
// 14c i. Append key as an element of targetConfigurableKeys.

View File

@ -18,7 +18,7 @@ LayoutDescriptor* LayoutDescriptor::FromSmi(Smi* smi) {
Handle<LayoutDescriptor> LayoutDescriptor::New(Isolate* isolate, int length) {
if (length <= kSmiValueSize) {
// The whole bit vector fits into a smi.
return handle(LayoutDescriptor::FromSmi(Smi::FromInt(0)), isolate);
return handle(LayoutDescriptor::FromSmi(Smi::kZero), isolate);
}
length = GetSlowModeBackingStoreLength(length);
return Handle<LayoutDescriptor>::cast(isolate->factory()->NewFixedTypedArray(
@ -37,7 +37,7 @@ bool LayoutDescriptor::InobjectUnboxedField(int inobject_properties,
LayoutDescriptor* LayoutDescriptor::FastPointerLayout() {
return LayoutDescriptor::FromSmi(Smi::FromInt(0));
return LayoutDescriptor::FromSmi(Smi::kZero);
}
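
The all-zero bit vector is what makes this work: in a layout descriptor a set bit marks an unboxed (raw double) field and a clear bit a tagged one, so the zero smi literally encodes "every field is a tagged pointer". A small standalone check of that encoding, assuming the one-bit-per-field scheme (IsTaggedField is an invented helper):

#include <cassert>
#include <cstdint>

// Bit i set => field i holds an unboxed double; clear => tagged pointer.
inline bool IsTaggedField(uint32_t layout, int field_index) {
  return ((layout >> field_index) & 1) == 0;
}

int main() {
  const uint32_t kFastPointerLayout = 0;  // The Smi::kZero case above.
  for (int i = 0; i < 32; i++) assert(IsTaggedField(kFastPointerLayout, i));
  return 0;
}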

View File

@ -566,7 +566,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// If either is a Smi (we know that not both are), then they can only
// be strictly equal if the other is a HeapNumber.
STATIC_ASSERT(kSmiTag == 0);
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
__ And(t2, lhs, Operand(rhs));
__ JumpIfNotSmi(t2, &not_smis, t0);
// One operand is a smi. EmitSmiNonsmiComparison generates code that can:
@ -4260,7 +4260,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
__ sw(a1, FieldMemOperand(v0, JSArray::kPropertiesOffset));
__ sw(a1, FieldMemOperand(v0, JSArray::kElementsOffset));
__ Move(a1, Smi::FromInt(0));
__ Move(a1, Smi::kZero);
__ Ret(USE_DELAY_SLOT);
__ sw(a1, FieldMemOperand(v0, JSArray::kLengthOffset)); // In delay slot
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
@ -4421,7 +4421,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
Label param_map_size;
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
__ Branch(USE_DELAY_SLOT, &param_map_size, eq, t2, Operand(zero_reg));
__ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0.
__ sll(t5, t2, 1);
@ -4486,13 +4486,13 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
Label skip3;
__ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0)));
__ Branch(&skip3, ne, t2, Operand(Smi::kZero));
// Move backing store address to a1, because it is
// expected there when filling in the unmapped arguments.
__ mov(a1, t0);
__ bind(&skip3);
__ Branch(&skip_parameter_map, eq, t2, Operand(Smi::FromInt(0)));
__ Branch(&skip_parameter_map, eq, t2, Operand(Smi::kZero));
__ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex);
__ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset));
@ -4537,7 +4537,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
__ sw(t3, MemOperand(t6));
__ Addu(t5, t5, Operand(Smi::FromInt(1)));
__ bind(&parameters_test);
__ Branch(&parameters_loop, ne, t1, Operand(Smi::FromInt(0)));
__ Branch(&parameters_loop, ne, t1, Operand(Smi::kZero));
// t1 = argument count (tagged).
__ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
@ -5074,7 +5074,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ sw(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize));
__ sw(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize));
// should_throw_on_error -> false
DCHECK(Smi::FromInt(0) == nullptr);
DCHECK(Smi::kZero == nullptr);
__ sw(zero_reg,
MemOperand(sp, (PCA::kShouldThrowOnErrorIndex + 1) * kPointerSize));
__ lw(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));

View File

@ -6473,7 +6473,7 @@ void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
lw(at, FieldMemOperand(string, String::kLengthOffset));
Check(lt, kIndexIsTooLarge, index, Operand(at));
DCHECK(Smi::FromInt(0) == 0);
DCHECK(Smi::kZero == 0);
Check(ge, kIndexIsNegative, index, Operand(zero_reg));
SmiUntag(index, index);
@ -6733,7 +6733,7 @@ void MacroAssembler::CheckEnumCache(Label* call_runtime) {
// For all objects but the receiver, check that the cache is empty.
EnumLength(a3, a1);
Branch(call_runtime, ne, a3, Operand(Smi::FromInt(0)));
Branch(call_runtime, ne, a3, Operand(Smi::kZero));
bind(&start);

View File

@ -563,7 +563,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// If either is a Smi (we know that not both are), then they can only
// be strictly equal if the other is a HeapNumber.
STATIC_ASSERT(kSmiTag == 0);
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
__ And(a6, lhs, Operand(rhs));
__ JumpIfNotSmi(a6, &not_smis, a4);
// One operand is a smi. EmitSmiNonsmiComparison generates code that can:
@ -4267,7 +4267,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
__ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
__ sd(a1, FieldMemOperand(v0, JSArray::kPropertiesOffset));
__ sd(a1, FieldMemOperand(v0, JSArray::kElementsOffset));
__ Move(a1, Smi::FromInt(0));
__ Move(a1, Smi::kZero);
__ Ret(USE_DELAY_SLOT);
__ sd(a1, FieldMemOperand(v0, JSArray::kLengthOffset)); // In delay slot
STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
@ -4434,7 +4434,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
Label param_map_size;
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
__ Branch(USE_DELAY_SLOT, &param_map_size, eq, a6, Operand(zero_reg));
__ mov(t1, zero_reg); // In delay slot: param map size = 0 when a6 == 0.
__ SmiScale(t1, a6, kPointerSizeLog2);
@ -4500,13 +4500,13 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
Label skip3;
__ Branch(&skip3, ne, a6, Operand(Smi::FromInt(0)));
__ Branch(&skip3, ne, a6, Operand(Smi::kZero));
// Move backing store address to a1, because it is
// expected there when filling in the unmapped arguments.
__ mov(a1, a4);
__ bind(&skip3);
__ Branch(&skip_parameter_map, eq, a6, Operand(Smi::FromInt(0)));
__ Branch(&skip_parameter_map, eq, a6, Operand(Smi::kZero));
__ LoadRoot(a5, Heap::kSloppyArgumentsElementsMapRootIndex);
__ sd(a5, FieldMemOperand(a4, FixedArray::kMapOffset));
@ -4553,7 +4553,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
__ sd(a7, MemOperand(t2));
__ Daddu(t1, t1, Operand(Smi::FromInt(1)));
__ bind(&parameters_test);
__ Branch(&parameters_loop, ne, a5, Operand(Smi::FromInt(0)));
__ Branch(&parameters_loop, ne, a5, Operand(Smi::kZero));
// Restore t1 = argument count (tagged).
__ ld(a5, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
@ -5100,7 +5100,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ sd(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize));
__ sd(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize));
// should_throw_on_error -> false
DCHECK(Smi::FromInt(0) == nullptr);
DCHECK(Smi::kZero == nullptr);
__ sd(zero_reg,
MemOperand(sp, (PCA::kShouldThrowOnErrorIndex + 1) * kPointerSize));
__ ld(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));

View File

@ -6897,7 +6897,7 @@ void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
ld(at, FieldMemOperand(string, String::kLengthOffset));
Check(lt, kIndexIsTooLarge, index, Operand(at));
DCHECK(Smi::FromInt(0) == 0);
DCHECK(Smi::kZero == 0);
Check(ge, kIndexIsNegative, index, Operand(zero_reg));
}
@ -7162,7 +7162,7 @@ void MacroAssembler::CheckEnumCache(Label* call_runtime) {
// For all objects but the receiver, check that the cache is empty.
EnumLength(a3, a1);
Branch(call_runtime, ne, a3, Operand(Smi::FromInt(0)));
Branch(call_runtime, ne, a3, Operand(Smi::kZero));
bind(&start);

View File

@ -677,7 +677,7 @@ void Code::CodeVerify() {
last_gc_pc = it.rinfo()->pc();
}
}
CHECK(raw_type_feedback_info() == Smi::FromInt(0) ||
CHECK(raw_type_feedback_info() == Smi::kZero ||
raw_type_feedback_info()->IsSmi() == IsCodeStubOrIC());
}
@ -872,7 +872,7 @@ void JSArrayBufferView::JSArrayBufferViewVerify() {
VerifyPointer(buffer());
Isolate* isolate = GetIsolate();
CHECK(buffer()->IsJSArrayBuffer() || buffer()->IsUndefined(isolate) ||
buffer() == Smi::FromInt(0));
buffer() == Smi::kZero);
VerifyPointer(raw_byte_offset());
CHECK(raw_byte_offset()->IsSmi() || raw_byte_offset()->IsHeapNumber() ||

View File

@ -305,7 +305,7 @@ Handle<Object> Object::NewStorageFor(Isolate* isolate,
Handle<Object> object,
Representation representation) {
if (representation.IsSmi() && object->IsUninitialized(isolate)) {
return handle(Smi::FromInt(0), isolate);
return handle(Smi::kZero, isolate);
}
if (!representation.IsDouble()) return object;
double value;
@ -1589,9 +1589,9 @@ FixedArrayBase* JSObject::elements() const {
void AllocationSite::Initialize() {
set_transition_info(Smi::FromInt(0));
set_transition_info(Smi::kZero);
SetElementsKind(GetInitialFastElementsKind());
set_nested_site(Smi::FromInt(0));
set_nested_site(Smi::kZero);
set_pretenure_data(0);
set_pretenure_create_count(0);
set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
@ -2019,7 +2019,7 @@ void WeakCell::clear() {
// initializing the root empty weak cell.
DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT ||
this == GetHeap()->empty_weak_cell());
WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
WRITE_FIELD(this, kValueOffset, Smi::kZero);
}
@ -2034,9 +2034,7 @@ void WeakCell::initialize(HeapObject* val) {
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode);
}
bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }
bool WeakCell::cleared() const { return value() == Smi::kZero; }
Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
@ -2469,7 +2467,7 @@ bool WeakFixedArray::IsEmptySlot(int index) const {
void WeakFixedArray::Clear(int index) {
FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
FixedArray::cast(this)->set(index + kFirstIndex, Smi::kZero);
}
@ -4227,7 +4225,7 @@ int FixedTypedArrayBase::ElementSize(InstanceType type) {
int FixedTypedArrayBase::DataSize(InstanceType type) {
if (base_pointer() == Smi::FromInt(0)) return 0;
if (base_pointer() == Smi::kZero) return 0;
return length() * ElementSize(type);
}
@ -6917,7 +6915,7 @@ void JSArrayBuffer::set_is_shared(bool value) {
Object* JSArrayBufferView::byte_offset() const {
if (WasNeutered()) return Smi::FromInt(0);
if (WasNeutered()) return Smi::kZero;
return Object::cast(READ_FIELD(this, kByteOffsetOffset));
}
@ -6929,7 +6927,7 @@ void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
Object* JSArrayBufferView::byte_length() const {
if (WasNeutered()) return Smi::FromInt(0);
if (WasNeutered()) return Smi::kZero;
return Object::cast(READ_FIELD(this, kByteLengthOffset));
}
@ -6953,7 +6951,7 @@ bool JSArrayBufferView::WasNeutered() const {
Object* JSTypedArray::length() const {
if (WasNeutered()) return Smi::FromInt(0);
if (WasNeutered()) return Smi::kZero;
return Object::cast(READ_FIELD(this, kLengthOffset));
}
@ -7705,7 +7703,7 @@ template<typename Derived, typename Shape, typename Key>
void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
Handle<Object> key,
Handle<Object> value) {
this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
this->SetEntry(entry, key, value, PropertyDetails(Smi::kZero));
}
@ -8097,9 +8095,9 @@ void TypeFeedbackInfo::change_ic_generic_count(int delta) {
void TypeFeedbackInfo::initialize_storage() {
WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
WRITE_FIELD(this, kStorage1Offset, Smi::kZero);
WRITE_FIELD(this, kStorage2Offset, Smi::kZero);
WRITE_FIELD(this, kStorage3Offset, Smi::kZero);
}

View File

@ -1199,7 +1199,7 @@ bool FunctionTemplateInfo::IsTemplateFor(Map* map) {
Handle<TemplateList> TemplateList::New(Isolate* isolate, int size) {
Handle<FixedArray> list =
isolate->factory()->NewFixedArray(kLengthIndex + size);
list->set(kLengthIndex, Smi::FromInt(0));
list->set(kLengthIndex, Smi::kZero);
return Handle<TemplateList>::cast(list);
}
@ -2152,6 +2152,8 @@ std::ostream& operator<<(std::ostream& os, const Brief& v) {
return os;
}
// Definition of the static Smi::kZero constant.
Smi* const Smi::kZero(nullptr);
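
The initializer is not arbitrary: a smi keeps its payload in the upper bits of the word with a zero tag in the low bit, so the smi for 0 is the all-zero word, exactly nullptr, and the constant needs no runtime setup. A standalone sketch of that encoding (SmiMock and SmiFromInt are invented names; assumes the 32-bit layout with a one-bit tag, where the 64-bit layout shifts further):

#include <cassert>
#include <cstdint>

constexpr int kSmiTagSize = 1;  // Low bit; the smi tag itself is 0.

struct SmiMock;  // Only ever a tagged pointer, never dereferenced.

inline SmiMock* SmiFromInt(int value) {
  return reinterpret_cast<SmiMock*>(static_cast<intptr_t>(value)
                                    << kSmiTagSize);
}

int main() {
  // The zero smi is the all-zero word, which is why the definition above
  // can simply be `Smi* const Smi::kZero(nullptr)`.
  assert(SmiFromInt(0) == nullptr);
  assert(SmiFromInt(7) != nullptr);
  return 0;
}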
void Smi::SmiPrint(std::ostream& os) const { // NOLINT
os << value();
@ -3175,7 +3177,7 @@ void JSObject::UpdatePrototypeUserRegistration(Handle<Map> old_map,
DCHECK(new_map->is_prototype_map());
bool was_registered = JSObject::UnregisterPrototypeUser(old_map, isolate);
new_map->set_prototype_info(old_map->prototype_info());
old_map->set_prototype_info(Smi::FromInt(0));
old_map->set_prototype_info(Smi::kZero);
if (FLAG_trace_prototype_users) {
PrintF("Moving prototype_info %p from map %p to map %p.\n",
reinterpret_cast<void*>(new_map->prototype_info()),
@ -3331,7 +3333,7 @@ void MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
value = handle(object->RawFastPropertyAt(index), isolate);
if (!old_representation.IsDouble() && representation.IsDouble()) {
if (old_representation.IsNone()) {
value = handle(Smi::FromInt(0), isolate);
value = handle(Smi::kZero, isolate);
}
value = Object::NewStorageFor(isolate, value, representation);
} else if (old_representation.IsDouble() &&
@ -3514,7 +3516,7 @@ void MigrateFastToSlow(Handle<JSObject> object, Handle<Map> new_map,
int inobject_properties = new_map->GetInObjectProperties();
for (int i = 0; i < inobject_properties; i++) {
FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
object->RawFastPropertyAtPut(index, Smi::FromInt(0));
object->RawFastPropertyAtPut(index, Smi::kZero);
}
isolate->counters()->props_to_dictionary()->Increment();
@ -8900,7 +8902,7 @@ Handle<Map> Map::Normalize(Handle<Map> fast_map, PropertyNormalizationMode mode,
// For prototype maps, the PrototypeInfo is not copied.
DCHECK(memcmp(fresh->address(), new_map->address(),
kTransitionsOrPrototypeInfoOffset) == 0);
DCHECK(fresh->raw_transitions() == Smi::FromInt(0));
DCHECK(fresh->raw_transitions() == Smi::kZero);
STATIC_ASSERT(kDescriptorsOffset ==
kTransitionsOrPrototypeInfoOffset + kPointerSize);
DCHECK(memcmp(HeapObject::RawField(*fresh, kDescriptorsOffset),
@ -10257,7 +10259,7 @@ Handle<WeakFixedArray> WeakFixedArray::Allocate(
}
}
while (index < result->length()) {
result->set(index, Smi::FromInt(0));
result->set(index, Smi::kZero);
index++;
}
return Handle<WeakFixedArray>::cast(result);
@ -10380,15 +10382,11 @@ Handle<DescriptorArray> DescriptorArray::Allocate(Isolate* isolate,
factory->NewFixedArray(LengthFor(size), pretenure);
result->set(kDescriptorLengthIndex, Smi::FromInt(number_of_descriptors));
result->set(kEnumCacheIndex, Smi::FromInt(0));
result->set(kEnumCacheIndex, Smi::kZero);
return Handle<DescriptorArray>::cast(result);
}
void DescriptorArray::ClearEnumCache() {
set(kEnumCacheIndex, Smi::FromInt(0));
}
void DescriptorArray::ClearEnumCache() { set(kEnumCacheIndex, Smi::kZero); }
void DescriptorArray::Replace(int index, Descriptor* descriptor) {
descriptor->SetSortedKeyIndex(GetSortedKeyIndex(index));
@ -10411,9 +10409,9 @@ void DescriptorArray::SetEnumCache(Handle<DescriptorArray> descriptors,
bridge_storage = FixedArray::cast(descriptors->get(kEnumCacheIndex));
}
bridge_storage->set(kEnumCacheBridgeCacheIndex, *new_cache);
bridge_storage->set(kEnumCacheBridgeIndicesCacheIndex,
new_index_cache.is_null() ? Object::cast(Smi::FromInt(0))
: *new_index_cache);
bridge_storage->set(
kEnumCacheBridgeIndicesCacheIndex,
new_index_cache.is_null() ? Object::cast(Smi::kZero) : *new_index_cache);
if (needs_new_enum_cache) {
descriptors->set(kEnumCacheIndex, bridge_storage);
}
@ -10715,7 +10713,7 @@ Handle<Object> String::ToNumber(Handle<String> subject) {
// Fast case: short integer or some sorts of junk values.
if (subject->IsSeqOneByteString()) {
int len = subject->length();
if (len == 0) return handle(Smi::FromInt(0), isolate);
if (len == 0) return handle(Smi::kZero, isolate);
DisallowHeapAllocation no_gc;
uint8_t const* data = Handle<SeqOneByteString>::cast(subject)->GetChars();
@ -16485,14 +16483,13 @@ MaybeHandle<JSRegExp> JSRegExp::Initialize(Handle<JSRegExp> regexp,
if (constructor->IsJSFunction() &&
JSFunction::cast(constructor)->initial_map() == map) {
// If we still have the original map, set in-object properties directly.
regexp->InObjectPropertyAtPut(JSRegExp::kLastIndexFieldIndex,
Smi::FromInt(0), SKIP_WRITE_BARRIER);
regexp->InObjectPropertyAtPut(JSRegExp::kLastIndexFieldIndex, Smi::kZero,
SKIP_WRITE_BARRIER);
} else {
// Map has changed, so use generic, but slower, method.
RETURN_ON_EXCEPTION(
isolate,
JSReceiver::SetProperty(regexp, factory->last_index_string(),
Handle<Smi>(Smi::FromInt(0), isolate), STRICT),
RETURN_ON_EXCEPTION(isolate, JSReceiver::SetProperty(
regexp, factory->last_index_string(),
Handle<Smi>(Smi::kZero, isolate), STRICT),
JSRegExp);
}
@ -17209,7 +17206,7 @@ Handle<Object> JSObject::PrepareElementsForSort(Handle<JSObject> object,
limit = elements_length;
}
if (limit == 0) {
return handle(Smi::FromInt(0), isolate);
return handle(Smi::kZero, isolate);
}
uint32_t result = 0;
@ -18632,7 +18629,7 @@ Smi* OrderedHashTableIterator<Derived, TableType>::Next(JSArray* value_array) {
MoveNext();
return Smi::cast(kind());
}
return Smi::FromInt(0);
return Smi::kZero;
}
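
The returned smi does double duty here: a non-zero value is the iterator kind (keys, values, or entries) of an iterator that produced a result, while the zero smi signals exhaustion. Roughly, under those assumptions (mock names, not the V8 API):

// Sketch of the contract above: 1/2/3 encode the kind, 0 means done.
enum IterKindMock { kDone = 0, kKeys = 1, kValues = 2, kEntries = 3 };

int NextSketch(int* position, int used_capacity, int kind) {
  if (*position < used_capacity) {  // HasMore()
    ++*position;                    // MoveNext()
    return kind;                    // Smi::cast(kind())
  }
  return kDone;                     // The Smi::kZero return above.
}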
@ -19238,7 +19235,7 @@ void JSArrayBuffer::Neuter() {
CHECK(is_neuterable());
CHECK(is_external());
set_backing_store(NULL);
set_byte_length(Smi::FromInt(0));
set_byte_length(Smi::kZero);
set_was_neutered(true);
}
@ -19249,7 +19246,7 @@ void JSArrayBuffer::Setup(Handle<JSArrayBuffer> array_buffer, Isolate* isolate,
DCHECK(array_buffer->GetInternalFieldCount() ==
v8::ArrayBuffer::kInternalFieldCount);
for (int i = 0; i < v8::ArrayBuffer::kInternalFieldCount; i++) {
array_buffer->SetInternalField(i, Smi::FromInt(0));
array_buffer->SetInternalField(i, Smi::kZero);
}
array_buffer->set_bit_field(0);
array_buffer->set_is_external(is_external);

View File

@ -1510,6 +1510,7 @@ class Smi: public Object {
void SmiPrint(std::ostream& os) const; // NOLINT
DECLARE_VERIFIER(Smi)
V8_EXPORT_PRIVATE static Smi* const kZero;
static const int kMinValue =
(static_cast<unsigned int>(-1)) << (kSmiValueSize - 1);
static const int kMaxValue = -(kMinValue + 1);
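
For the 32-bit layout (kSmiValueSize == 31) these expressions evaluate to -2^30 and 2^30 - 1: shifting the all-ones unsigned word left by 30 bits leaves 0xC0000000, i.e. -1073741824 as a signed int. A compile-time spot check of that arithmetic (mock names, mirroring the expressions above):

// Assuming kSmiValueSize == 31 (the 32-bit smi layout).
constexpr int kSmiValueSizeMock = 31;
constexpr int kMinValueMock = static_cast<int>(
    static_cast<unsigned int>(-1) << (kSmiValueSizeMock - 1));
constexpr int kMaxValueMock = -(kMinValueMock + 1);

static_assert(kMinValueMock == -1073741824, "kMinValue is -2^30");
static_assert(kMaxValueMock == 1073741823, "kMaxValue is 2^30 - 1");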
@ -2872,7 +2873,7 @@ class WeakFixedArray : public FixedArray {
inline int Length() const;
inline bool IsEmptySlot(int index) const;
static Object* Empty() { return Smi::FromInt(0); }
static Object* Empty() { return Smi::kZero; }
class Iterator {
public:
@ -11211,7 +11212,7 @@ class DebugInfo: public Struct {
// Get the number of break points for this function.
int GetBreakPointCount();
static Smi* uninitialized() { return Smi::FromInt(0); }
static Smi* uninitialized() { return Smi::kZero; }
inline bool HasDebugBytecodeArray();
inline bool HasDebugCode();

View File

@ -561,7 +561,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// If either is a Smi (we know that not both are), then they can only
// be strictly equal if the other is a HeapNumber.
STATIC_ASSERT(kSmiTag == 0);
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
__ and_(r5, lhs, rhs);
__ JumpIfNotSmi(r5, &not_smis);
// One operand is a smi. EmitSmiNonsmiComparison generates code that can:
@ -4385,7 +4385,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
__ CmpSmiLiteral(r9, Smi::FromInt(0), r0);
__ CmpSmiLiteral(r9, Smi::kZero, r0);
if (CpuFeatures::IsSupported(ISELECT)) {
__ SmiToPtrArrayOffset(r11, r9);
__ addi(r11, r11, Operand(kParameterMapHeaderSize));
@ -4467,7 +4467,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
// r9 = mapped parameter count (tagged)
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
__ CmpSmiLiteral(r9, Smi::FromInt(0), r0);
__ CmpSmiLiteral(r9, Smi::kZero, r0);
if (CpuFeatures::IsSupported(ISELECT)) {
__ isel(eq, r4, r7, r4);
__ beq(&skip_parameter_map);
@ -5113,7 +5113,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ Push(scratch, scratch);
__ mov(scratch, Operand(ExternalReference::isolate_address(isolate())));
__ Push(scratch, holder);
__ Push(Smi::FromInt(0)); // should_throw_on_error -> false
__ Push(Smi::kZero); // should_throw_on_error -> false
__ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
__ push(scratch);

View File

@ -3451,7 +3451,7 @@ void MacroAssembler::EmitSeqStringSetCharCheck(Register string, Register index,
cmp(index, ip);
Check(lt, kIndexIsTooLarge);
DCHECK(Smi::FromInt(0) == 0);
DCHECK(Smi::kZero == 0);
cmpi(index, Operand::Zero());
Check(ge, kIndexIsNegative);
@ -3828,7 +3828,7 @@ void MacroAssembler::CheckEnumCache(Label* call_runtime) {
// For all objects but the receiver, check that the cache is empty.
EnumLength(r6, r4);
CmpSmiLiteral(r6, Smi::FromInt(0), r0);
CmpSmiLiteral(r6, Smi::kZero, r0);
bne(call_runtime);
bind(&start);

View File

@ -32,7 +32,7 @@ class Descriptor BASE_EMBEDDED {
PropertyDetails details_;
protected:
Descriptor() : details_(Smi::FromInt(0)) {}
Descriptor() : details_(Smi::kZero) {}
void Init(Handle<Name> key, Handle<Object> value, PropertyDetails details) {
DCHECK(key->IsUniqueName());

View File

@ -417,7 +417,7 @@ void RegExpImpl::SetIrregexpMaxRegisterCount(FixedArray* re, int value) {
void RegExpImpl::SetIrregexpCaptureNameMap(FixedArray* re,
Handle<FixedArray> value) {
if (value.is_null()) {
re->set(JSRegExp::kIrregexpCaptureNameMapIndex, Smi::FromInt(0));
re->set(JSRegExp::kIrregexpCaptureNameMapIndex, Smi::kZero);
} else {
re->set(JSRegExp::kIrregexpCaptureNameMapIndex, *value);
}
@ -6781,10 +6781,10 @@ Object* RegExpResultsCache::Lookup(Heap* heap, String* key_string,
FixedArray** last_match_cache,
ResultsCacheType type) {
FixedArray* cache;
if (!key_string->IsInternalizedString()) return Smi::FromInt(0);
if (!key_string->IsInternalizedString()) return Smi::kZero;
if (type == STRING_SPLIT_SUBSTRINGS) {
DCHECK(key_pattern->IsString());
if (!key_pattern->IsInternalizedString()) return Smi::FromInt(0);
if (!key_pattern->IsInternalizedString()) return Smi::kZero;
cache = heap->string_split_cache();
} else {
DCHECK(type == REGEXP_MULTIPLE_INDICES);
@ -6801,7 +6801,7 @@ Object* RegExpResultsCache::Lookup(Heap* heap, String* key_string,
((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1));
if (cache->get(index + kStringOffset) != key_string ||
cache->get(index + kPatternOffset) != key_pattern) {
return Smi::FromInt(0);
return Smi::kZero;
}
}
@ -6831,7 +6831,7 @@ void RegExpResultsCache::Enter(Isolate* isolate, Handle<String> key_string,
uint32_t hash = key_string->Hash();
uint32_t index = ((hash & (kRegExpResultsCacheSize - 1)) &
~(kArrayEntriesPerCacheEntry - 1));
if (cache->get(index + kStringOffset) == Smi::FromInt(0)) {
if (cache->get(index + kStringOffset) == Smi::kZero) {
cache->set(index + kStringOffset, *key_string);
cache->set(index + kPatternOffset, *key_pattern);
cache->set(index + kArrayOffset, *value_array);
@ -6839,16 +6839,16 @@ void RegExpResultsCache::Enter(Isolate* isolate, Handle<String> key_string,
} else {
uint32_t index2 =
((index + kArrayEntriesPerCacheEntry) & (kRegExpResultsCacheSize - 1));
if (cache->get(index2 + kStringOffset) == Smi::FromInt(0)) {
if (cache->get(index2 + kStringOffset) == Smi::kZero) {
cache->set(index2 + kStringOffset, *key_string);
cache->set(index2 + kPatternOffset, *key_pattern);
cache->set(index2 + kArrayOffset, *value_array);
cache->set(index2 + kLastMatchOffset, *last_match_cache);
} else {
cache->set(index2 + kStringOffset, Smi::FromInt(0));
cache->set(index2 + kPatternOffset, Smi::FromInt(0));
cache->set(index2 + kArrayOffset, Smi::FromInt(0));
cache->set(index2 + kLastMatchOffset, Smi::FromInt(0));
cache->set(index2 + kStringOffset, Smi::kZero);
cache->set(index2 + kPatternOffset, Smi::kZero);
cache->set(index2 + kArrayOffset, Smi::kZero);
cache->set(index2 + kLastMatchOffset, Smi::kZero);
cache->set(index + kStringOffset, *key_string);
cache->set(index + kPatternOffset, *key_pattern);
cache->set(index + kArrayOffset, *value_array);
@ -6871,7 +6871,7 @@ void RegExpResultsCache::Enter(Isolate* isolate, Handle<String> key_string,
void RegExpResultsCache::Clear(FixedArray* cache) {
for (int i = 0; i < kRegExpResultsCacheSize; i++) {
cache->set(i, Smi::FromInt(0));
cache->set(i, Smi::kZero);
}
}
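
Aside: Lookup and Enter above implement a two-way set-associative cache in a flat FixedArray, with Smi::kZero as the "empty slot" marker. A self-contained sketch of just the replacement policy (hypothetical types; the real entries also store the pattern, the results array, and the last-match data):

#include <cstdint>

// Each key probes a primary slot, then one secondary slot further along.
// On a miss in both, Enter fills the first empty slot; if neither is empty
// it clears the secondary slot and overwrites the primary one, mirroring
// RegExpResultsCache::Enter. nullptr stands in for Smi::kZero.
struct TwoWayCache {
  static const uint32_t kSize = 256;  // power of two, like kRegExpResultsCacheSize
  const void* keys[kSize] = {nullptr};
  const void* values[kSize] = {nullptr};

  static uint32_t Primary(uint32_t hash) { return hash & (kSize - 1); }
  static uint32_t Secondary(uint32_t index) { return (index + 2) & (kSize - 1); }

  const void* Lookup(uint32_t hash, const void* key) const {
    uint32_t index = Primary(hash);
    if (keys[index] != key) {
      index = Secondary(index);
      if (keys[index] != key) return nullptr;  // miss, like returning Smi::kZero
    }
    return values[index];
  }

  void Enter(uint32_t hash, const void* key, const void* value) {
    uint32_t index = Primary(hash);
    if (keys[index] == nullptr) {
      keys[index] = key;
      values[index] = value;
      return;
    }
    uint32_t index2 = Secondary(index);
    if (keys[index2] == nullptr) {
      keys[index2] = key;
      values[index2] = value;
      return;
    }
    keys[index2] = nullptr;   // evict the secondary entry
    values[index2] = nullptr;
    keys[index] = key;        // and claim the primary slot
    values[index] = value;
  }
};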

@ -28,7 +28,7 @@ RUNTIME_FUNCTION(Runtime_FinishArrayPrototypeSetup) {
// This is necessary to enable fast checks for absence of elements
// on Array.prototype and below.
prototype->set_elements(isolate->heap()->empty_fixed_array());
return Smi::FromInt(0);
return Smi::kZero;
}
static void InstallCode(Isolate* isolate, Handle<JSObject> holder,
@ -140,7 +140,7 @@ RUNTIME_FUNCTION(Runtime_MoveArrayContents) {
to->set_length(from->length());
JSObject::ResetElements(from);
from->set_length(Smi::FromInt(0));
from->set_length(Smi::kZero);
JSObject::ValidateElements(to);
return *to;
@ -376,7 +376,7 @@ RUNTIME_FUNCTION(Runtime_GrowArrayElements) {
if (index >= capacity) {
if (!object->GetElementsAccessor()->GrowCapacity(object, index)) {
return Smi::FromInt(0);
return Smi::kZero;
}
}

@ -95,7 +95,7 @@ RUNTIME_FUNCTION(Runtime_SetIteratorInitialize) {
kind == JSSetIterator::kKindEntries);
Handle<OrderedHashSet> table(OrderedHashSet::cast(set->table()));
holder->set_table(*table);
holder->set_index(Smi::FromInt(0));
holder->set_index(Smi::kZero);
holder->set_kind(Smi::FromInt(kind));
return isolate->heap()->undefined_value();
}
@ -191,7 +191,7 @@ RUNTIME_FUNCTION(Runtime_MapIteratorInitialize) {
kind == JSMapIterator::kKindEntries);
Handle<OrderedHashMap> table(OrderedHashMap::cast(map->table()));
holder->set_table(*table);
holder->set_index(Smi::FromInt(0));
holder->set_index(Smi::kZero);
holder->set_kind(Smi::FromInt(kind));
return isolate->heap()->undefined_value();
}

@ -123,7 +123,7 @@ RUNTIME_FUNCTION(Runtime_InstantiateAsmJs) {
function->shared()->ReplaceCode(
isolate->builtins()->builtin(Builtins::kCompileLazy));
}
return Smi::FromInt(0);
return Smi::kZero;
}
RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {

@ -47,7 +47,7 @@ RUNTIME_FUNCTION(Runtime_DebugBreakOnBytecode) {
isolate->debug()->Break(it.frame());
// If live-edit has dropped frames, we are not going back to dispatch.
if (LiveEdit::SetAfterBreakTarget(isolate->debug())) return Smi::FromInt(0);
if (LiveEdit::SetAfterBreakTarget(isolate->debug())) return Smi::kZero;
// Return the handler from the original bytecode array.
DCHECK(it.frame()->is_interpreted());
@ -457,7 +457,7 @@ RUNTIME_FUNCTION(Runtime_GetFrameCount) {
StackFrame::Id id = isolate->debug()->break_frame_id();
if (id == StackFrame::NO_ID) {
// If there is no JavaScript stack frame, the frame count is 0.
return Smi::FromInt(0);
return Smi::kZero;
}
for (StackTraceFrameIterator it(isolate, id); !it.done(); it.Advance()) {
@ -563,10 +563,10 @@ RUNTIME_FUNCTION(Runtime_GetFrameDetails) {
details->set(kFrameDetailsScriptIndex, *script_wrapper);
// Add the arguments count.
details->set(kFrameDetailsArgumentCountIndex, Smi::FromInt(0));
details->set(kFrameDetailsArgumentCountIndex, Smi::kZero);
// Add the locals count.
details->set(kFrameDetailsLocalCountIndex, Smi::FromInt(0));
details->set(kFrameDetailsLocalCountIndex, Smi::kZero);
// Add the source position.
if (position != kNoSourcePosition) {
@ -929,7 +929,7 @@ RUNTIME_FUNCTION(Runtime_GetGeneratorScopeCount) {
HandleScope scope(isolate);
DCHECK_EQ(1, args.length());
if (!args[0]->IsJSGeneratorObject()) return Smi::FromInt(0);
if (!args[0]->IsJSGeneratorObject()) return Smi::kZero;
// Check arguments.
CONVERT_ARG_HANDLE_CHECKED(JSGeneratorObject, gen, 0);
@ -1601,7 +1601,7 @@ RUNTIME_FUNCTION(Runtime_ScriptLineStartPosition) {
if (line < 0 || line > line_count) {
return Smi::FromInt(-1);
} else if (line == 0) {
return Smi::FromInt(0);
return Smi::kZero;
} else {
DCHECK(0 < line && line <= line_count);
const int pos = Smi::cast(line_ends_array->get(line - 1))->value() + 1;

@ -1283,7 +1283,7 @@ RUNTIME_FUNCTION(Runtime_DateCacheVersion) {
if (!isolate->eternal_handles()->Exists(EternalHandles::DATE_CACHE_VERSION)) {
Handle<FixedArray> date_cache_version =
isolate->factory()->NewFixedArray(1, TENURED);
date_cache_version->set(0, Smi::FromInt(0));
date_cache_version->set(0, Smi::kZero);
isolate->eternal_handles()->CreateSingleton(
isolate, *date_cache_version, EternalHandles::DATE_CACHE_VERSION);
}

@ -86,7 +86,7 @@ MUST_USE_RESULT static MaybeHandle<Object> CreateObjectLiteralBoilerplate(
if (key->ToArrayIndex(&element_index)) {
// Array index (uint32).
if (value->IsUninitialized(isolate)) {
value = handle(Smi::FromInt(0), isolate);
value = handle(Smi::kZero, isolate);
}
maybe_result = JSObject::SetOwnElementIgnoreAttributes(
boilerplate, element_index, value, NONE);

@ -530,7 +530,7 @@ RUNTIME_FUNCTION(Runtime_GetInterceptorInfo) {
HandleScope scope(isolate);
DCHECK(args.length() == 1);
if (!args[0]->IsJSObject()) {
return Smi::FromInt(0);
return Smi::kZero;
}
CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
@ -604,14 +604,14 @@ RUNTIME_FUNCTION(Runtime_TryMigrateInstance) {
HandleScope scope(isolate);
DCHECK(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(Object, object, 0);
if (!object->IsJSObject()) return Smi::FromInt(0);
if (!object->IsJSObject()) return Smi::kZero;
Handle<JSObject> js_object = Handle<JSObject>::cast(object);
if (!js_object->map()->is_deprecated()) return Smi::FromInt(0);
if (!js_object->map()->is_deprecated()) return Smi::kZero;
// This call must not cause lazy deopts, because it's called from deferred
// code where we can't handle lazy deopts for lack of a suitable bailout
// ID. So we just try migration and signal failure if necessary,
// which will also trigger a deopt.
if (!JSObject::TryMigrateInstance(js_object)) return Smi::FromInt(0);
if (!JSObject::TryMigrateInstance(js_object)) return Smi::kZero;
return *object;
}
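
Aside: TryMigrateInstance above illustrates a convention this rename makes easier to grep for — runtime entries that must not throw (or, here, must not lazily deopt) report failure by returning the smi zero, which the calling code tests before taking its own fallback path. A sketch of the shape (hypothetical types, not V8 API):

#include <cstdint>

using TaggedValue = intptr_t;    // stands in for Object*
const TaggedValue kSmiZero = 0;  // stands in for Smi::kZero

// Attempt an operation; on failure hand back the smi-zero sentinel instead
// of raising an exception, leaving the caller to deopt or retry.
TaggedValue TryMigrate(bool can_migrate, TaggedValue object) {
  return can_migrate ? object : kSmiZero;
}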

@ -706,7 +706,7 @@ RUNTIME_FUNCTION(Runtime_StringSplit) {
&last_match_cache_unused,
RegExpResultsCache::STRING_SPLIT_SUBSTRINGS),
isolate);
if (*cached_answer != Smi::FromInt(0)) {
if (*cached_answer != Smi::kZero) {
// The cache FixedArray is a COW-array and can therefore be reused.
Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(
Handle<FixedArray>::cast(cached_answer));

@ -256,7 +256,7 @@ RUNTIME_FUNCTION(Runtime_StringCompare) {
break;
}
UNREACHABLE();
return Smi::FromInt(0);
return Smi::kZero;
}
@ -573,13 +573,13 @@ static int CopyCachedOneByteCharsToArray(Heap* heap, const uint8_t* chars,
elements->set(i, value, mode);
}
if (i < length) {
DCHECK(Smi::FromInt(0) == 0);
DCHECK(Smi::kZero == 0);
memset(elements->data_start() + i, 0, kPointerSize * (length - i));
}
#ifdef DEBUG
for (int j = 0; j < length; ++j) {
Object* element = elements->get(j);
DCHECK(element == Smi::FromInt(0) ||
DCHECK(element == Smi::kZero ||
(element->IsString() && String::cast(element)->LooksValid()));
}
#endif
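
Aside: the memset above is only valid because smi 0 is the all-zero word, which the DCHECK(Smi::kZero == 0) pins down. A stand-alone sketch of the same tail-fill, assuming one-word tagged slots (hypothetical helper):

#include <cstring>
#include <cstdint>

// Zero the uninitialized tail of an elements array in one call: every
// remaining slot ends up holding a valid tagged smi zero rather than
// garbage, so the DEBUG loop above sees only smi zeros and strings.
void FillTailWithSmiZero(intptr_t* data_start, int filled, int length) {
  std::memset(data_start + filled, 0,
              sizeof(intptr_t) * static_cast<size_t>(length - filled));
}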
@ -942,7 +942,7 @@ RUNTIME_FUNCTION(Runtime_StringLessThan) {
break;
}
UNREACHABLE();
return Smi::FromInt(0);
return Smi::kZero;
}
RUNTIME_FUNCTION(Runtime_StringLessThanOrEqual) {
@ -960,7 +960,7 @@ RUNTIME_FUNCTION(Runtime_StringLessThanOrEqual) {
break;
}
UNREACHABLE();
return Smi::FromInt(0);
return Smi::kZero;
}
RUNTIME_FUNCTION(Runtime_StringGreaterThan) {
@ -978,7 +978,7 @@ RUNTIME_FUNCTION(Runtime_StringGreaterThan) {
break;
}
UNREACHABLE();
return Smi::FromInt(0);
return Smi::kZero;
}
RUNTIME_FUNCTION(Runtime_StringGreaterThanOrEqual) {
@ -996,7 +996,7 @@ RUNTIME_FUNCTION(Runtime_StringGreaterThanOrEqual) {
break;
}
UNREACHABLE();
return Smi::FromInt(0);
return Smi::kZero;
}
RUNTIME_FUNCTION(Runtime_StringEqual) {

@ -59,7 +59,7 @@ RUNTIME_FUNCTION(Runtime_ArrayBufferNeuter) {
DCHECK(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(JSArrayBuffer, array_buffer, 0);
if (array_buffer->backing_store() == NULL) {
CHECK(Smi::FromInt(0) == array_buffer->byte_length());
CHECK(Smi::kZero == array_buffer->byte_length());
return isolate->heap()->undefined_value();
}
// Shared array buffers should never be neutered.
@ -142,7 +142,7 @@ RUNTIME_FUNCTION(Runtime_TypedArrayInitialize) {
DCHECK_EQ(v8::ArrayBufferView::kInternalFieldCount,
holder->GetInternalFieldCount());
for (int i = 0; i < v8::ArrayBufferView::kInternalFieldCount; i++) {
holder->SetInternalField(i, Smi::FromInt(0));
holder->SetInternalField(i, Smi::kZero);
}
Handle<Object> length_obj = isolate->factory()->NewNumberFromSize(length);
holder->set_length(*length_obj);
@ -215,7 +215,7 @@ RUNTIME_FUNCTION(Runtime_TypedArrayInitializeFromArrayLike) {
DCHECK_EQ(v8::ArrayBufferView::kInternalFieldCount,
holder->GetInternalFieldCount());
for (int i = 0; i < v8::ArrayBufferView::kInternalFieldCount; i++) {
holder->SetInternalField(i, Smi::FromInt(0));
holder->SetInternalField(i, Smi::kZero);
}
// NOTE: not initializing backing store.
@ -241,7 +241,7 @@ RUNTIME_FUNCTION(Runtime_TypedArrayInitializeFromArrayLike) {
}
holder->set_buffer(*buffer);
holder->set_byte_offset(Smi::FromInt(0));
holder->set_byte_offset(Smi::kZero);
Handle<Object> byte_length_obj(
isolate->factory()->NewNumberFromSize(byte_length));
holder->set_byte_length(*byte_length_obj);

View File

@ -553,7 +553,7 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// If either is a Smi (we know that not both are), then they can only
// be strictly equal if the other is a HeapNumber.
STATIC_ASSERT(kSmiTag == 0);
DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
__ AndP(r4, lhs, rhs);
__ JumpIfNotSmi(r4, &not_smis);
// One operand is a smi. EmitSmiNonsmiComparison generates code that can:
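
Aside: the AndP/JumpIfNotSmi pair above folds two smi checks into one. A minimal model, assuming the usual V8 tagging where heap pointers carry a low bit of 1 and smis a low bit of 0 (hypothetical helper):

#include <cstdint>

// (lhs & rhs) keeps the low tag bit set only if *both* words are heap
// pointers; if either operand is a smi the AND clears it. One test on the
// AND therefore separates "at least one smi" from "both heap objects".
inline bool AtLeastOneSmi(uintptr_t lhs, uintptr_t rhs) {
  const uintptr_t kHeapObjectTag = 1;
  return ((lhs & rhs) & kHeapObjectTag) == 0;
}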
@ -4315,7 +4315,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
const int kParameterMapHeaderSize =
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
__ CmpSmiLiteral(r8, Smi::FromInt(0), r0);
__ CmpSmiLiteral(r8, Smi::kZero, r0);
Label skip2, skip3;
__ bne(&skip2);
__ LoadImmP(r1, Operand::Zero());
@ -4385,7 +4385,7 @@ void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
// r8 = mapped parameter count (tagged)
// Initialize parameter map. If there are no mapped arguments, we're done.
Label skip_parameter_map;
__ CmpSmiLiteral(r8, Smi::FromInt(0), r0);
__ CmpSmiLiteral(r8, Smi::kZero, r0);
Label skip6;
__ bne(&skip6);
// Move backing store address to r3, because it is
@ -5022,7 +5022,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
__ Push(scratch, scratch);
__ mov(scratch, Operand(ExternalReference::isolate_address(isolate())));
__ Push(scratch, holder);
__ Push(Smi::FromInt(0)); // should_throw_on_error -> false
__ Push(Smi::kZero); // should_throw_on_error -> false
__ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
__ push(scratch);

@ -3171,7 +3171,7 @@ void MacroAssembler::EmitSeqStringSetCharCheck(Register string, Register index,
CmpP(index, ip);
Check(lt, kIndexIsTooLarge);
DCHECK(Smi::FromInt(0) == 0);
DCHECK(Smi::kZero == 0);
CmpP(index, Operand::Zero());
Check(ge, kIndexIsNegative);
@ -3496,7 +3496,7 @@ void MacroAssembler::CheckEnumCache(Label* call_runtime) {
// For all objects but the receiver, check that the cache is empty.
EnumLength(r5, r3);
CmpSmiLiteral(r5, Smi::FromInt(0), r0);
CmpSmiLiteral(r5, Smi::kZero, r0);
bne(call_runtime);
bind(&start);

@ -99,7 +99,7 @@ void Deserializer::Deserialize(Isolate* isolate) {
isolate_->heap()->undefined_value());
// The allocation site list is built during root iteration, but if no sites
// were encountered then it needs to be initialized to undefined.
if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
if (isolate_->heap()->allocation_sites_list() == Smi::kZero) {
isolate_->heap()->set_allocation_sites_list(
isolate_->heap()->undefined_value());
}
@ -277,7 +277,7 @@ HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj, int space) {
// TODO(mvstanton): consider treating the heap()->allocation_sites_list()
// as a (weak) root. If this root is relocated correctly, this becomes
// unnecessary.
if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
if (isolate_->heap()->allocation_sites_list() == Smi::kZero) {
site->set_weak_next(isolate_->heap()->undefined_value());
} else {
site->set_weak_next(isolate_->heap()->allocation_sites_list());

@ -64,7 +64,7 @@ void SerializerDeserializer::Iterate(Isolate* isolate, ObjectVisitor* visitor) {
List<Object*>* cache = isolate->partial_snapshot_cache();
for (int i = 0;; ++i) {
// Extend the array ready to get a value when deserializing.
if (cache->length() <= i) cache->Add(Smi::FromInt(0));
if (cache->length() <= i) cache->Add(Smi::kZero);
// During deserialization, the visitor populates the partial snapshot cache
// and eventually terminates the cache with undefined.
visitor->VisitPointer(&cache->at(i));

@ -26,7 +26,7 @@ void TransitionArray::set_next_link(Object* next, WriteBarrierMode mode) {
bool TransitionArray::HasPrototypeTransitions() {
return get(kPrototypeTransitionsIndex) != Smi::FromInt(0);
return get(kPrototypeTransitionsIndex) != Smi::kZero;
}

@ -395,7 +395,7 @@ Handle<TransitionArray> TransitionArray::Allocate(Isolate* isolate,
int slack) {
Handle<FixedArray> array = isolate->factory()->NewTransitionArray(
LengthFor(number_of_transitions + slack));
array->set(kPrototypeTransitionsIndex, Smi::FromInt(0));
array->set(kPrototypeTransitionsIndex, Smi::kZero);
array->set(kTransitionLengthIndex, Smi::FromInt(number_of_transitions));
return Handle<TransitionArray>::cast(array);
}
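
Aside: the deserializer and TransitionArray hunks above share an idiom that this patch makes uniform — a field that normally holds a heap pointer is parked at Smi::kZero to mean "empty", which can never collide with a real object. A sketch of the deserializer's list-head handling (hypothetical types):

// Prepend a site to an intrusive list whose head starts out as the empty
// sentinel (modeled by nullptr, as Smi::kZero is in the heap): the first
// site links to a terminator instead of a stale head, mirroring
// Deserializer::PostProcessNewObject above.
struct Site { const Site* weak_next = nullptr; };

const Site* Prepend(Site* site, const Site* head, const Site* terminator) {
  site->weak_next = (head == nullptr) ? terminator : head;
  return site;  // the new list head
}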
