From c9ddba2bf61fec4b31687c6cda291e174f909941 Mon Sep 17 00:00:00 2001
From: Ng Zhi An
Date: Fri, 24 Sep 2021 10:27:58 -0700
Subject: [PATCH] [cleanup][ia32][x64] Fix -Wshadow warnings in assembler

Rename the kNone enumerator of the SIMDPrefix enum to kNoPrefix so it no
longer shadows other kNone declarations.

R=adamk@chromium.org

Bug: v8:12244
Change-Id: I8604dfadea24ce5f00c710de4d3c38da9d8a27a7
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3182886
Reviewed-by: Adam Klein
Commit-Queue: Zhi An Ng
Cr-Commit-Position: refs/heads/main@{#77064}
---
 src/codegen/ia32/assembler-ia32.cc | 18 +++++++++---------
 src/codegen/ia32/assembler-ia32.h  | 32 ++++++++++++++++----------------
 src/codegen/x64/assembler-x64.cc   | 30 +++++++++++++++---------------
 src/codegen/x64/assembler-x64.h    | 14 +++++++-------
 4 files changed, 47 insertions(+), 47 deletions(-)

diff --git a/src/codegen/ia32/assembler-ia32.cc b/src/codegen/ia32/assembler-ia32.cc
index 1d77296558..b5bbcee83f 100644
--- a/src/codegen/ia32/assembler-ia32.cc
+++ b/src/codegen/ia32/assembler-ia32.cc
@@ -2875,7 +2875,7 @@ void Assembler::vss(byte op, XMMRegister dst, XMMRegister src1, Operand src2) {
 }
 
 void Assembler::vps(byte op, XMMRegister dst, XMMRegister src1, Operand src2) {
-  vinstr(op, dst, src1, src2, kNone, k0F, kWIG);
+  vinstr(op, dst, src1, src2, kNoPrefix, k0F, kWIG);
 }
 
 void Assembler::vpd(byte op, XMMRegister dst, XMMRegister src1, Operand src2) {
@@ -2890,27 +2890,27 @@ void Assembler::vshufpd(XMMRegister dst, XMMRegister src1, Operand src2,
 }
 
 void Assembler::vmovhlps(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
-  vinstr(0x12, dst, src1, src2, kNone, k0F, kWIG);
+  vinstr(0x12, dst, src1, src2, kNoPrefix, k0F, kWIG);
 }
 
 void Assembler::vmovlhps(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
-  vinstr(0x16, dst, src1, src2, kNone, k0F, kWIG);
+  vinstr(0x16, dst, src1, src2, kNoPrefix, k0F, kWIG);
 }
 
 void Assembler::vmovlps(XMMRegister dst, XMMRegister src1, Operand src2) {
-  vinstr(0x12, dst, src1, src2, kNone, k0F, kWIG);
+  vinstr(0x12, dst, src1, src2, kNoPrefix, k0F, kWIG);
 }
 
 void Assembler::vmovlps(Operand dst, XMMRegister src) {
-  vinstr(0x13, src, xmm0, dst, kNone, k0F, kWIG);
+  vinstr(0x13, src, xmm0, dst, kNoPrefix, k0F, kWIG);
 }
 
 void Assembler::vmovhps(XMMRegister dst, XMMRegister src1, Operand src2) {
-  vinstr(0x16, dst, src1, src2, kNone, k0F, kWIG);
+  vinstr(0x16, dst, src1, src2, kNoPrefix, k0F, kWIG);
 }
 
 void Assembler::vmovhps(Operand dst, XMMRegister src) {
-  vinstr(0x17, src, xmm0, dst, kNone, k0F, kWIG);
+  vinstr(0x17, src, xmm0, dst, kNoPrefix, k0F, kWIG);
 }
 
 void Assembler::vcmpps(XMMRegister dst, XMMRegister src1, Operand src2,
@@ -3094,7 +3094,7 @@ void Assembler::vmovmskpd(Register dst, XMMRegister src) {
 void Assembler::vmovmskps(Register dst, XMMRegister src) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(xmm0, kL128, kNone, k0F, kWIG);
+  emit_vex_prefix(xmm0, kL128, kNoPrefix, k0F, kWIG);
   EMIT(0x50);
   emit_sse_operand(dst, src);
 }
@@ -3119,7 +3119,7 @@ void Assembler::vpcmpgtq(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
 void Assembler::bmi1(byte op, Register reg, Register vreg, Operand rm) {
   DCHECK(IsEnabled(BMI1));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(vreg, kLZ, kNone, k0F38, kW0);
+  emit_vex_prefix(vreg, kLZ, kNoPrefix, k0F38, kW0);
   EMIT(op);
   emit_operand(reg, rm);
 }
diff --git a/src/codegen/ia32/assembler-ia32.h b/src/codegen/ia32/assembler-ia32.h
index e50c20da06..b099dfcdd3 100644
--- a/src/codegen/ia32/assembler-ia32.h
+++ b/src/codegen/ia32/assembler-ia32.h
@@ -1505,10 +1505,10 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
     vinstr(0x2E, dst, xmm0, src, k66, k0F, kWIG);
   }
   void vucomiss(XMMRegister dst, XMMRegister src) {
-    vinstr(0x2E, dst, xmm0, src, kNone, k0F, kWIG);
+    vinstr(0x2E, dst, xmm0, src, kNoPrefix, k0F, kWIG);
   }
   void vucomiss(XMMRegister dst, Operand src) {
-    vinstr(0x2E, dst, xmm0, src, kNone, k0F, kWIG);
+    vinstr(0x2E, dst, xmm0, src, kNoPrefix, k0F, kWIG);
   }
 
   // BMI instruction
@@ -1543,7 +1543,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
     bzhi(dst, Operand(src1), src2);
   }
   void bzhi(Register dst, Operand src1, Register src2) {
-    bmi2(kNone, 0xf5, dst, src2, src1);
+    bmi2(kNoPrefix, 0xf5, dst, src2, src1);
   }
   void mulx(Register dst1, Register dst2, Register src) {
     mulx(dst1, dst2, Operand(src));
@@ -1662,18 +1662,18 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
 #undef PACKED_CMP_LIST
 
   // Other SSE and AVX instructions
-#define DECLARE_SSE_UNOP_AND_AVX(instruction, escape, opcode)   \
-  void instruction(XMMRegister dst, XMMRegister src) {          \
-    instruction(dst, Operand(src));                             \
-  }                                                             \
-  void instruction(XMMRegister dst, Operand src) {              \
-    sse_instr(dst, src, 0x##escape, 0x##opcode);                \
-  }                                                             \
-  void v##instruction(XMMRegister dst, XMMRegister src) {       \
-    v##instruction(dst, Operand(src));                          \
-  }                                                             \
-  void v##instruction(XMMRegister dst, Operand src) {           \
-    vinstr(0x##opcode, dst, xmm0, src, kNone, k##escape, kWIG); \
+#define DECLARE_SSE_UNOP_AND_AVX(instruction, escape, opcode)       \
+  void instruction(XMMRegister dst, XMMRegister src) {              \
+    instruction(dst, Operand(src));                                 \
+  }                                                                 \
+  void instruction(XMMRegister dst, Operand src) {                  \
+    sse_instr(dst, src, 0x##escape, 0x##opcode);                    \
+  }                                                                 \
+  void v##instruction(XMMRegister dst, XMMRegister src) {           \
+    v##instruction(dst, Operand(src));                              \
+  }                                                                 \
+  void v##instruction(XMMRegister dst, Operand src) {               \
+    vinstr(0x##opcode, dst, xmm0, src, kNoPrefix, k##escape, kWIG); \
   }
 
   SSE_UNOP_INSTRUCTION_LIST(DECLARE_SSE_UNOP_AND_AVX)
@@ -1869,7 +1869,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   void emit_farith(int b1, int b2, int i);
 
   // Emit vex prefix
-  enum SIMDPrefix { kNone = 0x0, k66 = 0x1, kF3 = 0x2, kF2 = 0x3 };
+  enum SIMDPrefix { kNoPrefix = 0x0, k66 = 0x1, kF3 = 0x2, kF2 = 0x3 };
   enum VectorLength { kL128 = 0x0, kL256 = 0x4, kLIG = kL128, kLZ = kL128 };
   enum VexW { kW0 = 0x0, kW1 = 0x80, kWIG = kW0 };
   enum LeadingOpcode { k0F = 0x1, k0F38 = 0x2, k0F3A = 0x3 };
diff --git a/src/codegen/x64/assembler-x64.cc b/src/codegen/x64/assembler-x64.cc
index 9b6c5a175c..1c5723c5a3 100644
--- a/src/codegen/x64/assembler-x64.cc
+++ b/src/codegen/x64/assembler-x64.cc
@@ -3648,7 +3648,7 @@ void Assembler::vmovdqu(YMMRegister dst, YMMRegister src) {
 void Assembler::vmovlps(XMMRegister dst, XMMRegister src1, Operand src2) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(dst, src1, src2, kL128, kNone, k0F, kWIG);
+  emit_vex_prefix(dst, src1, src2, kL128, kNoPrefix, k0F, kWIG);
   emit(0x12);
   emit_sse_operand(dst, src2);
 }
@@ -3656,7 +3656,7 @@ void Assembler::vmovlps(XMMRegister dst, XMMRegister src1, Operand src2) {
 void Assembler::vmovlps(Operand dst, XMMRegister src) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(src, xmm0, dst, kL128, kNone, k0F, kWIG);
+  emit_vex_prefix(src, xmm0, dst, kL128, kNoPrefix, k0F, kWIG);
   emit(0x13);
   emit_sse_operand(src, dst);
 }
@@ -3664,7 +3664,7 @@ void Assembler::vmovlps(Operand dst, XMMRegister src) {
 void Assembler::vmovhps(XMMRegister dst, XMMRegister src1, Operand src2) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(dst, src1, src2, kL128, kNone, k0F, kWIG);
+  emit_vex_prefix(dst, src1, src2, kL128, kNoPrefix, k0F, kWIG);
   emit(0x16);
   emit_sse_operand(dst, src2);
 }
@@ -3672,7 +3672,7 @@ void Assembler::vmovhps(XMMRegister dst, XMMRegister src1, Operand src2) {
 void Assembler::vmovhps(Operand dst, XMMRegister src) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(src, xmm0, dst, kL128, kNone, k0F, kWIG);
+  emit_vex_prefix(src, xmm0, dst, kL128, kNoPrefix, k0F, kWIG);
   emit(0x17);
   emit_sse_operand(src, dst);
 }
@@ -3725,7 +3725,7 @@ void Assembler::vps(byte op, XMMRegister dst, XMMRegister src1,
                     XMMRegister src2) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(dst, src1, src2, kL128, kNone, k0F, kWIG);
+  emit_vex_prefix(dst, src1, src2, kL128, kNoPrefix, k0F, kWIG);
   emit(op);
   emit_sse_operand(dst, src2);
 }
@@ -3734,7 +3734,7 @@ void Assembler::vps(byte op, YMMRegister dst, YMMRegister src1,
                     YMMRegister src2) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(dst, src1, src2, kL256, kNone, k0F, kWIG);
+  emit_vex_prefix(dst, src1, src2, kL256, kNoPrefix, k0F, kWIG);
   emit(op);
   emit_sse_operand(dst, src2);
 }
@@ -3742,7 +3742,7 @@ void Assembler::vps(byte op, YMMRegister dst, YMMRegister src1,
 void Assembler::vps(byte op, XMMRegister dst, XMMRegister src1, Operand src2) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(dst, src1, src2, kL128, kNone, k0F, kWIG);
+  emit_vex_prefix(dst, src1, src2, kL128, kNoPrefix, k0F, kWIG);
   emit(op);
   emit_sse_operand(dst, src2);
 }
@@ -3750,7 +3750,7 @@ void Assembler::vps(byte op, XMMRegister dst, XMMRegister src1, Operand src2) {
 void Assembler::vps(byte op, YMMRegister dst, YMMRegister src1, Operand src2) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(dst, src1, src2, kL256, kNone, k0F, kWIG);
+  emit_vex_prefix(dst, src1, src2, kL256, kNoPrefix, k0F, kWIG);
   emit(op);
   emit_sse_operand(dst, src2);
 }
@@ -3759,7 +3759,7 @@ void Assembler::vps(byte op, XMMRegister dst, XMMRegister src1,
                     XMMRegister src2, byte imm8) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(dst, src1, src2, kL128, kNone, k0F, kWIG);
+  emit_vex_prefix(dst, src1, src2, kL128, kNoPrefix, k0F, kWIG);
   emit(op);
   emit_sse_operand(dst, src2);
   emit(imm8);
@@ -3785,7 +3785,7 @@ void Assembler::vpd(byte op, XMMRegister dst, XMMRegister src1, Operand src2) {
 void Assembler::vucomiss(XMMRegister dst, XMMRegister src) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(dst, xmm0, src, kLIG, kNone, k0F, kWIG);
+  emit_vex_prefix(dst, xmm0, src, kLIG, kNoPrefix, k0F, kWIG);
   emit(0x2E);
   emit_sse_operand(dst, src);
 }
@@ -3793,7 +3793,7 @@ void Assembler::vucomiss(XMMRegister dst, XMMRegister src) {
 void Assembler::vucomiss(XMMRegister dst, Operand src) {
   DCHECK(IsEnabled(AVX));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(dst, xmm0, src, kLIG, kNone, k0F, kWIG);
+  emit_vex_prefix(dst, xmm0, src, kLIG, kNoPrefix, k0F, kWIG);
   emit(0x2E);
   emit_sse_operand(dst, src);
 }
@@ -3827,7 +3827,7 @@ void Assembler::vss(byte op, XMMRegister dst, XMMRegister src1, Operand src2) {
 void Assembler::bmi1q(byte op, Register reg, Register vreg, Register rm) {
   DCHECK(IsEnabled(BMI1));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(reg, vreg, rm, kLZ, kNone, k0F38, kW1);
+  emit_vex_prefix(reg, vreg, rm, kLZ, kNoPrefix, k0F38, kW1);
   emit(op);
   emit_modrm(reg, rm);
 }
@@ -3835,7 +3835,7 @@ void Assembler::bmi1q(byte op, Register reg, Register vreg, Register rm) {
 void Assembler::bmi1q(byte op, Register reg, Register vreg, Operand rm) {
   DCHECK(IsEnabled(BMI1));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(reg, vreg, rm, kLZ, kNone, k0F38, kW1);
+  emit_vex_prefix(reg, vreg, rm, kLZ, kNoPrefix, k0F38, kW1);
   emit(op);
   emit_operand(reg, rm);
 }
@@ -3843,7 +3843,7 @@ void Assembler::bmi1q(byte op, Register reg, Register vreg, Operand rm) {
 void Assembler::bmi1l(byte op, Register reg, Register vreg, Register rm) {
   DCHECK(IsEnabled(BMI1));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(reg, vreg, rm, kLZ, kNone, k0F38, kW0);
+  emit_vex_prefix(reg, vreg, rm, kLZ, kNoPrefix, k0F38, kW0);
   emit(op);
   emit_modrm(reg, rm);
 }
@@ -3851,7 +3851,7 @@ void Assembler::bmi1l(byte op, Register reg, Register vreg, Register rm) {
 void Assembler::bmi1l(byte op, Register reg, Register vreg, Operand rm) {
   DCHECK(IsEnabled(BMI1));
   EnsureSpace ensure_space(this);
-  emit_vex_prefix(reg, vreg, rm, kLZ, kNone, k0F38, kW0);
+  emit_vex_prefix(reg, vreg, rm, kLZ, kNoPrefix, k0F38, kW0);
   emit(op);
   emit_operand(reg, rm);
 }
diff --git a/src/codegen/x64/assembler-x64.h b/src/codegen/x64/assembler-x64.h
index a89cfbf8f8..2dfdb88a3b 100644
--- a/src/codegen/x64/assembler-x64.h
+++ b/src/codegen/x64/assembler-x64.h
@@ -486,7 +486,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   static constexpr byte kJzShortOpcode = kJccShortPrefix | zero;
 
   // VEX prefix encodings.
-  enum SIMDPrefix { kNone = 0x0, k66 = 0x1, kF3 = 0x2, kF2 = 0x3 };
+  enum SIMDPrefix { kNoPrefix = 0x0, k66 = 0x1, kF3 = 0x2, kF2 = 0x3 };
   enum VectorLength { kL128 = 0x0, kL256 = 0x4, kLIG = kL128, kLZ = kL128 };
   enum VexW { kW0 = 0x0, kW1 = 0x80, kWIG = kW0 };
   enum LeadingOpcode { k0F = 0x1, k0F38 = 0x2, k0F3A = 0x3 };
@@ -1411,10 +1411,10 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
 #undef AVX_SSE2_SHIFT_IMM
 
   void vmovlhps(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
-    vinstr(0x16, dst, src1, src2, kNone, k0F, kWIG);
+    vinstr(0x16, dst, src1, src2, kNoPrefix, k0F, kWIG);
   }
   void vmovhlps(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
-    vinstr(0x12, dst, src1, src2, kNone, k0F, kWIG);
+    vinstr(0x12, dst, src1, src2, kNoPrefix, k0F, kWIG);
   }
   void vcvtdq2pd(XMMRegister dst, XMMRegister src) {
     vinstr(0xe6, dst, xmm0, src, kF3, k0F, kWIG);
@@ -1783,16 +1783,16 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   void popcntl(Register dst, Operand src);
 
   void bzhiq(Register dst, Register src1, Register src2) {
-    bmi2q(kNone, 0xf5, dst, src2, src1);
+    bmi2q(kNoPrefix, 0xf5, dst, src2, src1);
   }
   void bzhiq(Register dst, Operand src1, Register src2) {
-    bmi2q(kNone, 0xf5, dst, src2, src1);
+    bmi2q(kNoPrefix, 0xf5, dst, src2, src1);
   }
   void bzhil(Register dst, Register src1, Register src2) {
-    bmi2l(kNone, 0xf5, dst, src2, src1);
+    bmi2l(kNoPrefix, 0xf5, dst, src2, src1);
   }
   void bzhil(Register dst, Operand src1, Register src2) {
-    bmi2l(kNone, 0xf5, dst, src2, src1);
+    bmi2l(kNoPrefix, 0xf5, dst, src2, src1);
   }
   void mulxq(Register dst1, Register dst2, Register src) {
     bmi2q(kF2, 0xf6, dst1, dst2, src);
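
Note on the warning this rename addresses: SIMDPrefix is an unscoped enum,
so its enumerators are injected directly into the Assembler class scope,
where a name like kNone hides any other kNone visible from an enclosing
scope. That is the kind of shadowing -Wshadow starts reporting once the
flag is enabled (bug v8:12244). The stand-alone sketch below mirrors the
before/after shape of the change; the codegen namespace, the outer kNone
constant, SIMDPrefixBefore, EncodePP and the file name are illustrative
only, since the patch itself does not show which declaration was being
shadowed, and the exact diagnostic text varies by compiler.

// shadow_sketch.cc: illustrative only, not V8 code.
// Build with warnings enabled, e.g. clang++ -std=c++17 -Wshadow -c shadow_sketch.cc
namespace codegen {

// Stand-in for whatever pre-existing kNone the assembler's enumerator
// collided with once -Wshadow was enabled.
constexpr int kNone = 0;
static_assert(kNone == 0, "sanity check");

class Assembler {
 public:
  // Before the rename: an unscoped enumerator named kNone lands directly in
  // the class scope and hides codegen::kNone inside Assembler, which is the
  // sort of declaration -Wshadow (or related -Wshadow* variants) flags.
  enum SIMDPrefixBefore { kNone = 0x0 };

  // After the rename: same encoding values, no name collision.
  enum SIMDPrefix { kNoPrefix = 0x0, k66 = 0x1, kF3 = 0x2, kF2 = 0x3 };

  // Stand-in for emit_vex_prefix: the pp field of a VEX prefix is just the
  // numeric value of the enumerator.
  int EncodePP(SIMDPrefix pp) const { return static_cast<int>(pp); }
};

}  // namespace codegen

int main() {
  codegen::Assembler masm;
  // Call sites stay a one-identifier edit: kNone becomes kNoPrefix.
  return masm.EncodePP(codegen::Assembler::kNoPrefix);  // returns 0
}

Renaming just the enumerator, rather than turning SIMDPrefix into an enum
class, keeps every use site a single-identifier change, which is all the
hunks above contain.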