[config] Add V8_NODISCARD for Scope classes

Scopes in V8 are used to guarantee one or more properties during their
lifetimes. If a scope is not named, e.g. MyClassScope(args) instead of
MyClassScope scope(args), it gets created and immediately destroyed,
and is therefore useless as a scope. This CL produces a compile warning
when that happens, to ward off this developer error.
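
To illustrate the warning (a minimal hypothetical sketch; MyClassScope
here stands in for any of the Scope classes touched below):

  class V8_NODISCARD MyClassScope {
   public:
    MyClassScope(Isolate* isolate, int flags) { /* enter the scope */ }
    ~MyClassScope() { /* leave the scope */ }
  };

  void Example(Isolate* isolate) {
    MyClassScope(isolate, 0);        // Temporary, destroyed at the ';'.
                                     // V8_NODISCARD makes this warn.
    MyClassScope scope(isolate, 0);  // Named, lives to the end of the
                                     // enclosing block, as intended.
  }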

Follow-up to crrev.com/2552415, in which V8_NODISCARD was introduced
and implemented for Guard classes.
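
For reference, a rough sketch of how the macro is defined in
include/v8config.h (modulo the exact feature-detection macro):

  #if V8_HAS_CPP_ATTRIBUTE_NODISCARD
  #define V8_NODISCARD [[nodiscard]]
  #else
  #define V8_NODISCARD
  #endif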

Change-Id: Ifa0fb89cc3d9bdcdee0fd8150a2618af5ef45cbf
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2555001
Commit-Queue: Santiago Aboy Solanes <solanes@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71425}
Authored by Santiago Aboy Solanes on 2020-11-26 10:08:27 +00:00; committed by Commit Bot
parent 544ea1513e
commit 14c5b0ae67
116 changed files with 203 additions and 202 deletions


@ -18,7 +18,7 @@ namespace internal {
namespace {
class InvokeScope {
class V8_NODISCARD InvokeScope {
public:
explicit InvokeScope(Isolate* isolate)
: isolate_(isolate), save_context_(isolate) {}
@ -148,7 +148,7 @@ void EnableAccessChecks(Isolate* isolate, Handle<JSObject> object) {
JSObject::MigrateToMap(isolate, object, new_map);
}
class AccessCheckDisableScope {
class V8_NODISCARD AccessCheckDisableScope {
public:
AccessCheckDisableScope(Isolate* isolate, Handle<JSObject> obj)
: isolate_(isolate),


@ -264,7 +264,7 @@ namespace v8 {
namespace {
class InternalEscapableScope : public v8::EscapableHandleScope {
class V8_NODISCARD InternalEscapableScope : public v8::EscapableHandleScope {
public:
explicit inline InternalEscapableScope(i::Isolate* isolate)
: v8::EscapableHandleScope(reinterpret_cast<v8::Isolate*>(isolate)) {}
@ -282,7 +282,7 @@ void CheckMicrotasksScopesConsistency(i::MicrotaskQueue* microtask_queue) {
#endif
template <bool do_callback>
class CallDepthScope {
class V8_NODISCARD CallDepthScope {
public:
CallDepthScope(i::Isolate* isolate, Local<Context> context)
: isolate_(isolate),


@ -317,7 +317,7 @@ class PersistentHandles;
// data.
class HandleScopeImplementer {
public:
class EnteredContextRewindScope {
class V8_NODISCARD EnteredContextRewindScope {
public:
explicit EnteredContextRewindScope(HandleScopeImplementer* hsi)
: hsi_(hsi), saved_entered_context_count_(hsi->EnteredContextCount()) {}


@ -179,7 +179,7 @@ bool AsmJsParser::Run() {
return !failed_;
}
class AsmJsParser::TemporaryVariableScope {
class V8_NODISCARD AsmJsParser::TemporaryVariableScope {
public:
explicit TemporaryVariableScope(AsmJsParser* parser) : parser_(parser) {
local_depth_ = parser_->function_temp_locals_depth_;


@ -721,7 +721,7 @@ void AstPrinter::PrintLiteral(const AstConsString* value, bool quote) {
//-----------------------------------------------------------------------------
class IndentedScope {
class V8_NODISCARD IndentedScope {
public:
IndentedScope(AstPrinter* printer, const char* txt)
: ast_printer_(printer) {
@ -745,7 +745,6 @@ class IndentedScope {
AstPrinter* ast_printer_;
};
//-----------------------------------------------------------------------------
AstPrinter::AstPrinter(uintptr_t stack_limit)


@ -1027,7 +1027,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
bool ImmediateFitsAddrMode2Instruction(int32_t imm32);
// Class for scoping postponing the constant pool generation.
class BlockConstPoolScope {
class V8_NODISCARD BlockConstPoolScope {
public:
explicit BlockConstPoolScope(Assembler* assem) : assem_(assem) {
assem_->StartBlockConstPool();
@ -1339,7 +1339,7 @@ class PatchingAssembler : public Assembler {
// state, even if the list is modified by some other means. Note that this scope
// can be nested but the destructors need to run in the opposite order as the
// constructors. We do not have assertions for this.
class V8_EXPORT_PRIVATE UseScratchRegisterScope {
class V8_EXPORT_PRIVATE V8_NODISCARD UseScratchRegisterScope {
public:
explicit UseScratchRegisterScope(Assembler* assembler);
~UseScratchRegisterScope();


@ -2398,7 +2398,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
using BlockConstPoolScope = ConstantPool::BlockScope;
class BlockPoolsScope {
class V8_NODISCARD BlockPoolsScope {
public:
// Block veneer and constant pool. Emits pools if necessary to ensure that
// {margin} more bytes can be emitted without triggering pool emission.


@ -2022,7 +2022,7 @@ class V8_EXPORT_PRIVATE MacroAssembler : public TurboAssembler {
// instructions. This scope prevents the MacroAssembler from being called and
// literal pools from being emitted. It also asserts the number of instructions
// emitted is what you specified when creating the scope.
class InstructionAccurateScope {
class V8_NODISCARD InstructionAccurateScope {
public:
explicit InstructionAccurateScope(TurboAssembler* tasm, size_t count = 0)
: tasm_(tasm),
@ -2072,7 +2072,7 @@ class InstructionAccurateScope {
// original state, even if the lists were modified by some other means. Note
// that this scope can be nested but the destructors need to run in the opposite
// order as the constructors. We do not have assertions for this.
class UseScratchRegisterScope {
class V8_NODISCARD UseScratchRegisterScope {
public:
explicit UseScratchRegisterScope(TurboAssembler* tasm)
: available_(tasm->TmpList()),


@ -388,7 +388,7 @@ class V8_EXPORT_PRIVATE AssemblerBase : public Malloced {
};
// Avoids emitting debug code during the lifetime of this scope object.
class DontEmitDebugCodeScope {
class V8_NODISCARD DontEmitDebugCodeScope {
public:
explicit DontEmitDebugCodeScope(AssemblerBase* assembler)
: assembler_(assembler), old_value_(assembler->emit_debug_code()) {
@ -402,7 +402,7 @@ class DontEmitDebugCodeScope {
};
// Enable a specified feature within a scope.
class V8_EXPORT_PRIVATE CpuFeatureScope {
class V8_EXPORT_PRIVATE V8_NODISCARD CpuFeatureScope {
public:
enum CheckPolicy {
kCheckSupported,


@ -1532,7 +1532,7 @@ namespace {
// A scope object that ensures a parse info's runtime call stats and stack limit
// are set correctly during worker-thread compile, and restores it after going
// out of scope.
class OffThreadParseInfoScope {
class V8_NODISCARD OffThreadParseInfoScope {
public:
OffThreadParseInfoScope(
ParseInfo* parse_info,


@ -438,7 +438,7 @@ class DeferredFinalizationJobData {
// A wrapper around a OptimizedCompilationInfo that detaches the Handles from
// the underlying PersistentHandlesScope and stores them in info_ on
// destruction.
class CompilationHandleScope final {
class V8_NODISCARD CompilationHandleScope final {
public:
explicit CompilationHandleScope(Isolate* isolate,
OptimizedCompilationInfo* info)


@ -282,7 +282,7 @@ class ConstantPool {
void SetNextCheckIn(size_t instructions);
// Class for scoping postponing the constant pool generation.
class V8_EXPORT_PRIVATE BlockScope {
class V8_EXPORT_PRIVATE V8_NODISCARD BlockScope {
public:
// BlockScope immediately emits the pool if necessary to ensure that
// during the block scope at least {margin} bytes can be emitted without


@ -69,7 +69,7 @@ static constexpr int kMaxCParameters = 10;
static constexpr int kMaxCParameters = 256;
#endif
class FrameScope {
class V8_NODISCARD FrameScope {
public:
explicit FrameScope(TurboAssembler* tasm, StackFrame::Type type)
: tasm_(tasm), type_(type), old_has_frame_(tasm->has_frame()) {
@ -92,7 +92,7 @@ class FrameScope {
bool old_has_frame_;
};
class FrameAndConstantPoolScope {
class V8_NODISCARD FrameAndConstantPoolScope {
public:
FrameAndConstantPoolScope(MacroAssembler* masm, StackFrame::Type type)
: masm_(masm),
@ -127,7 +127,7 @@ class FrameAndConstantPoolScope {
};
// Class for scoping the unavailability of constant pool access.
class ConstantPoolUnavailableScope {
class V8_NODISCARD ConstantPoolUnavailableScope {
public:
explicit ConstantPoolUnavailableScope(Assembler* assembler)
: assembler_(assembler),
@ -150,7 +150,7 @@ class ConstantPoolUnavailableScope {
DISALLOW_IMPLICIT_CONSTRUCTORS(ConstantPoolUnavailableScope);
};
class AllowExternalCallThatCantCauseGC : public FrameScope {
class V8_NODISCARD AllowExternalCallThatCantCauseGC : public FrameScope {
public:
explicit AllowExternalCallThatCantCauseGC(MacroAssembler* masm)
: FrameScope(masm, StackFrame::NONE) {}
@ -158,7 +158,7 @@ class AllowExternalCallThatCantCauseGC : public FrameScope {
// Prevent the use of the RootArray during the lifetime of this
// scope object.
class NoRootArrayScope {
class V8_NODISCARD NoRootArrayScope {
public:
explicit NoRootArrayScope(TurboAssembler* masm)
: masm_(masm), old_value_(masm->root_array_available()) {


@ -1372,7 +1372,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
}
// Class for scoping postponing the trampoline pool generation.
class BlockTrampolinePoolScope {
class V8_NODISCARD BlockTrampolinePoolScope {
public:
explicit BlockTrampolinePoolScope(Assembler* assem) : assem_(assem) {
assem_->StartBlockTrampolinePool();
@ -1389,7 +1389,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
// sequences of instructions that must be emitted as a unit, before
// buffer growth (and relocation) can occur.
// This blocking scope is not nestable.
class BlockGrowBufferScope {
class V8_NODISCARD BlockGrowBufferScope {
public:
explicit BlockGrowBufferScope(Assembler* assem) : assem_(assem) {
assem_->StartBlockGrowBuffer();
@ -1908,7 +1908,7 @@ class EnsureSpace {
explicit inline EnsureSpace(Assembler* assembler);
};
class V8_EXPORT_PRIVATE UseScratchRegisterScope {
class V8_EXPORT_PRIVATE V8_NODISCARD UseScratchRegisterScope {
public:
explicit UseScratchRegisterScope(Assembler* assembler);
~UseScratchRegisterScope();


@ -1433,7 +1433,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
}
// Class for scoping postponing the trampoline pool generation.
class BlockTrampolinePoolScope {
class V8_NODISCARD BlockTrampolinePoolScope {
public:
explicit BlockTrampolinePoolScope(Assembler* assem) : assem_(assem) {
assem_->StartBlockTrampolinePool();
@ -1450,7 +1450,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
// sequences of instructions that must be emitted as a unit, before
// buffer growth (and relocation) can occur.
// This blocking scope is not nestable.
class BlockGrowBufferScope {
class V8_NODISCARD BlockGrowBufferScope {
public:
explicit BlockGrowBufferScope(Assembler* assem) : assem_(assem) {
assem_->StartBlockGrowBuffer();
@ -1936,7 +1936,7 @@ class EnsureSpace {
explicit inline EnsureSpace(Assembler* assembler);
};
class V8_EXPORT_PRIVATE UseScratchRegisterScope {
class V8_EXPORT_PRIVATE V8_NODISCARD UseScratchRegisterScope {
public:
explicit UseScratchRegisterScope(Assembler* assembler);
~UseScratchRegisterScope();


@ -1076,7 +1076,7 @@ class Assembler : public AssemblerBase {
}
// Class for scoping postponing the trampoline pool generation.
class BlockTrampolinePoolScope {
class V8_NODISCARD BlockTrampolinePoolScope {
public:
explicit BlockTrampolinePoolScope(Assembler* assem) : assem_(assem) {
assem_->StartBlockTrampolinePool();
@ -1090,7 +1090,7 @@ class Assembler : public AssemblerBase {
};
// Class for scoping disabling constant pool entry merging
class BlockConstantPoolEntrySharingScope {
class V8_NODISCARD BlockConstantPoolEntrySharingScope {
public:
explicit BlockConstantPoolEntrySharingScope(Assembler* assem)
: assem_(assem) {
@ -1416,7 +1416,7 @@ class PatchingAssembler : public Assembler {
~PatchingAssembler();
};
class V8_EXPORT_PRIVATE UseScratchRegisterScope {
class V8_EXPORT_PRIVATE V8_NODISCARD UseScratchRegisterScope {
public:
explicit UseScratchRegisterScope(Assembler* assembler);
~UseScratchRegisterScope();


@ -1477,7 +1477,7 @@ class EnsureSpace {
explicit EnsureSpace(Assembler* assembler) { assembler->CheckBuffer(); }
};
class V8_EXPORT_PRIVATE UseScratchRegisterScope {
class V8_EXPORT_PRIVATE V8_NODISCARD UseScratchRegisterScope {
public:
explicit UseScratchRegisterScope(Assembler* assembler);
~UseScratchRegisterScope();


@ -139,7 +139,7 @@ class V8_EXPORT_PRIVATE TurboAssemblerBase : public Assembler {
// Avoids emitting calls to the {Builtins::kAbort} builtin when emitting debug
// code during the lifetime of this scope object. For disabling debug code
// entirely use the {DontEmitDebugCodeScope} instead.
class HardAbortScope {
class V8_NODISCARD HardAbortScope {
public:
explicit HardAbortScope(TurboAssemblerBase* assembler)
: assembler_(assembler), old_value_(assembler->should_abort_hard()) {


@ -39,7 +39,7 @@ enum PerIsolateAssertType {
};
template <PerThreadAssertType kType, bool kAllow>
class PerThreadAssertScope {
class V8_NODISCARD PerThreadAssertScope {
public:
V8_EXPORT_PRIVATE PerThreadAssertScope();
V8_EXPORT_PRIVATE ~PerThreadAssertScope();
@ -56,7 +56,7 @@ class PerThreadAssertScope {
};
template <PerIsolateAssertType kType, bool kAllow>
class PerIsolateAssertScope {
class V8_NODISCARD PerIsolateAssertScope {
public:
V8_EXPORT_PRIVATE explicit PerIsolateAssertScope(Isolate* isolate);
V8_EXPORT_PRIVATE ~PerIsolateAssertScope();
@ -75,7 +75,7 @@ class CombinationAssertScope;
// Base case for CombinationAssertScope (equivalent to Scope).
template <typename Scope>
class CombinationAssertScope<Scope> : public Scope {
class V8_NODISCARD CombinationAssertScope<Scope> : public Scope {
public:
V8_EXPORT_PRIVATE static bool IsAllowed() {
// Define IsAllowed() explicitly rather than with using Scope::IsAllowed, to
@ -115,7 +115,7 @@ template <PerThreadAssertType kType, bool kAllow>
class PerThreadAssertScopeDebugOnly
: public PerThreadAssertScope<kType, kAllow> {
#else
class PerThreadAssertScopeDebugOnly {
class V8_NODISCARD PerThreadAssertScopeDebugOnly {
public:
PerThreadAssertScopeDebugOnly() { // NOLINT (modernize-use-equals-default)
// Define a constructor to avoid unused variable warnings.
@ -132,7 +132,7 @@ class PerIsolateAssertScopeDebugOnly
explicit PerIsolateAssertScopeDebugOnly(Isolate* isolate)
: PerIsolateAssertScope<kType, kAllow>(isolate) {}
#else
class PerIsolateAssertScopeDebugOnly {
class V8_NODISCARD PerIsolateAssertScopeDebugOnly {
public:
explicit PerIsolateAssertScopeDebugOnly(Isolate* isolate) {}
#endif


@ -649,7 +649,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
void SetSourcePosition(const char* file, int line);
void PushSourcePosition();
void PopSourcePosition();
class SourcePositionScope {
class V8_NODISCARD SourcePositionScope {
public:
explicit SourcePositionScope(CodeAssembler* ca) : ca_(ca) {
ca->PushSourcePosition();
@ -1565,7 +1565,7 @@ class V8_EXPORT_PRIVATE CodeAssemblerState {
VariableId NextVariableId() { return next_variable_id_++; }
};
class V8_EXPORT_PRIVATE ScopedExceptionHandler {
class V8_EXPORT_PRIVATE V8_NODISCARD ScopedExceptionHandler {
public:
ScopedExceptionHandler(CodeAssembler* assembler,
CodeAssemblerExceptionHandlerLabel* label);


@ -17,7 +17,7 @@ namespace compiler {
class V8_EXPORT_PRIVATE SourcePositionTable final
: public NON_EXPORTED_BASE(ZoneObject) {
public:
class Scope final {
class V8_NODISCARD Scope final {
public:
Scope(SourcePositionTable* source_positions, SourcePosition position)
: source_positions_(source_positions),


@ -126,7 +126,7 @@ class VariableTracker {
Node* Get(Variable var, Node* effect) { return table_.Get(effect).Get(var); }
Zone* zone() { return zone_; }
class Scope : public ReduceScope {
class V8_NODISCARD Scope : public ReduceScope {
public:
Scope(VariableTracker* tracker, Node* node, Reduction* reduction);
~Scope();
@ -174,7 +174,7 @@ class EscapeAnalysisTracker : public ZoneObject {
EscapeAnalysisTracker(const EscapeAnalysisTracker&) = delete;
EscapeAnalysisTracker& operator=(const EscapeAnalysisTracker&) = delete;
class Scope : public VariableTracker::Scope {
class V8_NODISCARD Scope : public VariableTracker::Scope {
public:
Scope(EffectGraphReducer* reducer, EscapeAnalysisTracker* tracker,
Node* node, Reduction* reduction)


@ -435,11 +435,11 @@ class V8_EXPORT_PRIVATE GraphAssembler {
// All labels created while a LoopScope is live are considered to be inside
// the loop.
template <MachineRepresentation... Reps>
class LoopScope final {
class V8_NODISCARD LoopScope final {
private:
// The internal scope is only here to increment the graph assembler's
// nesting level prior to `loop_header_label` creation below.
class LoopScopeInternal {
class V8_NODISCARD LoopScopeInternal {
public:
explicit LoopScopeInternal(GraphAssembler* gasm)
: previous_loop_nesting_level_(gasm->loop_nesting_level_),
@ -486,7 +486,7 @@ class V8_EXPORT_PRIVATE GraphAssembler {
};
// Upon destruction, restores effect and control to the state at construction.
class RestoreEffectControlScope {
class V8_NODISCARD RestoreEffectControlScope {
public:
explicit RestoreEffectControlScope(GraphAssembler* gasm)
: gasm_(gasm), effect_(gasm->effect()), control_(gasm->control()) {}


@ -40,7 +40,7 @@ class V8_EXPORT_PRIVATE Graph final : public NON_EXPORTED_BASE(ZoneObject) {
// Scope used when creating a subgraph for inlining. Automatically preserves
// the original start and end nodes of the graph, and resets them when you
// leave the scope.
class SubgraphScope final {
class V8_NODISCARD SubgraphScope final {
public:
explicit SubgraphScope(Graph* graph)
: graph_(graph), start_(graph->start()), end_(graph->end()) {}


@ -318,7 +318,7 @@ class JSCallReducerAssembler : public JSGraphAssembler {
// custom catch logic within the reduction itself; or a catch handler in the
// outside graph into which the reduction will be integrated (in this case
// the scope is called 'outermost').
class CatchScope {
class V8_NODISCARD CatchScope {
private:
// Only used to partially construct the outermost scope.
explicit CatchScope(Zone* zone) : if_exception_nodes_(zone) {}


@ -414,7 +414,7 @@ class V8_EXPORT_PRIVATE JSHeapBroker {
static const uint32_t kInitialRefsBucketCount = 1024; // must be power of 2
};
class TraceScope {
class V8_NODISCARD TraceScope {
public:
TraceScope(JSHeapBroker* broker, const char* label)
: TraceScope(broker, static_cast<void*>(broker), label) {}
@ -473,7 +473,7 @@ class OffHeapBytecodeArray final : public interpreter::AbstractBytecodeArray {
// d) The given condition evaluates to true.
// Used, for example, when printing the graph with --trace-turbo with a
// previously parked LocalHeap.
class UnparkedScopeIfNeeded {
class V8_NODISCARD UnparkedScopeIfNeeded {
public:
explicit UnparkedScopeIfNeeded(JSHeapBroker* broker,
bool extra_condition = true) {


@ -67,7 +67,7 @@ inline bool operator!=(const NodeOrigin& lhs, const NodeOrigin& rhs) {
class V8_EXPORT_PRIVATE NodeOriginTable final
: public NON_EXPORTED_BASE(ZoneObject) {
public:
class Scope final {
class V8_NODISCARD Scope final {
public:
Scope(NodeOriginTable* origins, const char* reducer_name, Node* node)
: origins_(origins), prev_origin_(NodeOrigin::Unknown()) {
@ -90,7 +90,7 @@ class V8_EXPORT_PRIVATE NodeOriginTable final
NodeOrigin prev_origin_;
};
class PhaseScope final {
class V8_NODISCARD PhaseScope final {
public:
PhaseScope(NodeOriginTable* origins, const char* phase_name)
: origins_(origins) {


@ -74,8 +74,7 @@ class PipelineStatistics : public Malloced {
CommonStats phase_stats_;
};
class PhaseScope {
class V8_NODISCARD PhaseScope {
public:
PhaseScope(PipelineStatistics* pipeline_stats, const char* name)
: pipeline_stats_(pipeline_stats) {


@ -744,7 +744,7 @@ class NodeOriginsWrapper final : public Reducer {
NodeOriginTable* const table_;
};
class PipelineRunScope {
class V8_NODISCARD PipelineRunScope {
public:
PipelineRunScope(
PipelineData* data, const char* phase_name,
@ -769,7 +769,7 @@ class PipelineRunScope {
// LocalIsolateScope encapsulates the phase where persistent handles are
// attached to the LocalHeap inside {local_isolate}.
class LocalIsolateScope {
class V8_NODISCARD LocalIsolateScope {
public:
explicit LocalIsolateScope(JSHeapBroker* broker,
OptimizedCompilationInfo* info,
@ -1096,7 +1096,7 @@ namespace {
// duration of the job phase and unset immediately afterwards. Each job
// needs to set the correct RuntimeCallStats table depending on whether it
// is running on a background or foreground thread.
class PipelineJobScope {
class V8_NODISCARD PipelineJobScope {
public:
PipelineJobScope(PipelineData* data, RuntimeCallStats* stats) : data_(data) {
data_->set_runtime_call_stats(stats);


@ -18,7 +18,7 @@ namespace compiler {
class V8_EXPORT_PRIVATE ZoneStats final {
public:
class Scope final {
class V8_NODISCARD Scope final {
public:
explicit Scope(ZoneStats* zone_stats, const char* zone_name,
bool support_zone_compression = false)
@ -51,7 +51,7 @@ class V8_EXPORT_PRIVATE ZoneStats final {
const bool support_zone_compression_;
};
class V8_EXPORT_PRIVATE StatsScope final {
class V8_EXPORT_PRIVATE V8_NODISCARD StatsScope final {
public:
explicit StatsScope(ZoneStats* zone_stats);
~StatsScope();


@ -242,7 +242,7 @@ class PerIsolateData {
return reinterpret_cast<PerIsolateData*>(isolate->GetData(0));
}
class RealmScope {
class V8_NODISCARD RealmScope {
public:
explicit RealmScope(PerIsolateData* data);
~RealmScope();


@ -531,7 +531,7 @@ void ForceGarbageCollection(
v8::Isolate* isolate,
v8::EmbedderHeapTracer::EmbedderStackState embedder_stack_state);
class PostponeInterruptsScope {
class V8_NODISCARD PostponeInterruptsScope {
public:
explicit PostponeInterruptsScope(v8::Isolate* isolate);
~PostponeInterruptsScope();
@ -540,7 +540,7 @@ class PostponeInterruptsScope {
std::unique_ptr<i::PostponeInterruptsScope> scope_;
};
class DisableBreakScope {
class V8_NODISCARD DisableBreakScope {
public:
explicit DisableBreakScope(v8::Isolate* isolate);
~DisableBreakScope();


@ -565,7 +565,7 @@ class V8_EXPORT_PRIVATE Debug {
// This scope is used to load and enter the debug context and create a new
// break state. Leaving the scope will restore the previous state.
class DebugScope {
class V8_NODISCARD DebugScope {
public:
explicit DebugScope(Debug* debug);
~DebugScope();
@ -587,7 +587,7 @@ class DebugScope {
// When there are nested debug breaks, we use this to restore the return
// value to the previous state. This is not merged with DebugScope because
// return_value_ will not be cleared when we use DebugScope.
class ReturnValueScope {
class V8_NODISCARD ReturnValueScope {
public:
explicit ReturnValueScope(Debug* debug);
~ReturnValueScope();


@ -37,7 +37,7 @@ class CodeTracer final : public Malloced {
WriteChars(filename_.begin(), "", 0, false);
}
class Scope {
class V8_NODISCARD Scope {
public:
explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
~Scope() { tracer_->CloseFile(); }
@ -48,7 +48,7 @@ class CodeTracer final : public Malloced {
CodeTracer* tracer_;
};
class StreamScope : public Scope {
class V8_NODISCARD StreamScope : public Scope {
public:
explicit StreamScope(CodeTracer* tracer) : Scope(tracer) {
FILE* file = this->file();


@ -466,7 +466,7 @@ int32_t EhFrameIterator::DecodeSLeb128(const byte* encoded, int* encoded_size) {
namespace {
class StreamModifiersScope final {
class V8_NODISCARD StreamModifiersScope final {
public:
explicit StreamModifiersScope(std::ostream* stream)
: stream_(stream), flags_(stream->flags()) {}


@ -73,7 +73,7 @@ class FutexWaitListNode {
// Returns false if the cancelling failed, true otherwise.
bool CancelTimeoutTask();
class ResetWaitingOnScopeExit {
class V8_NODISCARD ResetWaitingOnScopeExit {
public:
explicit ResetWaitingOnScopeExit(FutexWaitListNode* node) : node_(node) {}
~ResetWaitingOnScopeExit() { node_->waiting_ = false; }


@ -14,7 +14,7 @@ class Isolate;
// Scope intercepts only interrupt which is part of its interrupt_mask and does
// not affect other interrupts.
class InterruptsScope {
class V8_NODISCARD InterruptsScope {
public:
enum Mode { kPostponeInterrupts, kRunInterrupts, kNoop };
@ -45,7 +45,7 @@ class InterruptsScope {
// postpone scope is left the interrupts will be re-enabled and any
// interrupts that occurred while in the scope will be taken into
// account.
class PostponeInterruptsScope : public InterruptsScope {
class V8_NODISCARD PostponeInterruptsScope : public InterruptsScope {
public:
PostponeInterruptsScope(Isolate* isolate,
int intercept_mask = StackGuard::ALL_INTERRUPTS)
@ -57,7 +57,7 @@ class PostponeInterruptsScope : public InterruptsScope {
// Support for overriding PostponeInterruptsScope. Interrupt is not ignored if
// innermost scope is SafeForInterruptsScope ignoring any outer
// PostponeInterruptsScopes.
class SafeForInterruptsScope : public InterruptsScope {
class V8_NODISCARD SafeForInterruptsScope : public InterruptsScope {
public:
SafeForInterruptsScope(Isolate* isolate,
int intercept_mask = StackGuard::ALL_INTERRUPTS)


@ -779,7 +779,7 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
// Heuristically guess whether a Promise is handled by user catch handler
bool PromiseHasUserDefinedRejectHandler(Handle<JSPromise> promise);
class ExceptionScope {
class V8_NODISCARD ExceptionScope {
public:
// Scope currently can only be used for regular exceptions,
// not termination exception.
@ -2063,7 +2063,7 @@ class V8_EXPORT_PRIVATE SaveAndSwitchContext : public SaveContext {
// A scope which sets the given isolate's context to null for its lifetime to
// ensure that code does not make assumptions on a context being available.
class NullContextScope : public SaveAndSwitchContext {
class V8_NODISCARD NullContextScope : public SaveAndSwitchContext {
public:
explicit NullContextScope(Isolate* isolate)
: SaveAndSwitchContext(isolate, Context()) {}


@ -836,7 +836,7 @@ MaybeHandle<Object> AppendErrorString(Isolate* isolate, Handle<Object> error,
return error;
}
class PrepareStackTraceScope {
class V8_NODISCARD PrepareStackTraceScope {
public:
explicit PrepareStackTraceScope(Isolate* isolate) : isolate_(isolate) {
DCHECK(!isolate_->formatting_stack_trace());


@ -49,7 +49,7 @@ class RuntimeProfiler {
CodeKind code_kind);
void Baseline(JSFunction function, OptimizationReason reason);
class MarkCandidatesForOptimizationScope final {
class V8_NODISCARD MarkCandidatesForOptimizationScope final {
public:
explicit MarkCandidatesForOptimizationScope(RuntimeProfiler* profiler);
~MarkCandidatesForOptimizationScope();


@ -231,7 +231,7 @@ bool TestAndClear(int* bitfield, int mask) {
return result;
}
class ShouldBeZeroOnReturnScope final {
class V8_NODISCARD ShouldBeZeroOnReturnScope final {
public:
#ifndef DEBUG
explicit ShouldBeZeroOnReturnScope(int*) {}


@ -26,7 +26,7 @@ class VMState {
StateTag previous_tag_;
};
class ExternalCallbackScope {
class V8_NODISCARD ExternalCallbackScope {
public:
inline ExternalCallbackScope(Isolate* isolate, Address callback);
inline ~ExternalCallbackScope();


@ -195,7 +195,7 @@ inline std::ostream& operator<<(std::ostream& os, Handle<T> handle);
// garbage collector will no longer track the object stored in the
// handle and may deallocate it. The behavior of accessing a handle
// for which the handle scope has been deleted is undefined.
class HandleScope {
class V8_NODISCARD HandleScope {
public:
explicit inline HandleScope(Isolate* isolate);
inline HandleScope(HandleScope&& other) V8_NOEXCEPT;
@ -282,7 +282,7 @@ using CanonicalHandlesMap = IdentityMap<Address*, ZoneAllocationPolicy>;
// This does not apply to nested inner HandleScopes unless a nested
// CanonicalHandleScope is introduced. Handles are only canonicalized within
// the same CanonicalHandleScope, but not across nested ones.
class V8_EXPORT_PRIVATE CanonicalHandleScope final {
class V8_EXPORT_PRIVATE V8_NODISCARD CanonicalHandleScope final {
public:
// If we passed a compilation info as parameter, we created the
// CanonicalHandlesMap on said compilation info's zone(). If so, in the
@ -315,7 +315,7 @@ class V8_EXPORT_PRIVATE CanonicalHandleScope final {
// Seal off the current HandleScope so that new handles can only be created
// if a new HandleScope is entered.
class SealHandleScope final {
class V8_NODISCARD SealHandleScope final {
public:
#ifndef DEBUG
explicit SealHandleScope(Isolate* isolate) {}


@ -41,7 +41,7 @@ class LocalHandles {
friend class LocalHandleScope;
};
class LocalHandleScope {
class V8_NODISCARD LocalHandleScope {
public:
explicit inline LocalHandleScope(LocalIsolate* local_isolate);
explicit inline LocalHandleScope(LocalHeap* local_heap);


@ -102,7 +102,7 @@ class PersistentHandlesList {
// PersistentHandlesScope sets up a scope in which all created main thread
// handles become persistent handles that can be sent to another thread.
class PersistentHandlesScope {
class V8_NODISCARD PersistentHandlesScope {
public:
V8_EXPORT_PRIVATE explicit PersistentHandlesScope(Isolate* isolate);
V8_EXPORT_PRIVATE ~PersistentHandlesScope();


@ -113,7 +113,7 @@ class AllocationObserver {
DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
};
class V8_EXPORT_PRIVATE PauseAllocationObserversScope {
class V8_EXPORT_PRIVATE V8_NODISCARD PauseAllocationObserversScope {
public:
explicit PauseAllocationObserversScope(Heap* heap);
~PauseAllocationObserversScope();


@ -44,7 +44,7 @@ class V8_EXPORT_PRIVATE ConcurrentMarking {
// When the scope is entered, the concurrent marking tasks
// are preempted and are not looking at the heap objects, concurrent marking
// is resumed when the scope is exited.
class PauseScope {
class V8_NODISCARD PauseScope {
public:
explicit PauseScope(ConcurrentMarking* concurrent_marking);
~PauseScope();


@ -55,7 +55,7 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
// NoGCScope allows going over limits and avoids triggering garbage
// collection triggered through allocations or even explicitly.
class V8_EXPORT_PRIVATE NoGCScope final {
class V8_EXPORT_PRIVATE V8_NODISCARD NoGCScope final {
CPPGC_STACK_ALLOCATED();
public:


@ -33,7 +33,7 @@ class V8_EXPORT_PRIVATE ObjectAllocator final : public cppgc::AllocationHandle {
public:
// NoAllocationScope is used in debug mode to catch unwanted allocations. E.g.
// allocations during GC.
class V8_EXPORT_PRIVATE NoAllocationScope final {
class V8_EXPORT_PRIVATE V8_NODISCARD NoAllocationScope final {
CPPGC_STACK_ALLOCATED();
public:


@ -135,7 +135,7 @@ class V8_EXPORT_PRIVATE StatsCollector final {
// Trace a particular scope. Will emit a trace event and record the time in
// the corresponding StatsCollector.
template <TraceCategory trace_category, ScopeContext scope_category>
class InternalScope {
class V8_NODISCARD InternalScope {
using ScopeIdType = std::conditional_t<scope_category == kMutatorThread,
ScopeId, ConcurrentScopeId>;


@ -37,7 +37,7 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
const WrapperInfo& raw_info;
};
class V8_EXPORT_PRIVATE ProcessingScope {
class V8_EXPORT_PRIVATE V8_NODISCARD ProcessingScope {
public:
explicit ProcessingScope(LocalEmbedderHeapTracer* tracer);
~ProcessingScope();
@ -150,7 +150,7 @@ class V8_EXPORT_PRIVATE LocalEmbedderHeapTracer final {
friend class EmbedderStackStateScope;
};
class V8_EXPORT_PRIVATE EmbedderStackStateScope final {
class V8_EXPORT_PRIVATE V8_NODISCARD EmbedderStackStateScope final {
public:
EmbedderStackStateScope(LocalEmbedderHeapTracer* local_tracer,
EmbedderHeapTracer::EmbedderStackState stack_state)


@ -66,7 +66,7 @@ class V8_EXPORT_PRIVATE GCTracer {
int steps;
};
class Scope {
class V8_NODISCARD Scope {
public:
enum ScopeId {
#define DEFINE_SCOPE(scope) scope,
@ -104,7 +104,7 @@ class V8_EXPORT_PRIVATE GCTracer {
DISALLOW_COPY_AND_ASSIGN(Scope);
};
class V8_EXPORT_PRIVATE BackgroundScope {
class V8_EXPORT_PRIVATE V8_NODISCARD BackgroundScope {
public:
enum ScopeId {
#define DEFINE_SCOPE(scope) scope,


@ -1212,7 +1212,7 @@ void Heap::GarbageCollectionEpilogue() {
last_gc_time_ = MonotonicallyIncreasingTimeInMs();
}
class GCCallbacksScope {
class V8_NODISCARD GCCallbacksScope {
public:
explicit GCCallbacksScope(Heap* heap) : heap_(heap) {
heap_->gc_callbacks_depth_++;


@ -268,7 +268,7 @@ class Heap {
};
// Emits GC events for DevTools timeline.
class DevToolsTraceEventScope {
class V8_NODISCARD DevToolsTraceEventScope {
public:
DevToolsTraceEventScope(Heap* heap, const char* event_name,
const char* event_type);
@ -2412,7 +2412,7 @@ class HeapStats {
// Disables GC for all allocations. It should not be used
// outside heap, deserializer, and isolate bootstrap.
// Use AlwaysAllocateScopeForTesting in tests.
class AlwaysAllocateScope {
class V8_NODISCARD AlwaysAllocateScope {
public:
inline ~AlwaysAllocateScope();
@ -2428,7 +2428,7 @@ class AlwaysAllocateScope {
Heap* heap_;
};
class AlwaysAllocateScopeForTesting {
class V8_NODISCARD AlwaysAllocateScopeForTesting {
public:
explicit inline AlwaysAllocateScopeForTesting(Heap* heap);
@ -2437,7 +2437,7 @@ class AlwaysAllocateScopeForTesting {
};
// The CodeSpaceMemoryModificationScope can only be used by the main thread.
class CodeSpaceMemoryModificationScope {
class V8_NODISCARD CodeSpaceMemoryModificationScope {
public:
explicit inline CodeSpaceMemoryModificationScope(Heap* heap);
inline ~CodeSpaceMemoryModificationScope();
@ -2449,7 +2449,7 @@ class CodeSpaceMemoryModificationScope {
// The CodePageCollectionMemoryModificationScope can only be used by the main
// thread. It will not be enabled if a CodeSpaceMemoryModificationScope is
// already active.
class CodePageCollectionMemoryModificationScope {
class V8_NODISCARD CodePageCollectionMemoryModificationScope {
public:
explicit inline CodePageCollectionMemoryModificationScope(Heap* heap);
inline ~CodePageCollectionMemoryModificationScope();
@ -2461,7 +2461,7 @@ class CodePageCollectionMemoryModificationScope {
// The CodePageMemoryModificationScope does not check if transitions to
// writeable and back to executable are actually allowed, i.e. the MemoryChunk
// was registered to be executable. It can be used by concurrent threads.
class CodePageMemoryModificationScope {
class V8_NODISCARD CodePageMemoryModificationScope {
public:
explicit inline CodePageMemoryModificationScope(BasicMemoryChunk* chunk);
explicit inline CodePageMemoryModificationScope(Code object);


@ -39,7 +39,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
using AtomicMarkingState = MarkCompactCollector::AtomicMarkingState;
using NonAtomicMarkingState = MarkCompactCollector::NonAtomicMarkingState;
class PauseBlackAllocationScope {
class V8_NODISCARD PauseBlackAllocationScope {
public:
explicit PauseBlackAllocationScope(IncrementalMarking* marking)
: marking_(marking), paused_(false) {


@ -361,7 +361,7 @@ class MainMarkingVisitor final
MarkingState> {
public:
// This is used for revisiting objects that were black allocated.
class RevisitScope {
class V8_NODISCARD RevisitScope {
public:
explicit RevisitScope(MainMarkingVisitor* visitor) : visitor_(visitor) {
DCHECK(!visitor->revisiting_object_);
@ -788,7 +788,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
friend class RecordMigratedSlotVisitor;
};
class EvacuationScope {
class V8_NODISCARD EvacuationScope {
public:
explicit EvacuationScope(MarkCompactCollector* collector)
: collector_(collector) {


@ -14,7 +14,7 @@ namespace internal {
// Scope that explicitly parks a thread, prohibiting access to the heap and the
// creation of handles.
class ParkedScope {
class V8_NODISCARD ParkedScope {
public:
explicit ParkedScope(LocalIsolate* local_isolate)
: ParkedScope(local_isolate->heap()) {}
@ -30,7 +30,7 @@ class ParkedScope {
// Scope that explicitly unparks a thread, allowing access to the heap and the
// creation of handles.
class UnparkedScope {
class V8_NODISCARD UnparkedScope {
public:
explicit UnparkedScope(LocalIsolate* local_isolate)
: UnparkedScope(local_isolate->heap()) {}


@ -106,7 +106,7 @@ class GlobalSafepoint {
friend class PersistentHandles;
};
class SafepointScope {
class V8_NODISCARD SafepointScope {
public:
V8_EXPORT_PRIVATE explicit SafepointScope(Heap* heap);
V8_EXPORT_PRIVATE ~SafepointScope();


@ -237,7 +237,7 @@ ScavengerCollector::ScavengerCollector(Heap* heap)
: isolate_(heap->isolate()), heap_(heap) {}
// Remove this crashkey after chromium:1010312 is fixed.
class ScopedFullHeapCrashKey {
class V8_NODISCARD ScopedFullHeapCrashKey {
public:
explicit ScopedFullHeapCrashKey(Isolate* isolate) : isolate_(isolate) {
isolate_->AddCrashKey(v8::CrashKeyId::kDumpType, "heap");


@ -32,7 +32,7 @@ class Sweeper {
using FreeRangesMap = std::map<uint32_t, uint32_t>;
// Pauses the sweeper tasks or completes sweeping.
class PauseOrCompleteScope final {
class V8_NODISCARD PauseOrCompleteScope final {
public:
explicit PauseOrCompleteScope(Sweeper* sweeper);
~PauseOrCompleteScope();
@ -45,7 +45,7 @@ class Sweeper {
// sweeper to be paused. Allows for pages to be added to the sweeper while
// in this scope. Note that the original list of sweeping pages is restored
// after exiting this scope.
class FilterSweepingPagesScope final {
class V8_NODISCARD FilterSweepingPagesScope final {
public:
FilterSweepingPagesScope(
Sweeper* sweeper, const PauseOrCompleteScope& pause_or_complete_scope);


@ -24,7 +24,7 @@ class V8Console : public v8::debug::ConsoleDelegate {
void installMemoryGetter(v8::Local<v8::Context> context,
v8::Local<v8::Object> console);
class CommandLineAPIScope {
class V8_NODISCARD CommandLineAPIScope {
public:
CommandLineAPIScope(v8::Local<v8::Context>,
v8::Local<v8::Object> commandLineAPI,


@ -34,7 +34,7 @@ namespace interpreter {
// Scoped class tracking context objects created by the visitor. Represents
// mutations of the context chain within the function body, allowing pushing and
// popping of the current {context_register} during visitation.
class BytecodeGenerator::ContextScope {
class V8_NODISCARD BytecodeGenerator::ContextScope {
public:
ContextScope(BytecodeGenerator* generator, Scope* scope)
: generator_(generator),
@ -98,8 +98,8 @@ class BytecodeGenerator::ContextScope {
};
// Scoped class for tracking control statements entered by the
// visitor. The pattern derives AstGraphBuilder::ControlScope.
class BytecodeGenerator::ControlScope {
// visitor.
class V8_NODISCARD BytecodeGenerator::ControlScope {
public:
explicit ControlScope(BytecodeGenerator* generator)
: generator_(generator),
@ -107,7 +107,7 @@ class BytecodeGenerator::ControlScope {
context_(generator->execution_context()) {
generator_->set_execution_control(this);
}
virtual ~ControlScope() { generator_->set_execution_control(outer()); }
~ControlScope() { generator_->set_execution_control(outer()); }
ControlScope(const ControlScope&) = delete;
ControlScope& operator=(const ControlScope&) = delete;
@ -162,7 +162,7 @@ class BytecodeGenerator::ControlScope {
// control-flow commands that cause entry into a finally-block, and re-apply
// them after again leaving that block. Special tokens are used to identify
// paths going through the finally-block to dispatch after leaving the block.
class BytecodeGenerator::ControlScope::DeferredCommands final {
class V8_NODISCARD BytecodeGenerator::ControlScope::DeferredCommands final {
public:
// Fixed value tokens for paths we know we need.
// Fallthrough is set to -1 to make it the fallthrough case of the jump table,
@ -550,7 +550,7 @@ void BytecodeGenerator::ControlScope::PopContextToExpectedDepth() {
}
}
class BytecodeGenerator::RegisterAllocationScope final {
class V8_NODISCARD BytecodeGenerator::RegisterAllocationScope final {
public:
explicit RegisterAllocationScope(BytecodeGenerator* generator)
: generator_(generator),
@ -572,7 +572,7 @@ class BytecodeGenerator::RegisterAllocationScope final {
int outer_next_register_index_;
};
class BytecodeGenerator::AccumulatorPreservingScope final {
class V8_NODISCARD BytecodeGenerator::AccumulatorPreservingScope final {
public:
explicit AccumulatorPreservingScope(BytecodeGenerator* generator,
AccumulatorPreservingMode mode)
@ -603,7 +603,7 @@ class BytecodeGenerator::AccumulatorPreservingScope final {
// Scoped base class for determining how the result of an expression will be
// used.
class BytecodeGenerator::ExpressionResultScope {
class V8_NODISCARD BytecodeGenerator::ExpressionResultScope {
public:
ExpressionResultScope(BytecodeGenerator* generator, Expression::Context kind)
: outer_(generator->execution_result()),
@ -660,7 +660,8 @@ class BytecodeGenerator::EffectResultScope final
// Scoped class used when the result of the current expression to be
// evaluated should go into the interpreter's accumulator.
class BytecodeGenerator::ValueResultScope final : public ExpressionResultScope {
class V8_NODISCARD BytecodeGenerator::ValueResultScope final
: public ExpressionResultScope {
public:
explicit ValueResultScope(BytecodeGenerator* generator)
: ExpressionResultScope(generator, Expression::kValue) {}
@ -668,7 +669,8 @@ class BytecodeGenerator::ValueResultScope final : public ExpressionResultScope {
// Scoped class used when the result of the current expression to be
// evaluated is only tested with jumps to two branches.
class BytecodeGenerator::TestResultScope final : public ExpressionResultScope {
class V8_NODISCARD BytecodeGenerator::TestResultScope final
: public ExpressionResultScope {
public:
TestResultScope(BytecodeGenerator* generator, BytecodeLabels* then_labels,
BytecodeLabels* else_labels, TestFallthrough fallthrough)
@ -837,7 +839,7 @@ class BytecodeGenerator::TopLevelDeclarationsBuilder final : public ZoneObject {
bool processed_ = false;
};
class BytecodeGenerator::CurrentScope final {
class V8_NODISCARD BytecodeGenerator::CurrentScope final {
public:
CurrentScope(BytecodeGenerator* generator, Scope* scope)
: generator_(generator), outer_scope_(generator->current_scope()) {
@ -941,7 +943,7 @@ class BytecodeGenerator::IteratorRecord final {
Register next_;
};
class BytecodeGenerator::OptionalChainNullLabelScope final {
class V8_NODISCARD BytecodeGenerator::OptionalChainNullLabelScope final {
public:
explicit OptionalChainNullLabelScope(BytecodeGenerator* bytecode_generator)
: bytecode_generator_(bytecode_generator),
@ -966,7 +968,7 @@ class BytecodeGenerator::OptionalChainNullLabelScope final {
// It should be constructed iff a (conceptual) back edge should be produced. In
// the case of creating a LoopBuilder but never emitting the loop, it is valid
// to skip the creation of LoopScope.
class BytecodeGenerator::LoopScope final {
class V8_NODISCARD BytecodeGenerator::LoopScope final {
public:
explicit LoopScope(BytecodeGenerator* bytecode_generator, LoopBuilder* loop)
: bytecode_generator_(bytecode_generator),
@ -1103,7 +1105,7 @@ struct NullContextScopeHelper<Isolate> {
template <>
struct NullContextScopeHelper<LocalIsolate> {
class DummyNullContextScope {
class V8_NODISCARD DummyNullContextScope {
public:
explicit DummyNullContextScope(LocalIsolate*) {}
};


@ -20,7 +20,7 @@ class V8_PLATFORM_EXPORT DefaultForegroundTaskRunner
: public NON_EXPORTED_BASE(TaskRunner) {
public:
using TimeFunction = double (*)();
class RunTaskScope {
class V8_NODISCARD RunTaskScope {
public:
explicit RunTaskScope(
std::shared_ptr<DefaultForegroundTaskRunner> task_runner);


@ -294,7 +294,7 @@ class TimedHistogram : public Histogram {
};
// Helper class for scoping a TimedHistogram.
class TimedHistogramScope {
class V8_NODISCARD TimedHistogramScope {
public:
explicit TimedHistogramScope(TimedHistogram* histogram,
Isolate* isolate = nullptr)
@ -316,7 +316,7 @@ enum class OptionalTimedHistogramScopeMode { TAKE_TIME, DONT_TAKE_TIME };
// Helper class for scoping a TimedHistogram.
// It will not take time for mode = DONT_TAKE_TIME.
class OptionalTimedHistogramScope {
class V8_NODISCARD OptionalTimedHistogramScope {
public:
OptionalTimedHistogramScope(TimedHistogram* histogram, Isolate* isolate,
OptionalTimedHistogramScopeMode mode)
@ -376,7 +376,7 @@ class AsyncTimedHistogram {
// correctly even if Start() was not called. This happens to be true iff Stop()
// is passed a null isolate, but that's an implementation detail of
// TimedHistogram, and we shouldn't rely on it.
class LazyTimedHistogramScope {
class V8_NODISCARD LazyTimedHistogramScope {
public:
LazyTimedHistogramScope() : histogram_(nullptr) { timer_.Start(); }
~LazyTimedHistogramScope() {
@ -427,7 +427,7 @@ class HistogramTimer : public TimedHistogram {
// Parser is currently reentrant (when it throws an error, we call back
// into JavaScript and all bets are off), but ElapsedTimer is not
// reentry-safe. Fix this properly and remove |allow_nesting|.
class HistogramTimerScope {
class V8_NODISCARD HistogramTimerScope {
public:
explicit HistogramTimerScope(HistogramTimer* timer,
bool allow_nesting = false)
@ -503,7 +503,7 @@ class AggregatableHistogramTimer : public Histogram {
// A helper class for use with AggregatableHistogramTimer. This is the
// outer-most timer scope used with an AggregatableHistogramTimer. It will
// aggregate the information from the inner AggregatedHistogramTimerScope.
class AggregatingHistogramTimerScope {
class V8_NODISCARD AggregatingHistogramTimerScope {
public:
explicit AggregatingHistogramTimerScope(AggregatableHistogramTimer* histogram)
: histogram_(histogram) {
@ -517,7 +517,7 @@ class AggregatingHistogramTimerScope {
// A helper class for use with AggregatableHistogramTimer, the "inner" scope
// which defines the events to be timed.
class AggregatedHistogramTimerScope {
class V8_NODISCARD AggregatedHistogramTimerScope {
public:
explicit AggregatedHistogramTimerScope(AggregatableHistogramTimer* histogram)
: histogram_(histogram) {
@ -1235,7 +1235,7 @@ class WorkerThreadRuntimeCallStats final {
// Creating a WorkerThreadRuntimeCallStatsScope will provide a thread-local
// runtime call stats table, and will dump the table to an immediate trace event
// when it is destroyed.
class WorkerThreadRuntimeCallStatsScope final {
class V8_NODISCARD WorkerThreadRuntimeCallStatsScope final {
public:
explicit WorkerThreadRuntimeCallStatsScope(
WorkerThreadRuntimeCallStats* off_thread_stats);
@ -1262,7 +1262,7 @@ class WorkerThreadRuntimeCallStatsScope final {
// A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the
// time of a C++ scope.
class RuntimeCallTimerScope {
class V8_NODISCARD RuntimeCallTimerScope {
public:
inline RuntimeCallTimerScope(Isolate* isolate,
RuntimeCallCounterId counter_id);


@ -372,7 +372,7 @@ TIMER_EVENTS_LIST(V)
#undef V
template <class TimerEvent>
class TimerEventScope {
class V8_NODISCARD TimerEventScope {
public:
explicit TimerEventScope(Isolate* isolate) : isolate_(isolate) {
LogTimerEvent(Logger::START);


@ -83,7 +83,7 @@ class Recorder : public std::enable_shared_from_this<Recorder> {
template <class T, int64_t (base::TimeDelta::*precision)() const =
&base::TimeDelta::InMicroseconds>
class TimedScope {
class V8_NODISCARD TimedScope {
public:
explicit TimedScope(T* event) : event_(event) { Start(); }
~TimedScope() { Stop(); }


@ -673,7 +673,7 @@ struct SourceCodeOf {
// IsCompiledScope enables a caller to check if a function is compiled, and
// ensure it remains compiled (i.e., doesn't have its bytecode flushed) while
// the scope is retained.
class IsCompiledScope {
class V8_NODISCARD IsCompiledScope {
public:
inline IsCompiledScope(const SharedFunctionInfo shared, Isolate* isolate);
inline IsCompiledScope(const SharedFunctionInfo shared,


@ -78,7 +78,7 @@ struct FormalParametersBase {
};
// Stack-allocated scope to collect source ranges from the parser.
class SourceRangeScope final {
class V8_NODISCARD SourceRangeScope final {
public:
SourceRangeScope(const Scanner* scanner, SourceRange* range)
: scanner_(scanner), range_(range) {
@ -463,7 +463,7 @@ class ParserBase {
return contains_function_or_eval_;
}
class FunctionOrEvalRecordingScope {
class V8_NODISCARD FunctionOrEvalRecordingScope {
public:
explicit FunctionOrEvalRecordingScope(FunctionState* state)
: state_and_prev_value_(state, state->contains_function_or_eval_) {
@ -481,7 +481,7 @@ class ParserBase {
PointerWithPayload<FunctionState, bool, 1> state_and_prev_value_;
};
class LoopScope final {
class V8_NODISCARD LoopScope final {
public:
explicit LoopScope(FunctionState* function_state)
: function_state_(function_state) {
@ -1455,7 +1455,7 @@ class ParserBase {
expression_scope_->has_possible_arrow_parameter_in_scope_chain();
}
class AcceptINScope final {
class V8_NODISCARD AcceptINScope final {
public:
AcceptINScope(ParserBase* parser, bool accept_IN)
: parser_(parser), previous_accept_IN_(parser->accept_IN_) {
@ -1469,7 +1469,7 @@ class ParserBase {
bool previous_accept_IN_;
};
class ParameterParsingScope {
class V8_NODISCARD ParameterParsingScope {
public:
ParameterParsingScope(Impl* parser, FormalParametersT* parameters)
: parser_(parser), parent_parameters_(parser_->parameters_) {
@ -1483,7 +1483,7 @@ class ParserBase {
FormalParametersT* parent_parameters_;
};
class FunctionParsingScope {
class V8_NODISCARD FunctionParsingScope {
public:
explicit FunctionParsingScope(Impl* parser)
: parser_(parser), expression_scope_(parser_->expression_scope_) {


@ -185,7 +185,7 @@ class V8_EXPORT_PRIVATE Parser : public NON_EXPORTED_BASE(ParserBase<Parser>) {
bool parse_lazily() const { return mode_ == PARSE_LAZILY; }
enum Mode { PARSE_LAZILY, PARSE_EAGERLY };
class ParsingModeScope {
class V8_NODISCARD ParsingModeScope {
public:
ParsingModeScope(Parser* parser, Mode mode)
: parser_(parser), old_mode_(parser->mode_) {


@ -39,7 +39,7 @@ class BaseConsumedPreparseData : public ConsumedPreparseData {
// Reading from the ByteData is only allowed when a ReadingScope is on the
// stack. This ensures that we have a DisallowGarbageCollection in place
// whenever ByteData holds a raw pointer into the heap.
class ReadingScope {
class V8_NODISCARD ReadingScope {
public:
ReadingScope(ByteData* consumed_data, Data data)
: consumed_data_(consumed_data) {


@ -109,7 +109,7 @@ class V8_EXPORT_PRIVATE PreparseDataBuilder : public ZoneObject,
// For gathering the inner function data and splitting it up according to the
// laziness boundaries. Each lazy function gets its own
// ProducedPreparseData, and so do all lazy functions inside it.
class DataGatheringScope {
class V8_NODISCARD DataGatheringScope {
public:
explicit DataGatheringScope(PreParser* preparser)
: preparser_(preparser), builder_(nullptr) {}


@ -69,7 +69,7 @@ class Processor final : public AstVisitor<Processor> {
// [replacement_]. In many cases this will just be the original node.
Statement* replacement_;
class BreakableScope final {
class V8_NODISCARD BreakableScope final {
public:
explicit BreakableScope(Processor* processor, bool breakable = true)
: processor_(processor), previous_(processor->breakable_) {


@ -18,7 +18,7 @@
namespace v8 {
namespace internal {
class ScopedExternalStringLock {
class V8_NODISCARD ScopedExternalStringLock {
public:
explicit ScopedExternalStringLock(ExternalString string) {
DCHECK(!string.is_null());


@ -210,7 +210,7 @@ class Utf16CharacterStream {
class V8_EXPORT_PRIVATE Scanner {
public:
// Scoped helper for a re-settable bookmark.
class V8_EXPORT_PRIVATE BookmarkScope {
class V8_EXPORT_PRIVATE V8_NODISCARD BookmarkScope {
public:
explicit BookmarkScope(Scanner* scanner)
: scanner_(scanner),


@ -129,7 +129,7 @@ class CodeEventsContainer {
// Maintains the number of active CPU profilers in an isolate, and routes
// logging to a given ProfilerListener.
class ProfilingScope {
class V8_NODISCARD ProfilingScope {
public:
ProfilingScope(Isolate* isolate, ProfilerListener* listener);
~ProfilingScope();


@ -2080,7 +2080,7 @@ HeapSnapshotGenerator::HeapSnapshotGenerator(
}
namespace {
class NullContextForSnapshotScope {
class V8_NODISCARD NullContextForSnapshotScope {
public:
explicit NullContextForSnapshotScope(Isolate* isolate)
: isolate_(isolate), prev_(isolate->context()) {


@ -19,7 +19,7 @@ class RegExpStack;
// Since there is only one stack area, the Irregexp implementation is not
// re-entrant. I.e., no regular expressions may be executed in the same thread
// during a preempted Irregexp execution.
class RegExpStackScope {
class V8_NODISCARD RegExpStackScope {
public:
// Create and delete an instance to control the life-time of a growing stack.
@ -35,7 +35,6 @@ class RegExpStackScope {
RegExpStack* regexp_stack_;
};
class RegExpStack {
public:
RegExpStack();


@ -64,7 +64,7 @@ Context GetNativeContextFromWasmInstanceOnStackTop(Isolate* isolate) {
return GetWasmInstanceOnStackTop(isolate).native_context();
}
class ClearThreadInWasmScope {
class V8_NODISCARD ClearThreadInWasmScope {
public:
ClearThreadInWasmScope() {
DCHECK_IMPLIES(trap_handler::IsTrapHandlerEnabled(),


@ -20,7 +20,7 @@ namespace {
// During serialization, puts the native context into a state understood by the
// serializer (e.g. by clearing lists of Code objects). After serialization,
// the original state is restored.
class SanitizeNativeContextScope final {
class V8_NODISCARD SanitizeNativeContextScope final {
public:
SanitizeNativeContextScope(Isolate* isolate, NativeContext native_context,
bool allow_active_isolate_for_testing,


@ -608,7 +608,7 @@ void Serializer::ObjectSerializer::SerializeExternalStringAsSequentialString() {
// Clear and later restore the next link in the weak cell or allocation site.
// TODO(all): replace this with proper iteration of weak slots in serializer.
class UnlinkWeakNextScope {
class V8_NODISCARD UnlinkWeakNextScope {
public:
explicit UnlinkWeakNextScope(Heap* heap, Handle<HeapObject> object) {
if (object->IsAllocationSite() &&


@ -178,7 +178,7 @@ class Serializer : public SerializerDeserializer {
using PendingObjectReferences = std::vector<int>*;
class ObjectSerializer;
class RecursionScope {
class V8_NODISCARD RecursionScope {
public:
explicit RecursionScope(Serializer* serializer) : serializer_(serializer) {
serializer_->recursion_depth_++;


@ -23,7 +23,7 @@ namespace {
// The isolate roots may not point at context-specific objects during
// serialization.
class SanitizeIsolateScope final {
class V8_NODISCARD SanitizeIsolateScope final {
public:
SanitizeIsolateScope(Isolate* isolate, bool allow_active_isolate_for_testing,
const DisallowGarbageCollection& no_gc)


@ -212,7 +212,7 @@ class CfgAssembler {
Block* current_block_ = cfg_.start();
};
class CfgAssemblerScopedTemporaryBlock {
class V8_NODISCARD CfgAssemblerScopedTemporaryBlock {
public:
CfgAssemblerScopedTemporaryBlock(CfgAssembler* assembler, Block* block)
: assembler_(assembler), saved_block_(block) {


@ -35,7 +35,7 @@ class ContextualVariable {
// variable is restored to the state before the {Scope} was created. Scopes
// have to follow a stack discipline: A {Scope} has to be destructed before
// any older scope is destructed.
class Scope {
class V8_NODISCARD Scope {
public:
template <class... Args>
explicit Scope(Args&&... args)


@ -599,7 +599,7 @@ class ImplementationVisitor {
// // ... create temporary slots ...
// result = stack_scope.Yield(surviving_slots);
// }
class StackScope {
class V8_NODISCARD StackScope {
public:
explicit StackScope(ImplementationVisitor* visitor) : visitor_(visitor) {
base_ = visitor_->assembler().CurrentStack().AboveTop();


@ -365,7 +365,7 @@ inline bool StringEndsWith(const std::string& s, const std::string& suffix) {
return s.substr(s.size() - suffix.size()) == suffix;
}
class IfDefScope {
class V8_NODISCARD IfDefScope {
public:
IfDefScope(std::ostream& os, std::string d);
~IfDefScope();
@ -377,7 +377,7 @@ class IfDefScope {
std::string d_;
};
class NamespaceScope {
class V8_NODISCARD NamespaceScope {
public:
NamespaceScope(std::ostream& os,
std::initializer_list<std::string> namespaces);
@ -390,7 +390,7 @@ class NamespaceScope {
std::vector<std::string> d_;
};
class IncludeGuardScope {
class V8_NODISCARD IncludeGuardScope {
public:
IncludeGuardScope(std::ostream& os, std::string file_name);
~IncludeGuardScope();
@ -402,7 +402,7 @@ class IncludeGuardScope {
std::string d_;
};
class IncludeObjectMacrosScope {
class V8_NODISCARD IncludeObjectMacrosScope {
public:
explicit IncludeObjectMacrosScope(std::ostream& os);
~IncludeObjectMacrosScope();


@ -184,7 +184,7 @@ class IdentityMap : public IdentityMapBase {
friend class IdentityMap;
};
class IteratableScope {
class V8_NODISCARD IteratableScope {
public:
explicit IteratableScope(IdentityMap* map) : map_(map) {
CHECK(!map_->is_iterable());


@ -24,7 +24,7 @@ class ZoneList;
// add any entries if there is a ScopedList with the same backing in an inner
// scope.
template <typename T, typename TBacking = T>
class ScopedList final {
class V8_NODISCARD ScopedList final {
// The backing can either be the same type as the list type, or, for pointers,
// we additionally allow a void* backing store.
static_assert((std::is_same<TBacking, T>::value) ||


@ -15,7 +15,7 @@
namespace v8 {
namespace internal {
class TimedScope {
class V8_NODISCARD TimedScope {
public:
explicit TimedScope(double* result)
: start_(TimestampMs()), result_(result) {}


@ -141,7 +141,7 @@ class Vector {
};
template <typename T>
class ScopedVector : public Vector<T> {
class V8_NODISCARD ScopedVector : public Vector<T> {
public:
explicit ScopedVector(size_t length)
: Vector<T>(NewArray<T>(length), length) {}


@ -28,7 +28,7 @@ inline void SwitchMemoryPermissionsToExecutable() {
namespace wasm {
class CodeSpaceWriteScope {
class V8_NODISCARD CodeSpaceWriteScope {
public:
// TODO(jkummerow): Background threads could permanently stay in
// writable mode; only the main thread has to switch back and forth.


@ -81,7 +81,7 @@ enum class CompileStrategy : uint8_t {
class CompilationStateImpl;
class BackgroundCompileScope {
class V8_NODISCARD BackgroundCompileScope {
public:
explicit BackgroundCompileScope(std::weak_ptr<NativeModule> native_module)
: native_module_(native_module.lock()) {}


@ -887,7 +887,7 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
// and even if we did, the resulting set of pages may be fragmented.
// Currently, we try and keep the number of syscalls low.
// - similar argument for debug time.
class NativeModuleModificationScope final {
class V8_NODISCARD NativeModuleModificationScope final {
public:
explicit NativeModuleModificationScope(NativeModule* native_module);
~NativeModuleModificationScope();
@ -899,7 +899,7 @@ class NativeModuleModificationScope final {
// {WasmCodeRefScope}s form a perfect stack. New {WasmCode} pointers generated
// by e.g. creating new code or looking up code by its address are added to the
// top-most {WasmCodeRefScope}.
class V8_EXPORT_PRIVATE WasmCodeRefScope {
class V8_EXPORT_PRIVATE V8_NODISCARD WasmCodeRefScope {
public:
WasmCodeRefScope();
WasmCodeRefScope(const WasmCodeRefScope&) = delete;


@ -416,7 +416,7 @@ void f32x4_nearest_int_wrapper(Address data) {
}
namespace {
class ThreadNotInWasmScope {
class V8_NODISCARD ThreadNotInWasmScope {
// Asan on Windows triggers exceptions to allocate shadow memory lazily. When
// this function is called from WebAssembly, these exceptions would be handled
// by the trap handler before they get handled by Asan, and thereby confuse the


@ -51,7 +51,7 @@ class WasmImportWrapperCache {
};
// Helper class to modify the cache under a lock.
class ModificationScope {
class V8_NODISCARD ModificationScope {
public:
explicit ModificationScope(WasmImportWrapperCache* cache)
: cache_(cache), guard_(&cache->mutex_) {}


@ -235,7 +235,7 @@ void LocalContext::Initialize(v8::Isolate* isolate,
// This indirection is needed because HandleScopes cannot be heap-allocated, and
// we don't want any unnecessary #includes in cctest.h.
class InitializedHandleScopeImpl {
class V8_NODISCARD InitializedHandleScopeImpl {
public:
explicit InitializedHandleScopeImpl(i::Isolate* isolate)
: handle_scope_(isolate) {}


@ -598,7 +598,7 @@ static inline void EmptyMessageQueues(v8::Isolate* isolate) {
class InitializedHandleScopeImpl;
class InitializedHandleScope {
class V8_NODISCARD InitializedHandleScope {
public:
InitializedHandleScope();
~InitializedHandleScope();
@ -611,7 +611,7 @@ class InitializedHandleScope {
std::unique_ptr<InitializedHandleScopeImpl> initialized_handle_scope_impl_;
};
class HandleAndZoneScope : public InitializedHandleScope {
class V8_NODISCARD HandleAndZoneScope : public InitializedHandleScope {
public:
explicit HandleAndZoneScope(bool support_zone_compression = false);
~HandleAndZoneScope();
@ -638,7 +638,7 @@ class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
const char* data_;
};
class ManualGCScope {
class V8_NODISCARD ManualGCScope {
public:
ManualGCScope()
: flag_concurrent_marking_(i::FLAG_concurrent_marking),


@ -209,7 +209,6 @@ class MoveInterpreter : public GapResolver::Assembler {
InterpreterState state_;
};
class ParallelMoveCreator : public HandleAndZoneScope {
public:
ParallelMoveCreator() : rng_(CcTest::random_number_generator()) {}


@ -12,7 +12,7 @@ namespace v8 {
namespace internal {
namespace heap {
class TemporaryEmbedderHeapTracerScope {
class V8_NODISCARD TemporaryEmbedderHeapTracerScope {
public:
TemporaryEmbedderHeapTracerScope(v8::Isolate* isolate,
v8::EmbedderHeapTracer* tracer)


@ -50,7 +50,7 @@ namespace internal {
namespace heap {
// Temporarily sets a given allocator in an isolate.
class TestMemoryAllocatorScope {
class V8_NODISCARD TestMemoryAllocatorScope {
public:
TestMemoryAllocatorScope(Isolate* isolate, size_t max_capacity,
size_t code_range_size,
@ -85,7 +85,7 @@ class TestMemoryAllocatorScope {
};
// Temporarily sets a given code page allocator in an isolate.
class TestCodePageAllocatorScope {
class V8_NODISCARD TestCodePageAllocatorScope {
public:
TestCodePageAllocatorScope(Isolate* isolate,
v8::PageAllocator* code_page_allocator)
@ -799,7 +799,7 @@ namespace {
// cannot take an argument. Since these tests create ReadOnlySpaces not attached
// to the Heap directly, they need to be destroyed to ensure the
// MemoryAllocator's stats are all 0 at exit.
class ReadOnlySpaceScope {
class V8_NODISCARD ReadOnlySpaceScope {
public:
explicit ReadOnlySpaceScope(Heap* heap) : ro_space_(heap) {}
~ReadOnlySpaceScope() {

Some files were not shown because too many files have changed in this diff.