[zone-compr] Add CompressedZonePtr class and compress TurboFan graphs
This CL doesn't try to compress containers of Node* yet.

Bug: v8:9923
Change-Id: Ica16b09b6f26321952b440d49a70f9a991d4275f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2324258
Commit-Queue: Igor Sheludko (OOO Aug 3-17) <ishell@chromium.org>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69168}
parent 9f94090a91
commit ee23306c7c
BUILD.gn (2 additions)

@@ -2005,6 +2005,7 @@ v8_compiler_sources = [
   "src/compiler/graph-trimmer.h",
   "src/compiler/graph-visualizer.cc",
   "src/compiler/graph-visualizer.h",
+  "src/compiler/graph-zone-traits.h",
   "src/compiler/graph.cc",
   "src/compiler/graph.h",
   "src/compiler/int64-lowering.cc",
@@ -3303,6 +3304,7 @@ v8_source_set("v8_base_without_compiler") {
   "src/wasm/wasm-value.h",
   "src/zone/accounting-allocator.cc",
   "src/zone/accounting-allocator.h",
+  "src/zone/compressed-zone-ptr.h",
   "src/zone/type-stats.cc",
   "src/zone/type-stats.h",
   "src/zone/zone-allocator.h",
src/compiler/graph-zone-traits.h (new file, 29 lines)

@@ -0,0 +1,29 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_COMPILER_GRAPH_ZONE_TRAITS_H_
+#define V8_COMPILER_GRAPH_ZONE_TRAITS_H_
+
+#include "src/zone/zone-type-traits.h"
+
+namespace v8 {
+namespace internal {
+namespace compiler {
+
+// Forward declarations.
+class Node;
+
+// GraphZoneTraits provides typedefs for zone pointer types that are either
+// compressed or not depending on the kCompressGraphZone flag.
+using GraphZoneTraits = ZoneTypeTraits<kCompressGraphZone>;
+
+// ZoneNodePtr is a possibly compressed pointer to a Node allocated in a zone
+// memory.
+using ZoneNodePtr = GraphZoneTraits::Ptr<Node>;
+
+}  // namespace compiler
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_COMPILER_GRAPH_ZONE_TRAITS_H_
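For orientation, here is a hedged sketch of how ZoneNodePtr is intended to be consumed, based on the conversion and assignment operators of CompressedZonePtr shown further down in this CL. The struct and function below are illustrative only and are not part of the change; note that an object holding a ZoneNodePtr must itself be allocated in a zone that supports compression, because decompression is relative to the slot's own address.

#include "src/compiler/graph-zone-traits.h"

namespace v8 {
namespace internal {
namespace compiler {

// Hypothetical zone-allocated struct; when kCompressGraphZone is true it must
// live inside a compressing zone so the compressed slot can be decompressed.
struct ExampleEdge {
  ZoneNodePtr to;  // 4 bytes when compressed, a plain Node* otherwise
};

inline void RewireExampleEdge(ExampleEdge* edge, Node* replacement) {
  Node* old_to = edge->to;      // implicit conversion back to Node*
  if (old_to != replacement) {  // CompressedZonePtr also compares against T*
    edge->to = replacement;     // operator=(T*) stores the compressed value
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8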
src/compiler/graph.cc

@@ -22,8 +22,12 @@ Graph::Graph(Zone* zone)
       end_(nullptr),
       mark_max_(0),
       next_node_id_(0),
-      decorators_(zone) {}
+      decorators_(zone) {
+  // Nodes use compressed pointers, so zone must support pointer compression.
+  // If the check fails, ensure the zone is created with kCompressGraphZone
+  // flag.
+  CHECK_IMPLIES(kCompressGraphZone, zone->supports_compression());
+}
 
 void Graph::Decorate(Node* node) {
   for (GraphDecorator* const decorator : decorators_) {
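The CHECK above constrains callers: whenever kCompressGraphZone is set, the zone backing a Graph must support pointer compression. A minimal sketch of what that looks like at a call site follows; the Zone constructor taking a "supports compression" flag is an assumption inferred from zone->supports_compression(), not something this CL shows, so treat the snippet as illustrative only.

#include "src/compiler/graph.h"
#include "src/zone/accounting-allocator.h"
#include "src/zone/zone.h"

namespace v8 {
namespace internal {
namespace compiler {

inline void ExampleGraphSetup(AccountingAllocator* allocator) {
  // Assumed constructor shape: the last argument enables compression support.
  Zone graph_zone(allocator, "example-graph-zone", kCompressGraphZone);
  Graph graph(&graph_zone);  // satisfies the CHECK_IMPLIES in Graph::Graph
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8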
src/compiler/node.cc

@@ -20,14 +20,13 @@ Node::OutOfLineInputs* Node::OutOfLineInputs::New(Zone* zone, int capacity) {
   return outline;
 }
 
-void Node::OutOfLineInputs::ExtractFrom(Use* old_use_ptr, Node** old_input_ptr,
-                                        int count) {
+void Node::OutOfLineInputs::ExtractFrom(Use* old_use_ptr,
+                                        ZoneNodePtr* old_input_ptr, int count) {
   DCHECK_GE(count, 0);
   // Extract the inputs from the old use and input pointers and copy them
   // to this out-of-line-storage.
   Use* new_use_ptr = reinterpret_cast<Use*>(this) - 1;
-  Node** new_input_ptr = inputs();
+  ZoneNodePtr* new_input_ptr = inputs();
   CHECK_IMPLIES(count > 0, Use::InputIndexField::is_valid(count - 1));
   for (int current = 0; current < count; current++) {
     new_use_ptr->bit_field_ =
@@ -55,11 +54,14 @@ void Node::OutOfLineInputs::ExtractFrom(Use* old_use_ptr, Node** old_input_ptr,
 struct NodeWithOutOfLineInputs {};
 struct NodeWithInLineInputs {};
 
-Node* Node::New(Zone* zone, NodeId id, const Operator* op, int input_count,
-                Node* const* inputs, bool has_extensible_inputs) {
+template <typename NodePtrT>
+Node* Node::NewImpl(Zone* zone, NodeId id, const Operator* op, int input_count,
+                    NodePtrT const* inputs, bool has_extensible_inputs) {
+  // Node uses compressed pointers, so zone must support pointer compression.
+  DCHECK_IMPLIES(kCompressGraphZone, zone->supports_compression());
   DCHECK_GE(input_count, 0);
 
-  Node** input_ptr;
+  ZoneNodePtr* input_ptr;
   Use* use_ptr;
   Node* node;
   bool is_inline;
@@ -80,7 +82,7 @@ Node* Node::New(Zone* zone, NodeId id, const Operator* op, int input_count,
 
     // Allocate node, with space for OutOfLineInputs pointer.
     void* node_buffer = zone->Allocate<NodeWithOutOfLineInputs>(
-        sizeof(Node) + sizeof(OutOfLineInputs*));
+        sizeof(Node) + sizeof(ZoneOutOfLineInputsPtr));
     node = new (node_buffer) Node(id, op, kOutlineMarker, 0);
     node->set_outline_inputs(outline);
 
@@ -99,7 +101,7 @@ Node* Node::New(Zone* zone, NodeId id, const Operator* op, int input_count,
       capacity = std::min(input_count + 3, max);
     }
 
-    size_t size = sizeof(Node) + capacity * (sizeof(Node*) + sizeof(Use));
+    size_t size = sizeof(Node) + capacity * (sizeof(ZoneNodePtr) + sizeof(Use));
     intptr_t raw_buffer =
         reinterpret_cast<intptr_t>(zone->Allocate<NodeWithInLineInputs>(size));
     void* node_buffer =
@@ -126,13 +128,17 @@ Node* Node::New(Zone* zone, NodeId id, const Operator* op, int input_count,
   return node;
 }
 
+Node* Node::New(Zone* zone, NodeId id, const Operator* op, int input_count,
+                Node* const* inputs, bool has_extensible_inputs) {
+  return NewImpl(zone, id, op, input_count, inputs, has_extensible_inputs);
+}
+
 Node* Node::Clone(Zone* zone, NodeId id, const Node* node) {
   int const input_count = node->InputCount();
-  Node* const* const inputs = node->has_inline_inputs()
+  ZoneNodePtr const* const inputs = node->has_inline_inputs()
                                   ? node->inline_inputs()
                                   : node->outline_inputs()->inputs();
-  Node* const clone = New(zone, id, node->op(), input_count, inputs, false);
+  Node* const clone = NewImpl(zone, id, node->op(), input_count, inputs, false);
   clone->set_type(node->type());
   return clone;
 }
@@ -236,7 +242,7 @@ Node* Node::RemoveInput(int index) {
 }
 
 void Node::ClearInputs(int start, int count) {
-  Node** input_ptr = GetInputPtr(start);
+  ZoneNodePtr* input_ptr = GetInputPtr(start);
   Use* use_ptr = GetUsePtr(start);
   while (count-- > 0) {
     DCHECK_EQ(input_ptr, use_ptr->input_ptr());
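Why NewImpl is a template: the public Node::New keeps its Node* const* signature for external callers, while Node::Clone now reads the existing, possibly compressed, input array, so the shared implementation has to accept either element type. A stripped-down, hypothetical illustration of the pattern follows (none of these names are from the CL); it relies only on each element being readable as a Node*.

// Works for NodePtrT = Node*, and, given CompressedZonePtr's implicit
// conversion to T*, for NodePtrT = CompressedZonePtr<Node> as well.
class Node;

template <typename NodePtrT>
int CountNonNullInputs(NodePtrT const* inputs, int input_count) {
  int result = 0;
  for (int i = 0; i < input_count; ++i) {
    Node* input = inputs[i];  // reads uniformly from both array types
    if (input != nullptr) ++result;
  }
  return result;
}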
src/compiler/node.h

@@ -6,6 +6,7 @@
 #define V8_COMPILER_NODE_H_
 
 #include "src/common/globals.h"
+#include "src/compiler/graph-zone-traits.h"
 #include "src/compiler/opcodes.h"
 #include "src/compiler/operator.h"
 #include "src/compiler/types.h"
@@ -76,7 +77,7 @@ class V8_EXPORT_PRIVATE Node final {
   void ReplaceInput(int index, Node* new_to) {
     CHECK_LE(0, index);
     CHECK_LT(index, InputCount());
-    Node** input_ptr = GetInputPtr(index);
+    ZoneNodePtr* input_ptr = GetInputPtr(index);
     Node* old_to = *input_ptr;
     if (old_to != new_to) {
       Use* use = GetUsePtr(index);
@@ -152,35 +153,43 @@ class V8_EXPORT_PRIVATE Node final {
   void Print(std::ostream&, int depth = 1) const;
 
  private:
+  template <typename NodePtrT>
+  inline static Node* NewImpl(Zone* zone, NodeId id, const Operator* op,
+                              int input_count, NodePtrT const* inputs,
+                              bool has_extensible_inputs);
+
   struct Use;
+  using ZoneUsePtr = GraphZoneTraits::Ptr<Use>;
 
   // Out of line storage for inputs when the number of inputs overflowed the
   // capacity of the inline-allocated space.
   struct OutOfLineInputs {
-    Node* node_;
+    ZoneNodePtr node_;
     int count_;
     int capacity_;
 
     // Inputs are allocated right behind the OutOfLineInputs instance.
-    inline Node** inputs();
+    inline ZoneNodePtr* inputs();
 
     static OutOfLineInputs* New(Zone* zone, int capacity);
-    void ExtractFrom(Use* use_ptr, Node** input_ptr, int count);
+    void ExtractFrom(Use* use_ptr, ZoneNodePtr* input_ptr, int count);
   };
+  using ZoneOutOfLineInputsPtr = GraphZoneTraits::Ptr<OutOfLineInputs>;
 
   // A link in the use chain for a node. Every input {i} to a node {n} has an
   // associated {Use} which is linked into the use chain of the {i} node.
   struct Use {
-    Use* next;
-    Use* prev;
+    ZoneUsePtr next;
+    ZoneUsePtr prev;
     uint32_t bit_field_;
 
     int input_index() const { return InputIndexField::decode(bit_field_); }
     bool is_inline_use() const { return InlineField::decode(bit_field_); }
-    Node** input_ptr() {
+    ZoneNodePtr* input_ptr() {
       int index = input_index();
       Use* start = this + 1 + index;
-      Node** inputs = is_inline_use()
-                          ? reinterpret_cast<Node*>(start)->inline_inputs()
+      ZoneNodePtr* inputs =
+          is_inline_use() ? reinterpret_cast<Node*>(start)->inline_inputs()
                           : reinterpret_cast<OutOfLineInputs*>(start)->inputs();
       return &inputs[index];
     }
@@ -230,21 +239,21 @@ class V8_EXPORT_PRIVATE Node final {
 
   inline Address inputs_location() const;
 
-  Node** inline_inputs() const {
-    return reinterpret_cast<Node**>(inputs_location());
+  ZoneNodePtr* inline_inputs() const {
+    return reinterpret_cast<ZoneNodePtr*>(inputs_location());
   }
   OutOfLineInputs* outline_inputs() const {
-    return *reinterpret_cast<OutOfLineInputs**>(inputs_location());
+    return *reinterpret_cast<ZoneOutOfLineInputsPtr*>(inputs_location());
   }
   void set_outline_inputs(OutOfLineInputs* outline) {
-    *reinterpret_cast<OutOfLineInputs**>(inputs_location()) = outline;
+    *reinterpret_cast<ZoneOutOfLineInputsPtr*>(inputs_location()) = outline;
   }
 
-  Node* const* GetInputPtrConst(int input_index) const {
+  ZoneNodePtr const* GetInputPtrConst(int input_index) const {
     return has_inline_inputs() ? &(inline_inputs()[input_index])
                                : &(outline_inputs()->inputs()[input_index]);
   }
-  Node** GetInputPtr(int input_index) {
+  ZoneNodePtr* GetInputPtr(int input_index) {
     return has_inline_inputs() ? &(inline_inputs()[input_index])
                                : &(outline_inputs()->inputs()[input_index]);
   }
@@ -286,7 +295,7 @@ class V8_EXPORT_PRIVATE Node final {
   Type type_;
   Mark mark_;
   uint32_t bit_field_;
-  Use* first_use_;
+  ZoneUsePtr first_use_;
 
   friend class Edge;
   friend class NodeMarkerBase;
@@ -299,8 +308,8 @@ Address Node::inputs_location() const {
   return reinterpret_cast<Address>(this) + sizeof(Node);
 }
 
-Node** Node::OutOfLineInputs::inputs() {
-  return reinterpret_cast<Node**>(reinterpret_cast<Address>(this) +
+ZoneNodePtr* Node::OutOfLineInputs::inputs() {
+  return reinterpret_cast<ZoneNodePtr*>(reinterpret_cast<Address>(this) +
                                         sizeof(Node::OutOfLineInputs));
 }
 
@@ -396,11 +405,11 @@ class Node::InputEdges final {
 
   inline value_type operator[](int index) const;
 
-  InputEdges(Node** input_root, Use* use_root, int count)
+  InputEdges(ZoneNodePtr* input_root, Use* use_root, int count)
       : input_root_(input_root), use_root_(use_root), count_(count) {}
 
  private:
-  Node** input_root_;
+  ZoneNodePtr* input_root_;
   Use* use_root_;
   int count_;
 };
@@ -418,11 +427,11 @@ class V8_EXPORT_PRIVATE Node::Inputs final {
 
   inline value_type operator[](int index) const;
 
-  explicit Inputs(Node* const* input_root, int count)
+  explicit Inputs(ZoneNodePtr const* input_root, int count)
       : input_root_(input_root), count_(count) {}
 
  private:
-  Node* const* input_root_;
+  ZoneNodePtr const* input_root_;
   int count_;
 };
@@ -456,14 +465,15 @@ class Edge final {
   friend class Node::InputEdges;
   friend class Node::InputEdges::iterator;
 
-  Edge(Node::Use* use, Node** input_ptr) : use_(use), input_ptr_(input_ptr) {
+  Edge(Node::Use* use, ZoneNodePtr* input_ptr)
+      : use_(use), input_ptr_(input_ptr) {
     DCHECK_NOT_NULL(use);
     DCHECK_NOT_NULL(input_ptr);
    DCHECK_EQ(input_ptr, use->input_ptr());
   }
 
   Node::Use* use_;
-  Node** input_ptr_;
+  ZoneNodePtr* input_ptr_;
 };
 
 bool Node::IsDead() const {
@@ -530,11 +540,11 @@ class Node::InputEdges::iterator final {
  private:
   friend class Node;
 
-  explicit iterator(Use* use, Node** input_ptr)
+  explicit iterator(Use* use, ZoneNodePtr* input_ptr)
      : use_(use), input_ptr_(input_ptr) {}
 
   Use* use_;
-  Node** input_ptr_;
+  ZoneNodePtr* input_ptr_;
 };
 
@@ -588,9 +598,10 @@ class Node::Inputs::const_iterator final {
  private:
   friend class Node::Inputs;
 
-  explicit const_iterator(Node* const* input_ptr) : input_ptr_(input_ptr) {}
+  explicit const_iterator(ZoneNodePtr const* input_ptr)
+      : input_ptr_(input_ptr) {}
 
-  Node* const* input_ptr_;
+  ZoneNodePtr const* input_ptr_;
 };
 
@@ -618,7 +629,7 @@ class Node::UseEdges::iterator final {
   iterator& operator++() {
     DCHECK_NOT_NULL(current_);
     current_ = next_;
-    next_ = current_ ? current_->next : nullptr;
+    next_ = current_ ? static_cast<Node::Use*>(current_->next) : nullptr;
     return *this;
   }
   iterator operator++(int);
@@ -629,7 +640,7 @@ class Node::UseEdges::iterator final {
   iterator() : current_(nullptr), next_(nullptr) {}
   explicit iterator(Node* node)
       : current_(node->first_use_),
-        next_(current_ ? current_->next : nullptr) {}
+        next_(current_ ? static_cast<Node::Use*>(current_->next) : nullptr) {}
 
   Node::Use* current_;
   Node::Use* next_;
src/zone/compressed-zone-ptr.h (new file, 93 lines)

@@ -0,0 +1,93 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef V8_ZONE_COMPRESSED_ZONE_PTR_H_
+#define V8_ZONE_COMPRESSED_ZONE_PTR_H_
+
+#include <type_traits>
+
+#include "src/base/logging.h"
+#include "src/common/globals.h"
+#include "src/zone/zone-compression.h"
+
+namespace v8 {
+namespace internal {
+
+//
+// Compressed pointer to T using aligned-base-relative addressing compression.
+//
+// Note that the CompressedZonePtr<T> is implicitly convertible to T*.
+// Such an approach provides the benefit of almost seamless migration of a code
+// using full pointers to compressed pointers.
+// However, using CompressedZonePtr<T> in containers is not allowed yet.
+//
+// It's not recommended to use this class directly, use ZoneTypeTraits::Ptr<T>
+// instead.
+template <typename T>
+class CompressedZonePtr {
+ public:
+  CompressedZonePtr() = default;
+  explicit CompressedZonePtr(std::nullptr_t) : CompressedZonePtr() {}
+  explicit CompressedZonePtr(T* value) { *this = value; }
+  // Move- and copy-constructors are explicitly deleted in order to avoid
+  // creation of temporary objects which we can't uncompress because they will
+  // live outside of the zone memory.
+  CompressedZonePtr(const CompressedZonePtr& other) V8_NOEXCEPT = delete;
+  CompressedZonePtr(CompressedZonePtr&&) V8_NOEXCEPT = delete;
+
+  CompressedZonePtr& operator=(const CompressedZonePtr& other) V8_NOEXCEPT {
+    DCHECK(ZoneCompression::CheckSameBase(this, &other));
+    compressed_value_ = other.compressed_value_;
+    return *this;
+  }
+  CompressedZonePtr& operator=(CompressedZonePtr&& other) V8_NOEXCEPT = delete;
+
+  CompressedZonePtr& operator=(T* value) {
+    compressed_value_ = ZoneCompression::Compress(value);
+    DCHECK_EQ(value, Decompress());
+    return *this;
+  }
+
+  bool operator==(std::nullptr_t) const { return compressed_value_ == 0; }
+  bool operator!=(std::nullptr_t) const { return compressed_value_ != 0; }
+
+  // The equality comparisons assume that both operands point to objects
+  // allocated by the same allocator supporting pointer compression, therefore
+  // it's enough to compare compressed values.
+  bool operator==(const CompressedZonePtr& other) const {
+    return compressed_value_ == other.compressed_value_;
+  }
+  bool operator!=(const CompressedZonePtr& other) const {
+    return !(*this == other);
+  }
+  bool operator==(T* other) const {
+    return compressed_value_ == ZoneCompression::Compress(other);
+  }
+  bool operator!=(T* other) const { return !(*this == other); }
+
+  T& operator*() const { return *Decompress(); }
+  T* operator->() const { return Decompress(); }
+
+  operator T*() const { return Decompress(); }
+  operator bool() const { return compressed_value_ != 0; }
+
+ private:
+  T* Decompress() const {
+    return reinterpret_cast<T*>(
+        ZoneCompression::Decompress(this, compressed_value_));
+  }
+
+  uint32_t compressed_value_ = 0;
+};
+
+// This requirement is necessary for being able to use memcopy in containers
+// of zone pointers.
+// TODO(ishell): Re-enable once compressed pointers are supported in containers.
+// static_assert(std::is_trivially_copyable<CompressedZonePtr<int>>::value,
+//               "CompressedZonePtr must be trivially copyable");
+
+}  // namespace internal
+}  // namespace v8
+
+#endif  // V8_ZONE_COMPRESSED_ZONE_PTR_H_
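The "aligned-base-relative addressing" mentioned in the header comment is not spelled out in this file, so the following is only a hedged sketch of the idea behind ZoneCompression::Compress/Decompress, under the assumption that all compressing zones live inside one large, power-of-two-aligned reservation: a pointer is stored as its 32-bit offset within that reservation, and the reservation base is re-derived from the address of the slot holding the compressed value. The constant name and the exact alignment below are illustrative, not taken from the CL.

#include <cstdint>

// Illustrative sketch only; see src/zone/zone-compression.h for the real
// ZoneCompression implementation.
constexpr std::uintptr_t kAssumedCageAlignment = std::uintptr_t{1} << 32;

inline std::uint32_t SketchCompress(const void* value) {
  // With a 2^32-aligned reservation, the low 32 bits of the address are the
  // offset from the reservation base; nullptr compresses to 0.
  return static_cast<std::uint32_t>(reinterpret_cast<std::uintptr_t>(value));
}

inline void* SketchDecompress(const void* slot_address, std::uint32_t value) {
  if (value == 0) return nullptr;  // keep nullptr round-trippable
  // The slot storing the compressed value lives in the same reservation, so
  // masking its address recovers the base to which the offset is added.
  std::uintptr_t base = reinterpret_cast<std::uintptr_t>(slot_address) &
                        ~(kAssumedCageAlignment - 1);
  return reinterpret_cast<void*>(base + value);
}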
src/zone/zone-type-traits.h

@@ -7,6 +7,10 @@
 
 #include "src/common/globals.h"
 
+#ifdef V8_COMPRESS_ZONES
+#include "src/zone/compressed-zone-ptr.h"
+#endif
+
 namespace v8 {
 namespace internal {
 
@@ -47,17 +51,18 @@ struct ZoneTypeTraits<false> {
 
 template <>
 struct ZoneTypeTraits<true> {
-  // TODO(ishell): use CompressedZonePtr<T> here
   template <typename T>
-  using Ptr = FullZonePtr<T>;
+  using Ptr = CompressedZonePtr<T>;
 };
 
 // This requirement is necessary for being able to use memcopy in containers
 // of zone pointers.
-static_assert(
-    std::is_trivially_copyable<
-        ZoneTypeTraits<COMPRESS_ZONES_BOOL>::Ptr<int>>::value,
-    "ZoneTypeTraits<COMPRESS_ZONES_BOOL>::Ptr<T> must be trivially copyable");
+// TODO(ishell): Re-enable once compressed pointers are supported in containers.
+// static_assert(
+//     std::is_trivially_copyable<
+//         ZoneTypeTraits<COMPRESS_ZONES_BOOL>::Ptr<int>>::value,
+//     "ZoneTypeTraits<COMPRESS_ZONES_BOOL>::Ptr<T> must be trivially
+//     copyable");
 
 //
 // is_compressed_pointer<T> predicate can be used for checking if T is a
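The disabled static_assert above is the concrete reason the CL description says containers of Node* are not compressed yet: zone containers rely on their element type being trivially copyable (so elements can be moved with memcpy), and deleting the copy and move constructors makes CompressedZonePtr fail that requirement. A small, self-contained check of that property is sketched below; it assumes only what the class definition in this CL shows, plus the assumption that FullZonePtr<T> is an alias for a plain T*.

#include <type_traits>

#include "src/zone/compressed-zone-ptr.h"

namespace v8 {
namespace internal {

// Holds under the definitions in this CL: the user-provided copy assignment
// plus deleted copy/move constructors make the type non-trivially-copyable,
// so memcpy-based zone containers cannot store it yet.
static_assert(!std::is_trivially_copyable<CompressedZonePtr<int>>::value,
              "CompressedZonePtr is intentionally not trivially copyable yet");

// A raw pointer, which FullZonePtr<T> is assumed to alias, stays trivially
// copyable, so uncompressed zones keep working with existing containers.
static_assert(std::is_trivially_copyable<int*>::value,
              "raw zone pointers remain trivially copyable");

}  // namespace internal
}  // namespace v8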