Merge tag 'refs/tags/sync-piper' into sync-stage
commit dcfeaf712b
@@ -43,7 +43,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

@@ -1611,14 +1611,14 @@ final class Utf8 {
// ANDing the index with 7 to determine the number of bytes that need to be read before
// we're 8-byte aligned.
final int unaligned = 8 - ((int) offset & 7);
for (int j = unaligned; j > 0; j--) {
int i;
for (i = 0; i < unaligned; i++) {
  if (UnsafeUtil.getByte(bytes, offset++) < 0) {
    return unaligned - j;
    return i;
  }
}

int i;
for (i = 0; i + 8 <= maxChars; i += 8) {
for (; i + 8 <= maxChars; i += 8) {
  if ((UnsafeUtil.getLong(bytes, UnsafeUtil.BYTE_ARRAY_BASE_OFFSET + offset)
          & ASCII_MASK_LONG)
      != 0L) {

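The Utf8 change above is about counting leading ASCII bytes: the code first reads 8 - (offset & 7) single bytes until the address is 8-byte aligned, then checks eight bytes per iteration against ASCII_MASK_LONG, whose set bits are the high bit of every byte in a 64-bit word. A rough Python sketch of that idea (illustrative only, not the Java code from the diff):

ASCII_MASK_LONG = 0x8080808080808080  # high bit of every byte in a 64-bit word

def count_leading_ascii(data, offset=0):
  # Illustrative sketch only: step to an 8-byte boundary one byte at a time,
  # then test eight bytes per iteration against the mask, as the Java hunk does.
  start = offset
  unaligned = (8 - (offset & 7)) & 7
  for _ in range(min(unaligned, len(data) - offset)):
    if data[offset] & 0x80:
      return offset - start
    offset += 1
  while offset + 8 <= len(data):
    word = int.from_bytes(data[offset:offset + 8], 'little')
    if word & ASCII_MASK_LONG:
      break  # this 8-byte word contains a non-ASCII byte; locate it below
    offset += 8
  while offset < len(data) and not data[offset] & 0x80:
    offset += 1
  return offset - start

print(count_leading_ascii(b'hello, world' + bytes([0xC3, 0xA9])))  # prints 12
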
@@ -59,7 +59,6 @@ import protobuf_unittest.UnittestProto.TestAllExtensions;
import protobuf_unittest.UnittestProto.TestAllTypes;
import protobuf_unittest.UnittestProto.TestAllTypes.NestedMessage;
import protobuf_unittest.UnittestProto.TestAllTypesOrBuilder;
import protobuf_unittest.UnittestProto.TestChildExtension;
import protobuf_unittest.UnittestProto.TestExtremeDefaultValues;
import protobuf_unittest.UnittestProto.TestOneof2;
import protobuf_unittest.UnittestProto.TestPackedTypes;
@@ -2024,5 +2023,4 @@ public class GeneratedMessageTest {
    assertThat(builder.getRepeatedField(REPEATED_NESTED_MESSAGE_EXTENSION, 0))
        .isEqualTo(NestedMessage.newBuilder().setBb(100).build());
  }

}

@@ -36,39 +36,40 @@ import os
import sys
import warnings


def _ApiVersionToImplementationType(api_version):
  if api_version == 2:
    return 'cpp'
  if api_version == 1:
    raise ValueError('api_version=1 is no longer supported.')
  if api_version == 0:
    return 'python'
  return None


_implementation_type = None
try:
  # pylint: disable=g-import-not-at-top
  from google.protobuf.internal import _api_implementation
  # The compile-time constants in the _api_implementation module can be used to
  # switch to a certain implementation of the Python API at build time.
  _api_version = _api_implementation.api_version
  _implementation_type = _ApiVersionToImplementationType(
      _api_implementation.api_version)
except ImportError:
  _api_version = -1 # Unspecified by compiler flags.

if _api_version == 1:
  raise ValueError('api_version=1 is no longer supported.')
  pass # Unspecified by compiler flags.


def _ApiVersionToImplementationType(api_version):
  if api_version == 3:
    return 'upb'
  if api_version == 2:
    return 'cpp'
  return 'python'

# TODO(jieluo): Remove _api_version and only keep implementation_type
# http://b/228103078
_default_implementation_type = _ApiVersionToImplementationType(_api_version)

if _implementation_type is None:
  _implementation_type = 'python'

# This environment variable can be used to switch to a certain implementation
# of the Python API, overriding the compile-time constants in the
# _api_implementation module. Right now only 'python', 'cpp' and 'upb' are
# valid values. Any other value will raise error.
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
                                 _default_implementation_type)
                                 _implementation_type)

if _implementation_type not in ('python', 'cpp', 'upb'):
  raise ValueError('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION {0} is not '

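The api_implementation module resolves the Python backend ('python', 'cpp', and now 'upb') from build-time constants, and the PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION environment variable overrides that choice; anything other than the three known values now raises. A minimal usage sketch, assuming a protobuf build that actually ships the requested backend:

import os

# The override has to be set before google.protobuf is imported for the first time.
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'upb'

from google.protobuf.internal import api_implementation

# Reports the backend that was selected: 'python', 'cpp' or 'upb'.
print(api_implementation.Type())
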
@@ -40,6 +40,7 @@ import textwrap
import unittest

from google.protobuf import any_pb2
from google.protobuf import struct_pb2
from google.protobuf import any_test_pb2
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_custom_options_pb2
@@ -1939,6 +1940,16 @@ class Proto3Tests(unittest.TestCase):
      text_format.Merge(text, message)
    self.assertEqual(str(e.exception), '3:11 : Expected "}".')

  def testParseExpandedAnyListValue(self):
    any_msg = any_pb2.Any()
    any_msg.Pack(struct_pb2.ListValue())
    msg = any_test_pb2.TestAny(any_value=any_msg)
    text = ('any_value {\n'
            ' [type.googleapis.com/google.protobuf.ListValue] {}\n'
            '}\n')
    parsed_msg = text_format.Parse(text, any_test_pb2.TestAny())
    self.assertEqual(msg, parsed_msg)

  def testProto3Optional(self):
    msg = test_proto3_optional_pb2.TestProto3Optional()
    self.assertEqual(text_format.MessageToString(msg), '')

@@ -365,7 +365,7 @@ class UnknownFieldsAccessorsTest(unittest.TestCase):
  def testUnknownExtensions(self):
    message = unittest_pb2.TestEmptyMessageWithExtensions()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(len(message.UnknownFields()), 98)
    self.assertEqual(len(unknown_fields.UnknownFieldSet(message)), 98)
    self.assertEqual(message.SerializeToString(), self.all_fields_data)

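The assertion now goes through the new google.protobuf.unknown_fields module rather than the deprecated message.UnknownFields() accessor. A small usage sketch, assuming the test-only unittest_pb2 protos from this diff are importable; any pair of message types sharing a wire format behaves the same way:

from google.protobuf import unknown_fields
from google.protobuf import unittest_pb2

# Serialize a populated message, then parse it into a type that declares no
# fields, so everything lands in the unknown field set.
full = unittest_pb2.TestAllTypes(optional_int32=42)
data = full.SerializeToString()

empty = unittest_pb2.TestEmptyMessage()
empty.ParseFromString(data)

ufs = unknown_fields.UnknownFieldSet(empty)
print(len(ufs))
for field in ufs:
  print(field.field_number, field.wire_type, field.data)
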
@@ -886,7 +886,10 @@ class _Parser(object):
    expanded_any_end_token = '}'
    expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name,
                                                         self.descriptor_pool)
    if not expanded_any_sub_message:
    # Direct comparison with None is used instead of implicit bool conversion
    # to avoid false positives with falsy initial values, e.g. for
    # google.protobuf.ListValue.
    if expanded_any_sub_message is None:
      raise ParseError('Type %s not found in descriptor pool' %
                       packed_type_name)
    while not tokenizer.TryConsume(expanded_any_end_token):

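The comment added here is the reason for the testParseExpandedAnyListValue case earlier in this diff: per the comment, the Python wrapper for google.protobuf.ListValue is sequence-like, so a freshly built empty instance is falsy even though a message object exists, and the old "if not expanded_any_sub_message" check would wrongly report the type as not found. A quick illustration:

from google.protobuf import struct_pb2

lv = struct_pb2.ListValue()  # successfully constructed, but still empty
print(len(lv))               # 0, the well-known-type wrapper is sequence-like
print(bool(lv))              # False, truthiness falls back to len()
print(lv is None)            # False, the check the parser now relies on
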
@@ -40,17 +40,13 @@ Simple usage example:


from google.protobuf.internal import api_implementation
if api_implementation.Type() != 'python':
  from google.protobuf.pyext import _message # pylint: disable=g-import-not-at-top

if api_implementation._c_module is not None: # pylint: disable=protected-access
  UnknownFieldSet = api_implementation._c_module.UnknownFieldSet # pylint: disable=protected-access
else:
  from google.protobuf.internal import decoder # pylint: disable=g-import-not-at-top
  from google.protobuf.internal import wire_format # pylint: disable=g-import-not-at-top

if api_implementation.Type() == 'cpp':

  UnknownFieldSet = _message.UnknownFieldSet

else:
  class UnknownField:
    """A parsed unknown field."""

@@ -78,7 +78,7 @@ bool RunProtoCompiler(const std::string& filename,
bool DecodeMetadata(const std::string& path, GeneratedCodeInfo* info);

// Finds all of the Annotations for a given source file and path.
// See Location.path in https://github.com/protocolbuffers/protobuf/blob/main/src/google/protobuf/descriptor.proto for
// See Location.path in https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto for
// explanation of what path vector is.
void FindAnnotationsOnPath(
    const GeneratedCodeInfo& info, const std::string& source_file,
@@ -88,7 +88,7 @@ void FindAnnotationsOnPath(
// Finds the Annotation for a given source file and path (or returns null if it
// couldn't). If there are several annotations for given path, returns the first
// one. See Location.path in
// https://github.com/protocolbuffers/protobuf/blob/main/src/google/protobuf/descriptor.proto for explanation of what path
// https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto for explanation of what path
// vector is.
const GeneratedCodeInfo::Annotation* FindAnnotationOnPath(
    const GeneratedCodeInfo& info, const std::string& source_file,

@@ -193,9 +193,11 @@ bool IsLazy(const FieldDescriptor* field, const Options& options,
         IsEagerlyVerifiedLazy(field, options, scc_analyzer);
}

bool IsEagerlyVerifiedLazyByProfile(const FieldDescriptor* field,
                                    const Options& options,
                                    MessageSCCAnalyzer* scc_analyzer) {
// Returns true if "field" is a message field that is backed by LazyField per
// profile (go/pdlazy).
inline bool IsEagerlyVerifiedLazyByProfile(const FieldDescriptor* field,
                                           const Options& options,
                                           MessageSCCAnalyzer* scc_analyzer) {
  return false;
}

@@ -364,12 +364,6 @@ inline bool IsExplicitLazy(const FieldDescriptor* field) {
  return field->options().lazy() || field->options().unverified_lazy();
}

// Returns true if "field" is a message field that is backed by LazyField per
// profile (go/pdlazy).
bool IsEagerlyVerifiedLazyByProfile(const FieldDescriptor* field,
                                    const Options& options,
                                    MessageSCCAnalyzer* scc_analyzer);

bool IsEagerlyVerifiedLazy(const FieldDescriptor* field, const Options& options,
                           MessageSCCAnalyzer* scc_analyzer);

@@ -2250,7 +2250,7 @@ std::pair<size_t, size_t> MessageGenerator::GenerateOffsets(
    //
    // Embed whether the field is eagerly verified lazy or inlined string to the
    // LSB of the offset.
    if (IsEagerlyVerifiedLazyByProfile(field, options_, scc_analyzer_)) {
    if (IsEagerlyVerifiedLazy(field, options_, scc_analyzer_)) {
      format(" | 0x1u // eagerly verified lazy\n");
    } else if (IsStringInlined(field, options_)) {
      format(" | 0x1u // inlined\n");

@@ -5031,6 +5031,13 @@ const FileDescriptor* DescriptorBuilder::BuildFile(
    }
  }

  static const int kMaximumPackageLength = 511;
  if (proto.package().size() > kMaximumPackageLength) {
    AddError(proto.package(), proto, DescriptorPool::ErrorCollector::NAME,
             "Package name is too long");
    return nullptr;
  }

  // If we have a fallback_database_, and we aren't doing lazy import building,
  // attempt to load all dependencies now, before checkpointing tables_. This
  // avoids confusion with recursive checkpoints.

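BuildFile() now rejects package names longer than 511 characters up front. A tiny sketch of the same rule, stated outside C++ (the constant comes from the hunk above; the helper name is made up for illustration):

MAX_PACKAGE_LENGTH = 511  # mirrors kMaximumPackageLength in the hunk above

def validate_package_name(package):
  # Same shape as the new check in DescriptorBuilder::BuildFile().
  if len(package) > MAX_PACKAGE_LENGTH:
    raise ValueError('Package name is too long')

validate_package_name('a' * 511)      # fine, right at the limit
try:
  validate_package_name('a' * 512)    # one over, rejected as in the test below
except ValueError as err:
  print(err)
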
@@ -6865,6 +6865,30 @@ TEST_F(ValidationErrorTest, UnusedImportWithOtherError) {
}


TEST_F(ValidationErrorTest, PackageTooLong) {
  BuildFileWithErrors(
      "name: \"foo.proto\" "
      "syntax: \"proto3\" "
      "package: "
      "\"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaa\"",
      "foo.proto: "
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      "aaaaaaaa: NAME: Package name is too long\n");
}

// ===================================================================
// DescriptorDatabase

@@ -257,7 +257,11 @@ bool Reflection::IsLazyExtension(const Message& message,
}

bool Reflection::IsLazilyVerifiedLazyField(const FieldDescriptor* field) const {
  return field->options().lazy() || field->options().unverified_lazy();
  if (field->options().unverified_lazy()) return true;

  // Message fields with [lazy=true] will be eagerly verified
  // (go/verified-lazy).
  return field->options().lazy() && !IsEagerlyVerifiedLazyField(field);
}

bool Reflection::IsEagerlyVerifiedLazyField(

@@ -78,6 +78,14 @@ class GeneratedMessageReflectionTestHelper {
  static bool IsLazyField(const Message& msg, const FieldDescriptor* field) {
    return msg.GetReflection()->IsLazyField(field);
  }
  static bool IsEagerlyVerifiedLazyField(const Message& msg,
                                         const FieldDescriptor* field) {
    return msg.GetReflection()->IsEagerlyVerifiedLazyField(field);
  }
  static bool IsLazilyVerifiedLazyField(const Message& msg,
                                        const FieldDescriptor* field) {
    return msg.GetReflection()->IsLazilyVerifiedLazyField(field);
  }
};

namespace {

@@ -60,7 +60,7 @@ namespace internal {
// It uses bit 0 == 0 to indicate an arena pointer and bit 0 == 1 to indicate a
// UFS+Arena-container pointer. Besides it uses bit 1 == 0 to indicate arena
// allocation and bit 1 == 1 to indicate heap allocation.
class InternalMetadata {
class PROTOBUF_EXPORT InternalMetadata {
 public:
  constexpr InternalMetadata() : ptr_(0) {}
  explicit InternalMetadata(Arena* arena, bool is_message_owned = false) {

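The comment at the top of this hunk describes a tagged-pointer scheme: the stored pointers are at least 4-byte aligned, so the two low bits are free; bit 0 distinguishes a bare arena pointer from a pointer to the unknown-fields-plus-arena container, and bit 1 records arena versus heap allocation. A rough Python sketch of that encoding (names and helpers invented for illustration, not protobuf internals):

TAG_CONTAINER = 0b01  # bit 0: 0 = bare arena pointer, 1 = UFS+Arena container
TAG_HEAP = 0b10       # bit 1: 0 = arena allocation,   1 = heap allocation

def pack(address, is_container, on_heap):
  # Pointers are at least 4-byte aligned, so the two low bits are unused.
  assert address & 0b11 == 0
  return address | (TAG_CONTAINER if is_container else 0) | (TAG_HEAP if on_heap else 0)

def unpack(word):
  return word & ~0b11, bool(word & TAG_CONTAINER), bool(word & TAG_HEAP)

print(unpack(pack(0x7f00a8, is_container=True, on_heap=False)))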