Merge pull request #5502 from TeBoring/github-sync

Down-integrate internal changes to GitHub.
Paul Yang 2018-12-20 16:27:26 -08:00 committed by GitHub
commit 1354e469d4
117 changed files with 2984 additions and 2626 deletions

View File

@ -32,7 +32,22 @@ package com.google.protobuf;
import static com.google.protobuf.Internal.checkNotNull;
import com.google.protobuf.DescriptorProtos.*;
import com.google.protobuf.DescriptorProtos.DescriptorProto;
import com.google.protobuf.DescriptorProtos.EnumDescriptorProto;
import com.google.protobuf.DescriptorProtos.EnumOptions;
import com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto;
import com.google.protobuf.DescriptorProtos.EnumValueOptions;
import com.google.protobuf.DescriptorProtos.FieldDescriptorProto;
import com.google.protobuf.DescriptorProtos.FieldOptions;
import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import com.google.protobuf.DescriptorProtos.FileOptions;
import com.google.protobuf.DescriptorProtos.MessageOptions;
import com.google.protobuf.DescriptorProtos.MethodDescriptorProto;
import com.google.protobuf.DescriptorProtos.MethodOptions;
import com.google.protobuf.DescriptorProtos.OneofDescriptorProto;
import com.google.protobuf.DescriptorProtos.OneofOptions;
import com.google.protobuf.DescriptorProtos.ServiceDescriptorProto;
import com.google.protobuf.DescriptorProtos.ServiceOptions;
import com.google.protobuf.Descriptors.FileDescriptor.Syntax;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
@ -1211,7 +1226,7 @@ public final class Descriptors {
StringBuilder result = new StringBuilder(name.length());
boolean isNextUpperCase = false;
for (int i = 0; i < name.length(); i++) {
Character ch = name.charAt(i);
char ch = name.charAt(i);
if (ch == '_') {
isNextUpperCase = true;
} else if (isNextUpperCase) {

View File

@ -1681,6 +1681,15 @@ public abstract class GeneratedMessageV3 extends AbstractMessage
}
}
@Override
public Message.Builder newBuilderForField(final FieldDescriptor field) {
if (field.isExtension()) {
return DynamicMessage.newBuilder(field.getMessageType());
} else {
return super.newBuilderForField(field);
}
}
protected final void mergeExtensionFields(final ExtendableMessage other) {
ensureExtensionsIsMutable();
extensions.mergeFrom(other.extensions);
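
A minimal usage sketch of the override added above (the wrapper class name is made up for illustration; it assumes the protobuf_unittest test protos used elsewhere in this change are on the classpath), mirroring the testGetBuilderForExtensionField tests added further down:

import com.google.protobuf.Message;
import protobuf_unittest.UnittestProto;
import protobuf_unittest.UnittestProto.TestAllExtensions;

public class ExtensionFieldBuilderSketch {
  public static void main(String[] args) {
    TestAllExtensions.Builder builder = TestAllExtensions.newBuilder();
    // With this override, requesting a builder for a message-typed extension
    // field returns a DynamicMessage.Builder for the extension's message type
    // instead of failing the lookup among regular fields.
    Message.Builder fieldBuilder =
        builder.newBuilderForField(
            UnittestProto.optionalNestedMessageExtension.getDescriptor());
    System.out.println(fieldBuilder.getDescriptorForType().getFullName());
  }
}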

View File

@ -1127,7 +1127,6 @@ public final class TextFormat {
PARSER.merge(input, builder);
}
/**
* Parse a text-format message from {@code input}.
*
@ -1167,7 +1166,6 @@ public final class TextFormat {
PARSER.merge(input, extensionRegistry, builder);
}
/**
* Parse a text-format message from {@code input}. Extensions will be recognized if they are
* registered in {@code extensionRegistry}.
@ -1187,7 +1185,6 @@ public final class TextFormat {
}
/**
* Parser for text-format proto2 instances. This class is thread-safe. The implementation largely
* follows google/protobuf/text_format.cc.
@ -1217,36 +1214,6 @@ public final class TextFormat {
FORBID_SINGULAR_OVERWRITES
}
/**
* Determines how to deal with repeated values for singular Message fields. For example,
* given a field "foo" containing subfields "baz" and "qux":
*
* <ul>
* <li>"foo { baz: 1 } foo { baz: 2 }", or
* <li>"foo { baz: 1 } foo { qux: 2 }"
* </ul>
*/
public enum MergingStyle {
/**
* Merge the values in standard protobuf fashion:
*
* <ul>
* <li>"foo { baz: 2 }" and
* <li>"foo { baz: 1, qux: 2 }", respectively.
* </ul>
*/
RECURSIVE,
/**
* Later values overwrite ("clobber") previous values:
*
* <ul>
* <li>"foo { baz: 2 }" and
* <li>"foo { qux: 2 }", respectively.
* </ul>
*/
NON_RECURSIVE
}
private final boolean allowUnknownFields;
private final boolean allowUnknownEnumValues;
private final boolean allowUnknownExtensions;
@ -1349,7 +1316,6 @@ public final class TextFormat {
}
private static final int BUFFER_SIZE = 4096;
// TODO(chrisn): See if working around java.io.Reader#read(CharBuffer)
@ -1435,20 +1401,18 @@ public final class TextFormat {
List<UnknownField> unknownFields = new ArrayList<UnknownField>();
while (!tokenizer.atEnd()) {
mergeField(tokenizer, extensionRegistry, target, MergingStyle.RECURSIVE, unknownFields);
mergeField(tokenizer, extensionRegistry, target, unknownFields);
}
checkUnknownFields(unknownFields);
}
/** Parse a single field from {@code tokenizer} and merge it into {@code builder}. */
private void mergeField(
final Tokenizer tokenizer,
final ExtensionRegistry extensionRegistry,
final MessageReflection.MergeTarget target,
final MergingStyle mergingStyle,
List<UnknownField> unknownFields)
throws ParseException {
mergeField(
@ -1456,7 +1420,6 @@ public final class TextFormat {
extensionRegistry,
target,
parseInfoTreeBuilder,
mergingStyle,
unknownFields);
}
@ -1466,7 +1429,6 @@ public final class TextFormat {
final ExtensionRegistry extensionRegistry,
final MessageReflection.MergeTarget target,
TextFormatParseInfoTree.Builder parseTreeBuilder,
final MergingStyle mergingStyle,
List<UnknownField> unknownFields)
throws ParseException {
FieldDescriptor field = null;
@ -1573,7 +1535,6 @@ public final class TextFormat {
field,
extension,
childParseTreeBuilder,
mergingStyle,
unknownFields);
} else {
consumeFieldValues(
@ -1583,7 +1544,6 @@ public final class TextFormat {
field,
extension,
parseTreeBuilder,
mergingStyle,
unknownFields);
}
} else {
@ -1595,7 +1555,6 @@ public final class TextFormat {
field,
extension,
parseTreeBuilder,
mergingStyle,
unknownFields);
}
@ -1620,7 +1579,6 @@ public final class TextFormat {
final FieldDescriptor field,
final ExtensionRegistry.ExtensionInfo extension,
final TextFormatParseInfoTree.Builder parseTreeBuilder,
final MergingStyle mergingStyle,
List<UnknownField> unknownFields)
throws ParseException {
// Support specifying repeated field values as a comma-separated list.
@ -1635,7 +1593,6 @@ public final class TextFormat {
field,
extension,
parseTreeBuilder,
mergingStyle,
unknownFields);
if (tokenizer.tryConsume("]")) {
// End of list.
@ -1652,7 +1609,6 @@ public final class TextFormat {
field,
extension,
parseTreeBuilder,
mergingStyle,
unknownFields);
}
}
@ -1665,7 +1621,6 @@ public final class TextFormat {
final FieldDescriptor field,
final ExtensionRegistry.ExtensionInfo extension,
final TextFormatParseInfoTree.Builder parseTreeBuilder,
final MergingStyle mergingStyle,
List<UnknownField> unknownFields)
throws ParseException {
if (singularOverwritePolicy == SingularOverwritePolicy.FORBID_SINGULAR_OVERWRITES
@ -1698,18 +1653,9 @@ public final class TextFormat {
endToken = "}";
}
final MessageReflection.MergeTarget subField;
Message defaultInstance = (extension == null) ? null : extension.defaultInstance;
switch (mergingStyle) {
case RECURSIVE:
subField = target.newMergeTargetForField(field, defaultInstance);
break;
case NON_RECURSIVE:
subField = target.newEmptyTargetForField(field, defaultInstance);
break;
default:
throw new AssertionError();
}
MessageReflection.MergeTarget subField =
target.newMergeTargetForField(field, defaultInstance);
while (!tokenizer.tryConsume(endToken)) {
if (tokenizer.atEnd()) {
@ -1720,7 +1666,6 @@ public final class TextFormat {
extensionRegistry,
subField,
parseTreeBuilder,
mergingStyle,
unknownFields);
}
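
A small sketch of the merge behavior that remains after the MergingStyle enum above is removed; only the former RECURSIVE style survives. It reuses the optional_nested_message example from the Python tests in this change, and the class name is illustrative only:

import com.google.protobuf.TextFormat;
import protobuf_unittest.UnittestProto.TestAllTypes;

public class TextFormatMergeSketch {
  public static void main(String[] args) throws Exception {
    TestAllTypes.Builder builder = TestAllTypes.newBuilder();
    // Repeated occurrences of a singular message field are merged
    // field-by-field (the retained RECURSIVE behavior); within the merged
    // sub-message a later value for the same scalar overwrites the earlier
    // one, while values for distinct sub-fields would accumulate.
    TextFormat.merge(
        "optional_nested_message { bb: 1 } optional_nested_message { bb: 2 }",
        builder);
    System.out.println(builder.getOptionalNestedMessage().getBb()); // 2
  }
}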

View File

@ -391,14 +391,12 @@ final class UnsafeUtil {
}
/**
* Gets the field with the given name within the class, or {@code null} if not found. If found,
* the field is made accessible.
* Gets the field with the given name within the class, or {@code null} if not found.
*/
private static Field field(Class<?> clazz, String fieldName) {
Field field;
try {
field = clazz.getDeclaredField(fieldName);
field.setAccessible(true);
} catch (Throwable t) {
// Failed to access the fields.
field = null;

View File

@ -1104,7 +1104,8 @@ final class Utf8 {
private static int partialIsValidUtf8NonAscii(byte[] bytes, int index, int limit) {
for (; ; ) {
int byte1, byte2;
int byte1;
int byte2;
// Optimize for interior runs of ASCII bytes.
do {

View File

@ -33,8 +33,10 @@ package com.google.protobuf;
import com.google.protobuf.Descriptors.EnumDescriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.Descriptors.OneofDescriptor;
import protobuf_unittest.UnittestProto;
import protobuf_unittest.UnittestProto.TestAllExtensions;
import protobuf_unittest.UnittestProto.TestAllTypes;
import protobuf_unittest.UnittestProto.TestAllTypes.NestedMessage;
import protobuf_unittest.UnittestProto.TestEmptyMessage;
import protobuf_unittest.UnittestProto.TestPackedTypes;
import java.util.Arrays;
@ -223,6 +225,17 @@ public class DynamicMessageTest extends TestCase {
packedReflectionTester.assertPackedFieldsSetViaReflection(message3);
}
public void testGetBuilderForExtensionField() {
DynamicMessage.Builder builder = DynamicMessage.newBuilder(TestAllExtensions.getDescriptor());
Message.Builder fieldBuilder =
builder.newBuilderForField(UnittestProto.optionalNestedMessageExtension.getDescriptor());
final int expected = 7432;
FieldDescriptor field =
NestedMessage.getDescriptor().findFieldByNumber(NestedMessage.BB_FIELD_NUMBER);
fieldBuilder.setField(field, expected);
assertEquals(expected, fieldBuilder.build().getField(field));
}
public void testDynamicMessageCopy() throws Exception {
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
TestUtil.setAllFields(builder);

View File

@ -578,6 +578,29 @@ public class GeneratedMessageTest extends TestCase {
TestUtil.assertAllExtensionsSet(message);
}
public void testGetBuilderForExtensionField() {
TestAllExtensions.Builder builder = TestAllExtensions.newBuilder();
Message.Builder fieldBuilder =
builder.newBuilderForField(UnittestProto.optionalNestedMessageExtension.getDescriptor());
final int expected = 7432;
FieldDescriptor field =
NestedMessage.getDescriptor().findFieldByNumber(NestedMessage.BB_FIELD_NUMBER);
fieldBuilder.setField(field, expected);
assertEquals(expected, fieldBuilder.build().getField(field));
}
public void testGetBuilderForNonMessageExtensionField() {
TestAllExtensions.Builder builder = TestAllExtensions.newBuilder();
try {
// This should throw an exception because the extension field is not a message.
builder.newBuilderForField(UnittestProto.optionalInt32Extension.getDescriptor());
fail("Exception was not thrown");
} catch (UnsupportedOperationException e) {
// This exception is expected.
}
}
public void testExtensionRepeatedSetters() throws Exception {
TestAllExtensions.Builder builder = TestAllExtensions.newBuilder();
TestUtil.setAllExtensions(builder);

View File

@ -42,6 +42,7 @@ import static com.google.protobuf.util.Timestamps.NANOS_PER_MICROSECOND;
import static com.google.protobuf.util.Timestamps.NANOS_PER_MILLISECOND;
import static com.google.protobuf.util.Timestamps.NANOS_PER_SECOND;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.protobuf.Duration;
import java.text.ParseException;
import java.util.Comparator;
@ -91,8 +92,8 @@ public final class Durations {
}
/**
* Compares two durations. The value returned is identical to what would be returned by:
* {@code Durations.comparator().compare(x, y)}.
* Compares two durations. The value returned is identical to what would be returned by: {@code
* Durations.comparator().compare(x, y)}.
*
* @return the value {@code 0} if {@code x == y}; a value less than {@code 0} if {@code x < y};
* and a value greater than {@code 0} if {@code x > y}
@ -151,6 +152,7 @@ public final class Durations {
* @throws IllegalArgumentException if {@code duration} is negative or invalid
* @throws NullPointerException if {@code duration} is {@code null}
*/
@CanIgnoreReturnValue
public static Duration checkNotNegative(Duration duration) {
checkValid(duration);
checkArgument(!isNegative(duration), "duration (%s) must not be negative", toString(duration));
@ -163,6 +165,7 @@ public final class Durations {
* @throws IllegalArgumentException if {@code duration} is negative, {@code ZERO}, or invalid
* @throws NullPointerException if {@code duration} is {@code null}
*/
@CanIgnoreReturnValue
public static Duration checkPositive(Duration duration) {
checkValid(duration);
checkArgument(
@ -173,19 +176,32 @@ public final class Durations {
}
/** Throws an {@link IllegalArgumentException} if the given {@link Duration} is not valid. */
@CanIgnoreReturnValue
public static Duration checkValid(Duration duration) {
long seconds = duration.getSeconds();
int nanos = duration.getNanos();
if (!isValid(seconds, nanos)) {
throw new IllegalArgumentException(String.format(
"Duration is not valid. See proto definition for valid values. "
+ "Seconds (%s) must be in range [-315,576,000,000, +315,576,000,000]. "
+ "Nanos (%s) must be in range [-999,999,999, +999,999,999]. "
+ "Nanos must have the same sign as seconds", seconds, nanos));
throw new IllegalArgumentException(
String.format(
"Duration is not valid. See proto definition for valid values. "
+ "Seconds (%s) must be in range [-315,576,000,000, +315,576,000,000]. "
+ "Nanos (%s) must be in range [-999,999,999, +999,999,999]. "
+ "Nanos must have the same sign as seconds",
seconds, nanos));
}
return duration;
}
/**
* Builds the given builder and throws an {@link IllegalArgumentException} if it is not valid. See
* {@link #checkValid(Duration)}.
*
* @return A valid, built {@link Duration}.
*/
public static Duration checkValid(Duration.Builder durationBuilder) {
return checkValid(durationBuilder.build());
}
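
A brief, hypothetical usage sketch of the new checkValid(Duration.Builder) overload; the class name is illustrative and Durations.toString is assumed from the existing API:

import com.google.protobuf.Duration;
import com.google.protobuf.util.Durations;

public class DurationCheckValidSketch {
  public static void main(String[] args) {
    // Builds the builder and validates the result in one call; out-of-range
    // seconds/nanos would throw IllegalArgumentException instead of silently
    // producing an invalid Duration.
    Duration thirtySeconds =
        Durations.checkValid(Duration.newBuilder().setSeconds(30));
    System.out.println(Durations.toString(thirtySeconds)); // "30s"
  }
}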
/**
* Convert Duration to string format. The string format will contain 3, 6, or 9 fractional digits
* depending on the precision required to represent the exact Duration value. For example: "1s",

View File

@ -30,6 +30,7 @@
package com.google.protobuf.util;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.FieldMask;
@ -88,15 +89,14 @@ final class FieldMaskTree {
}
/**
* Adds a field path to the tree. In a FieldMask, every field path matches the
* specified field as well as all its sub-fields. For example, a field path
* "foo.bar" matches field "foo.bar" and also "foo.bar.baz", etc. When adding
* a field path to the tree, redundant sub-paths will be removed. That is,
* after adding "foo.bar" to the tree, "foo.bar.baz" will be removed if it
* exists, which will turn the tree node for "foo.bar" to a leaf node.
* Likewise, if the field path to add is a sub-path of an existing leaf node,
* nothing will be changed in the tree.
* Adds a field path to the tree. In a FieldMask, every field path matches the specified field as
* well as all its sub-fields. For example, a field path "foo.bar" matches field "foo.bar" and
* also "foo.bar.baz", etc. When adding a field path to the tree, redundant sub-paths will be
* removed. That is, after adding "foo.bar" to the tree, "foo.bar.baz" will be removed if it
* exists, which will turn the tree node for "foo.bar" to a leaf node. Likewise, if the field path
* to add is a sub-path of an existing leaf node, nothing will be changed in the tree.
*/
@CanIgnoreReturnValue
FieldMaskTree addFieldPath(String path) {
String[] parts = path.split(FIELD_PATH_SEPARATOR_REGEX);
if (parts.length == 0) {
@ -125,9 +125,8 @@ final class FieldMaskTree {
return this;
}
/**
* Merges all field paths in a FieldMask into this tree.
*/
/** Merges all field paths in a FieldMask into this tree. */
@CanIgnoreReturnValue
FieldMaskTree mergeFromFieldMask(FieldMask mask) {
for (String path : mask.getPathsList()) {
addFieldPath(path);
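
FieldMaskTree is package-private, so the redundant-sub-path pruning described in the javadoc above is easiest to observe through FieldMaskUtil. A rough sketch, assuming the existing FieldMaskUtil.fromString/union/toString helpers (not shown in this diff):

import com.google.protobuf.FieldMask;
import com.google.protobuf.util.FieldMaskUtil;

public class FieldMaskTreeSketch {
  public static void main(String[] args) {
    // FieldMaskUtil.union builds a FieldMaskTree internally, so the
    // sub-path "foo.bar.baz" is absorbed into the broader "foo.bar" path.
    FieldMask a = FieldMaskUtil.fromString("foo.bar,baz");
    FieldMask b = FieldMaskUtil.fromString("foo.bar.baz");
    FieldMask union = FieldMaskUtil.union(a, b);
    System.out.println(FieldMaskUtil.toString(union)); // baz,foo.bar
  }
}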

View File

@ -36,12 +36,12 @@ import com.google.common.base.CaseFormat;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.primitives.Ints;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Internal;
import com.google.protobuf.Message;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@ -277,9 +277,7 @@ public class FieldMaskUtil {
/**
* Whether to replace message fields (i.e., discard existing content in
* destination message fields) when merging.
* Default behavior is to merge the source message field into the
* destination message field.
* destination message fields).
*/
public boolean replaceMessageFields() {
return replaceMessageFields;
@ -287,9 +285,7 @@ public class FieldMaskUtil {
/**
* Whether to replace repeated fields (i.e., discard existing content in
* destination repeated fields) when merging.
* Default behavior is to append elements from source repeated field to the
* destination repeated field.
* destination repeated fields).
*/
public boolean replaceRepeatedFields() {
return replaceRepeatedFields;
@ -297,30 +293,51 @@ public class FieldMaskUtil {
/**
* Whether to replace primitive (non-repeated and non-message) fields in
* destination message fields with the source primitive fields (i.e., if the
* field is set in the source, the value is copied to the
* destination; if the field is unset in the source, the field is cleared
* from the destination) when merging.
*
* <p>Default behavior is to always set the value of the source primitive
* field to the destination primitive field, and if the source field is
* unset, the default value of the source field is copied to the
* destination.
* destination message fields with the source primitive fields (i.e., clear
* destination field if source field is not set).
*/
public boolean replacePrimitiveFields() {
return replacePrimitiveFields;
}
/**
* Specify whether to replace message fields. Defaults to false.
*
* <p>If true, discard existing content in destination message fields when merging.
*
* <p>If false, merge the source message field into the destination message field.
*/
@CanIgnoreReturnValue
public MergeOptions setReplaceMessageFields(boolean value) {
replaceMessageFields = value;
return this;
}
/**
* Specify whether to replace repeated fields. Defaults to false.
*
* <p>If true, discard existing content in destination repeated fields when merging.
*
* <p>If false, append elements from source repeated field to the destination repeated field.
*/
@CanIgnoreReturnValue
public MergeOptions setReplaceRepeatedFields(boolean value) {
replaceRepeatedFields = value;
return this;
}
/**
* Specify whether to replace primitive (non-repeated and non-message) fields in destination
* message fields with the source primitive fields. Defaults to false.
*
* <p>If true, set the value of the destination primitive field to the source primitive field if
* the source field is set, but clear the destination field otherwise.
*
* <p>If false, always set the value of the destination primitive field to the source primitive
* field, and if the source field is unset, the default value of the source field is copied to
* the destination.
*/
@CanIgnoreReturnValue
public MergeOptions setReplacePrimitiveFields(boolean value) {
replacePrimitiveFields = value;
return this;
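
A hedged sketch of the now-chainable MergeOptions setters in use, assuming the existing FieldMaskUtil.merge overload that accepts options and the protobuf_unittest.TestAllTypes message; the class name is illustrative:

import com.google.protobuf.util.FieldMaskUtil;
import com.google.protobuf.util.FieldMaskUtil.MergeOptions;
import protobuf_unittest.UnittestProto.TestAllTypes;

public class FieldMaskMergeSketch {
  public static void main(String[] args) {
    TestAllTypes source = TestAllTypes.newBuilder().addRepeatedInt32(1).build();
    TestAllTypes.Builder destination = TestAllTypes.newBuilder().addRepeatedInt32(2);
    // Replace rather than append repeated fields for the masked paths.
    MergeOptions options = new MergeOptions().setReplaceRepeatedFields(true);
    FieldMaskUtil.merge(
        FieldMaskUtil.fromString("repeated_int32"), source, destination, options);
    System.out.println(destination.getRepeatedInt32List()); // [1]
  }
}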

View File

@ -32,6 +32,7 @@ package com.google.protobuf.util;
import com.google.common.base.Preconditions;
import com.google.common.io.BaseEncoding;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
@ -226,7 +227,7 @@ public class JsonFormat {
return new Printer(
registry,
false,
fieldsToAlwaysOutput,
Collections.unmodifiableSet(new HashSet<>(fieldsToAlwaysOutput)),
preservingProtoFieldNames,
omittingInsignificantWhitespace,
printingEnumsAsInts,
@ -467,9 +468,10 @@ public class JsonFormat {
private Builder() {}
/**
* Adds a message type and all types defined in the same .proto file as
* well as all transitively imported .proto files to this {@link Builder}.
* Adds a message type and all types defined in the same .proto file as well as all
* transitively imported .proto files to this {@link Builder}.
*/
@CanIgnoreReturnValue
public Builder add(Descriptor messageType) {
if (types == null) {
throw new IllegalStateException("A TypeRegistry.Builer can only be used once.");
@ -479,9 +481,10 @@ public class JsonFormat {
}
/**
* Adds message types and all types defined in the same .proto file as
* well as all transitively imported .proto files to this {@link Builder}.
* Adds message types and all types defined in the same .proto file as well as all
* transitively imported .proto files to this {@link Builder}.
*/
@CanIgnoreReturnValue
public Builder add(Iterable<Descriptor> messageTypes) {
if (types == null) {
throw new IllegalStateException("A TypeRegistry.Builder can only be used once.");
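
A small illustrative sketch of the Builder.add call annotated above, assuming JsonFormat.printer().usingTypeRegistry and Any.pack from the existing API; the @CanIgnoreReturnValue annotation only marks the returned Builder as safely ignorable, so chaining is unchanged:

import com.google.protobuf.Any;
import com.google.protobuf.Duration;
import com.google.protobuf.util.JsonFormat;
import com.google.protobuf.util.JsonFormat.TypeRegistry;

public class TypeRegistrySketch {
  public static void main(String[] args) throws Exception {
    // add() registers Duration plus everything defined in duration.proto and
    // its transitive imports, which lets the printer expand the packed Any.
    TypeRegistry registry =
        TypeRegistry.newBuilder().add(Duration.getDescriptor()).build();
    Any any = Any.pack(Duration.newBuilder().setSeconds(5).build());
    System.out.println(
        JsonFormat.printer().usingTypeRegistry(registry).print(any));
  }
}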

View File

@ -36,6 +36,7 @@ import static com.google.common.math.LongMath.checkedAdd;
import static com.google.common.math.LongMath.checkedMultiply;
import static com.google.common.math.LongMath.checkedSubtract;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.protobuf.Duration;
import com.google.protobuf.Timestamp;
import java.text.ParseException;
@ -119,8 +120,8 @@ public final class Timestamps {
}
/**
* Compares two timestamps. The value returned is identical to what would be returned by:
* {@code Timestamps.comparator().compare(x, y)}.
* Compares two timestamps. The value returned is identical to what would be returned by: {@code
* Timestamps.comparator().compare(x, y)}.
*
* @return the value {@code 0} if {@code x == y}; a value less than {@code 0} if {@code x < y};
* and a value greater than {@code 0} if {@code x > y}
@ -162,18 +163,31 @@ public final class Timestamps {
}
/** Throws an {@link IllegalArgumentException} if the given {@link Timestamp} is not valid. */
@CanIgnoreReturnValue
public static Timestamp checkValid(Timestamp timestamp) {
long seconds = timestamp.getSeconds();
int nanos = timestamp.getNanos();
if (!isValid(seconds, nanos)) {
throw new IllegalArgumentException(String.format(
"Timestamp is not valid. See proto definition for valid values. "
+ "Seconds (%s) must be in range [-62,135,596,800, +253,402,300,799]. "
+ "Nanos (%s) must be in range [0, +999,999,999].", seconds, nanos));
throw new IllegalArgumentException(
String.format(
"Timestamp is not valid. See proto definition for valid values. "
+ "Seconds (%s) must be in range [-62,135,596,800, +253,402,300,799]. "
+ "Nanos (%s) must be in range [0, +999,999,999].",
seconds, nanos));
}
return timestamp;
}
/**
* Builds the given builder and throws an {@link IllegalArgumentException} if it is not valid. See
* {@link #checkValid(Timestamp)}.
*
* @return A valid, built {@link Timestamp}.
*/
public static Timestamp checkValid(Timestamp.Builder timestampBuilder) {
return checkValid(timestampBuilder.build());
}
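
As with Durations, a short hypothetical sketch of the new checkValid(Timestamp.Builder) overload; Timestamps.toString is assumed from the existing API and the class name is illustrative:

import com.google.protobuf.Timestamp;
import com.google.protobuf.util.Timestamps;

public class TimestampCheckValidSketch {
  public static void main(String[] args) {
    // Same pattern as the Duration overload: build and validate in one step.
    Timestamp t =
        Timestamps.checkValid(Timestamp.newBuilder().setSeconds(1545350400L));
    System.out.println(Timestamps.toString(t)); // 2018-12-21T00:00:00Z
  }
}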
/**
* Convert Timestamp to RFC 3339 date string format. The output will always be Z-normalized and
* uses 3, 6 or 9 fractional digits as required to represent the exact value. Note that Timestamp

View File

@ -49,19 +49,20 @@ import com.google.protobuf.UInt32Value;
import com.google.protobuf.UInt64Value;
import com.google.protobuf.Value;
import com.google.protobuf.util.JsonFormat.TypeRegistry;
import com.google.protobuf.util.JsonTestProto.TestAllTypes;
import com.google.protobuf.util.JsonTestProto.TestAllTypes.NestedEnum;
import com.google.protobuf.util.JsonTestProto.TestAllTypes.NestedMessage;
import com.google.protobuf.util.JsonTestProto.TestAny;
import com.google.protobuf.util.JsonTestProto.TestCustomJsonName;
import com.google.protobuf.util.JsonTestProto.TestDuration;
import com.google.protobuf.util.JsonTestProto.TestFieldMask;
import com.google.protobuf.util.JsonTestProto.TestMap;
import com.google.protobuf.util.JsonTestProto.TestOneof;
import com.google.protobuf.util.JsonTestProto.TestRecursive;
import com.google.protobuf.util.JsonTestProto.TestStruct;
import com.google.protobuf.util.JsonTestProto.TestTimestamp;
import com.google.protobuf.util.JsonTestProto.TestWrappers;
import com.google.protobuf.util.proto.JsonTestProto.TestAllTypes;
import com.google.protobuf.util.proto.JsonTestProto.TestAllTypes.AliasedEnum;
import com.google.protobuf.util.proto.JsonTestProto.TestAllTypes.NestedEnum;
import com.google.protobuf.util.proto.JsonTestProto.TestAllTypes.NestedMessage;
import com.google.protobuf.util.proto.JsonTestProto.TestAny;
import com.google.protobuf.util.proto.JsonTestProto.TestCustomJsonName;
import com.google.protobuf.util.proto.JsonTestProto.TestDuration;
import com.google.protobuf.util.proto.JsonTestProto.TestFieldMask;
import com.google.protobuf.util.proto.JsonTestProto.TestMap;
import com.google.protobuf.util.proto.JsonTestProto.TestOneof;
import com.google.protobuf.util.proto.JsonTestProto.TestRecursive;
import com.google.protobuf.util.proto.JsonTestProto.TestStruct;
import com.google.protobuf.util.proto.JsonTestProto.TestTimestamp;
import com.google.protobuf.util.proto.JsonTestProto.TestWrappers;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@ -676,7 +677,7 @@ public class JsonFormatTest extends TestCase {
+ "}",
builder);
fail();
} catch (InvalidProtocolBufferException e) {
// Exception expected.
}
@ -1159,8 +1160,8 @@ public class JsonFormatTest extends TestCase {
}
public void testParserAcceptBase64Variants() throws Exception {
assertAccepts("optionalBytes", "AQI"); // No padding
assertAccepts("optionalBytes", "-_w"); // base64Url, no padding
assertAccepts("optionalBytes", "AQI"); // No padding
assertAccepts("optionalBytes", "-_w"); // base64Url, no padding
}
public void testParserRejectInvalidEnumValue() throws Exception {
@ -1197,6 +1198,23 @@ public class JsonFormatTest extends TestCase {
assertEquals(0, builder.getOptionalNestedEnumValue());
}
public void testParserSupportAliasEnums() throws Exception {
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
String json = "{\n" + " \"optionalAliasedEnum\": \"QUX\"\n" + "}";
JsonFormat.parser().merge(json, builder);
assertEquals(AliasedEnum.ALIAS_BAZ, builder.getOptionalAliasedEnum());
builder = TestAllTypes.newBuilder();
json = "{\n" + " \"optionalAliasedEnum\": \"qux\"\n" + "}";
JsonFormat.parser().merge(json, builder);
assertEquals(AliasedEnum.ALIAS_BAZ, builder.getOptionalAliasedEnum());
builder = TestAllTypes.newBuilder();
json = "{\n" + " \"optionalAliasedEnum\": \"bAz\"\n" + "}";
JsonFormat.parser().merge(json, builder);
assertEquals(AliasedEnum.ALIAS_BAZ, builder.getOptionalAliasedEnum());
}
public void testUnknownEnumMap() throws Exception {
TestMap.Builder builder = TestMap.newBuilder();
JsonFormat.parser()
@ -1280,7 +1298,8 @@ public class JsonFormatTest extends TestCase {
+ " \"repeatedString\": [],\n"
+ " \"repeatedBytes\": [],\n"
+ " \"repeatedNestedMessage\": [],\n"
+ " \"repeatedNestedEnum\": []\n"
+ " \"repeatedNestedEnum\": [],\n"
+ " \"optionalAliasedEnum\": \"ALIAS_FOO\"\n"
+ "}",
JsonFormat.printer().includingDefaultValueFields().print(message));
@ -1644,11 +1663,11 @@ public class JsonFormatTest extends TestCase {
mapBuilder.putStringToInt32Map("\ud834\udd20", 3); // utf-8 F0 9D 84 A0
mapBuilder.putStringToInt32Map("foo", 99);
mapBuilder.putStringToInt32Map("xxx", 123);
mapBuilder.putStringToInt32Map("\u20ac", 1); // utf-8 E2 82 AC
mapBuilder.putStringToInt32Map("\u20ac", 1); // utf-8 E2 82 AC
mapBuilder.putStringToInt32Map("abc", 20);
mapBuilder.putStringToInt32Map("19", 19);
mapBuilder.putStringToInt32Map("8", 8);
mapBuilder.putStringToInt32Map("\ufb00", 2); // utf-8 EF AC 80
mapBuilder.putStringToInt32Map("\ufb00", 2); // utf-8 EF AC 80
mapBuilder.putInt32ToInt32Map(3, 3);
mapBuilder.putInt32ToInt32Map(10, 10);
mapBuilder.putInt32ToInt32Map(5, 5);

View File

@ -32,7 +32,7 @@ syntax = "proto3";
package json_test;
option java_package = "com.google.protobuf.util";
option java_package = "com.google.protobuf.util.proto";
option java_outer_classname = "JsonTestProto";
import "google/protobuf/any.proto";
@ -48,6 +48,17 @@ message TestAllTypes {
BAR = 1;
BAZ = 2;
}
enum AliasedEnum {
option allow_alias = true;
ALIAS_FOO = 0;
ALIAS_BAR = 1;
ALIAS_BAZ = 2;
QUX = 2;
qux = 2;
bAz = 2;
}
message NestedMessage {
int32 value = 1;
}
@ -69,6 +80,7 @@ message TestAllTypes {
bytes optional_bytes = 15;
NestedMessage optional_nested_message = 18;
NestedEnum optional_nested_enum = 21;
AliasedEnum optional_aliased_enum = 52;
// Repeated
repeated int32 repeated_int32 = 31;

View File

@ -55,7 +55,7 @@ goog.forwardDeclare('jspb.BinaryMessage');
goog.forwardDeclare('jspb.BinaryReader');
goog.forwardDeclare('jspb.BinaryWriter');
goog.forwardDeclare('jspb.Message');
goog.forwardDeclare('jsproto.BinaryExtension');
goog.forwardDeclare('jsprotolib.BinaryExtension');
@ -122,7 +122,7 @@ jspb.RepeatedFieldType;
!Uint8Array|
!jspb.ConstBinaryMessage|
!jspb.BinaryMessage|
!jsproto.BinaryExtension}
!jsprotolib.BinaryExtension}
*/
jspb.AnyFieldType;

View File

@ -63,7 +63,7 @@ goog.require('jspb.utils');
* @struct
*/
jspb.BinaryIterator = function(opt_decoder, opt_next, opt_elements) {
/** @private {jspb.BinaryDecoder} */
/** @private {?jspb.BinaryDecoder} */
this.decoder_ = null;
/**

View File

@ -97,7 +97,7 @@ jspb.BinaryReader = function(opt_bytes, opt_start, opt_length) {
/**
* User-defined reader callbacks.
* @private {Object<string, function(!jspb.BinaryReader):*>}
* @private {?Object<string, function(!jspb.BinaryReader):*>}
*/
this.readCallbacks_ = null;
};

View File

@ -149,58 +149,60 @@ describe('Message test suite', function() {
});
it('testComplexConversion', function() {
var data1 = ['a',,, [, 11], [[, 22], [, 33]],, ['s1', 's2'],, 1];
var data2 = ['a',,, [, 11], [[, 22], [, 33]],, ['s1', 's2'],, 1];
var data1 = ['a', , , [, 11], [[, 22], [, 33]], , ['s1', 's2'], , 1];
var data2 = ['a', , , [, 11], [[, 22], [, 33]], , ['s1', 's2'], , 1];
var foo = new proto.jspb.test.Complex(data1);
var bar = new proto.jspb.test.Complex(data2);
var result = foo.toObject();
assertObjectEquals({
aString: 'a',
anOutOfOrderBool: 1,
aNestedMessage: {
anInt: 11
},
aRepeatedMessageList: [{anInt: 22}, {anInt: 33}],
aRepeatedStringList: ['s1', 's2']
}, result);
assertObjectEquals(
{
aString: 'a',
anOutOfOrderBool: 1,
aNestedMessage: {anInt: 11},
aRepeatedMessageList: [{anInt: 22}, {anInt: 33}],
aRepeatedStringList: ['s1', 's2']
},
result);
// Now test with the jspb instances included.
result = foo.toObject(true /* opt_includeInstance */);
assertObjectEquals({
aString: 'a',
anOutOfOrderBool: 1,
aNestedMessage: {
anInt: 11,
$jspbMessageInstance: foo.getANestedMessage()
},
aRepeatedMessageList: [
{anInt: 22, $jspbMessageInstance: foo.getARepeatedMessageList()[0]},
{anInt: 33, $jspbMessageInstance: foo.getARepeatedMessageList()[1]}
],
aRepeatedStringList: ['s1', 's2'],
$jspbMessageInstance: foo
}, result);
assertObjectEquals(
{
aString: 'a',
anOutOfOrderBool: 1,
aNestedMessage:
{anInt: 11, $jspbMessageInstance: foo.getANestedMessage()},
aRepeatedMessageList: [
{anInt: 22, $jspbMessageInstance: foo.getARepeatedMessageList()[0]},
{anInt: 33, $jspbMessageInstance: foo.getARepeatedMessageList()[1]}
],
aRepeatedStringList: ['s1', 's2'],
$jspbMessageInstance: foo
},
result);
});
it('testMissingFields', function() {
var foo = new proto.jspb.test.Complex([
undefined, undefined, undefined, [],
undefined, undefined, undefined, undefined]);
undefined, undefined, undefined, [], undefined, undefined, undefined,
undefined
]);
var bar = new proto.jspb.test.Complex([
undefined, undefined, undefined, [],
undefined, undefined, undefined, undefined]);
undefined, undefined, undefined, [], undefined, undefined, undefined,
undefined
]);
var result = foo.toObject();
assertObjectEquals({
aString: undefined,
anOutOfOrderBool: undefined,
aNestedMessage: {
anInt: undefined
},
// Note: JsPb converts undefined repeated fields to empty arrays.
aRepeatedMessageList: [],
aRepeatedStringList: []
}, result);
assertObjectEquals(
{
aString: undefined,
anOutOfOrderBool: undefined,
aNestedMessage: {anInt: undefined},
// Note: JsPb converts undefined repeated fields to empty arrays.
aRepeatedMessageList: [],
aRepeatedStringList: []
},
result);
});
@ -214,20 +216,21 @@ describe('Message test suite', function() {
it('testSpecialCases', function() {
// Note: Some property names are reserved in JavaScript.
// These names are converted to the Js property named pb_<reserved_name>.
var special =
new proto.jspb.test.SpecialCases(['normal', 'default', 'function',
'var']);
var special = new proto.jspb.test.SpecialCases(
['normal', 'default', 'function', 'var']);
var result = special.toObject();
assertObjectEquals({
normal: 'normal',
pb_default: 'default',
pb_function: 'function',
pb_var: 'var'
}, result);
assertObjectEquals(
{
normal: 'normal',
pb_default: 'default',
pb_function: 'function',
pb_var: 'var'
},
result);
});
it('testDefaultValues', function() {
var defaultString = "default<>\'\"abc";
var defaultString = 'default<>\'"abc';
var response = new proto.jspb.test.DefaultValues();
// Test toObject
@ -291,8 +294,10 @@ describe('Message test suite', function() {
// Test that clearing the values reverts them to the default state.
response = makeDefault(['blah', false, 111, 77]);
response.clearStringField(); response.clearBoolField();
response.clearIntField(); response.clearEnumField();
response.clearStringField();
response.clearBoolField();
response.clearIntField();
response.clearEnumField();
assertEquals(defaultString, response.getStringField());
assertEquals(true, response.getBoolField());
assertEquals(11, response.getIntField());
@ -304,8 +309,10 @@ describe('Message test suite', function() {
// Test that setFoo(null) clears the values.
response = makeDefault(['blah', false, 111, 77]);
response.setStringField(null); response.setBoolField(null);
response.setIntField(undefined); response.setEnumField(undefined);
response.setStringField(null);
response.setBoolField(null);
response.setIntField(undefined);
response.setEnumField(undefined);
assertEquals(defaultString, response.getStringField());
assertEquals(true, response.getBoolField());
assertEquals(11, response.getIntField());
@ -321,13 +328,13 @@ describe('Message test suite', function() {
assertTrue(jspb.Message.equals(s1, new proto.jspb.test.Simple1(['hi'])));
assertFalse(jspb.Message.equals(s1, new proto.jspb.test.Simple1(['bye'])));
var s1b = new proto.jspb.test.Simple1(['hi', ['hello']]);
assertTrue(jspb.Message.equals(s1b,
new proto.jspb.test.Simple1(['hi', ['hello']])));
assertTrue(jspb.Message.equals(s1b,
new proto.jspb.test.Simple1(['hi', ['hello', undefined,
undefined, undefined]])));
assertFalse(jspb.Message.equals(s1b,
new proto.jspb.test.Simple1(['no', ['hello']])));
assertTrue(jspb.Message.equals(
s1b, new proto.jspb.test.Simple1(['hi', ['hello']])));
assertTrue(jspb.Message.equals(s1b, new proto.jspb.test.Simple1([
'hi', ['hello', undefined, undefined, undefined]
])));
assertFalse(jspb.Message.equals(
s1b, new proto.jspb.test.Simple1(['no', ['hello']])));
// Test with messages of different types
var s2 = new proto.jspb.test.Simple2(['hi']);
assertFalse(jspb.Message.equals(s1, s2));
@ -335,18 +342,18 @@ describe('Message test suite', function() {
it('testEquals_softComparison', function() {
var s1 = new proto.jspb.test.Simple1(['hi', [], null]);
assertTrue(jspb.Message.equals(s1,
new proto.jspb.test.Simple1(['hi', []])));
assertTrue(
jspb.Message.equals(s1, new proto.jspb.test.Simple1(['hi', []])));
var s1b = new proto.jspb.test.Simple1(['hi', [], true]);
assertTrue(jspb.Message.equals(s1b,
new proto.jspb.test.Simple1(['hi', [], 1])));
assertTrue(
jspb.Message.equals(s1b, new proto.jspb.test.Simple1(['hi', [], 1])));
});
it('testEqualsComplex', function() {
var data1 = ['a',,, [, 11], [[, 22], [, 33]],, ['s1', 's2'],, 1];
var data2 = ['a',,, [, 11], [[, 22], [, 34]],, ['s1', 's2'],, 1];
var data3 = ['a',,, [, 11], [[, 22]],, ['s1', 's2'],, 1];
var data1 = ['a', , , [, 11], [[, 22], [, 33]], , ['s1', 's2'], , 1];
var data2 = ['a', , , [, 11], [[, 22], [, 34]], , ['s1', 's2'], , 1];
var data3 = ['a', , , [, 11], [[, 22]], , ['s1', 's2'], , 1];
var data4 = ['hi'];
var c1a = new proto.jspb.test.Complex(data1);
var c1b = new proto.jspb.test.Complex(data1);
@ -363,42 +370,34 @@ describe('Message test suite', function() {
it('testEqualsExtensionsConstructed', function() {
assertTrue(jspb.Message.equals(
new proto.jspb.test.HasExtensions([]),
new proto.jspb.test.HasExtensions([{}])
));
new proto.jspb.test.HasExtensions([{}])));
assertTrue(jspb.Message.equals(
new proto.jspb.test.HasExtensions(['hi', {100: [{200: 'a'}]}]),
new proto.jspb.test.HasExtensions(['hi', {100: [{200: 'a'}]}])
));
new proto.jspb.test.HasExtensions(['hi', {100: [{200: 'a'}]}])));
assertFalse(jspb.Message.equals(
new proto.jspb.test.HasExtensions(['hi', {100: [{200: 'a'}]}]),
new proto.jspb.test.HasExtensions(['hi', {100: [{200: 'b'}]}])
));
new proto.jspb.test.HasExtensions(['hi', {100: [{200: 'b'}]}])));
assertTrue(jspb.Message.equals(
new proto.jspb.test.HasExtensions([{100: [{200: 'a'}]}]),
new proto.jspb.test.HasExtensions([{100: [{200: 'a'}]}])
));
new proto.jspb.test.HasExtensions([{100: [{200: 'a'}]}])));
assertTrue(jspb.Message.equals(
new proto.jspb.test.HasExtensions([{100: [{200: 'a'}]}]),
new proto.jspb.test.HasExtensions([,,, {100: [{200: 'a'}]}])
));
new proto.jspb.test.HasExtensions([, , , {100: [{200: 'a'}]}])));
assertTrue(jspb.Message.equals(
new proto.jspb.test.HasExtensions([,,, {100: [{200: 'a'}]}]),
new proto.jspb.test.HasExtensions([{100: [{200: 'a'}]}])
));
new proto.jspb.test.HasExtensions([, , , {100: [{200: 'a'}]}]),
new proto.jspb.test.HasExtensions([{100: [{200: 'a'}]}])));
assertTrue(jspb.Message.equals(
new proto.jspb.test.HasExtensions(['hi', {100: [{200: 'a'}]}]),
new proto.jspb.test.HasExtensions(['hi',,, {100: [{200: 'a'}]}])
));
new proto.jspb.test.HasExtensions(['hi', , , {100: [{200: 'a'}]}])));
assertTrue(jspb.Message.equals(
new proto.jspb.test.HasExtensions(['hi',,, {100: [{200: 'a'}]}]),
new proto.jspb.test.HasExtensions(['hi', {100: [{200: 'a'}]}])
));
new proto.jspb.test.HasExtensions(['hi', , , {100: [{200: 'a'}]}]),
new proto.jspb.test.HasExtensions(['hi', {100: [{200: 'a'}]}])));
});
it('testEqualsExtensionsUnconstructed', function() {
assertTrue(jspb.Message.compareFields([], [{}]));
assertTrue(jspb.Message.compareFields([,,, {}], []));
assertTrue(jspb.Message.compareFields([,,, {}], [,, {}]));
assertTrue(jspb.Message.compareFields([, , , {}], []));
assertTrue(jspb.Message.compareFields([, , , {}], [, , {}]));
assertTrue(jspb.Message.compareFields(
['hi', {100: [{200: 'a'}]}], ['hi', {100: [{200: 'a'}]}]));
assertFalse(jspb.Message.compareFields(
@ -406,13 +405,13 @@ describe('Message test suite', function() {
assertTrue(jspb.Message.compareFields(
[{100: [{200: 'a'}]}], [{100: [{200: 'a'}]}]));
assertTrue(jspb.Message.compareFields(
[{100: [{200: 'a'}]}], [,,, {100: [{200: 'a'}]}]));
[{100: [{200: 'a'}]}], [, , , {100: [{200: 'a'}]}]));
assertTrue(jspb.Message.compareFields(
[,,, {100: [{200: 'a'}]}], [{100: [{200: 'a'}]}]));
[, , , {100: [{200: 'a'}]}], [{100: [{200: 'a'}]}]));
assertTrue(jspb.Message.compareFields(
['hi', {100: [{200: 'a'}]}], ['hi',,, {100: [{200: 'a'}]}]));
['hi', {100: [{200: 'a'}]}], ['hi', , , {100: [{200: 'a'}]}]));
assertTrue(jspb.Message.compareFields(
['hi',,, {100: [{200: 'a'}]}], ['hi', {100: [{200: 'a'}]}]));
['hi', , , {100: [{200: 'a'}]}], ['hi', {100: [{200: 'a'}]}]));
});
it('testInitializeMessageWithLastFieldNull', function() {
@ -436,13 +435,13 @@ describe('Message test suite', function() {
it('testToMap', function() {
var p1 = new proto.jspb.test.Simple1(['k', ['v']]);
var p2 = new proto.jspb.test.Simple1(['k1', ['v1', 'v2']]);
var soymap = jspb.Message.toMap([p1, p2],
proto.jspb.test.Simple1.prototype.getAString,
var soymap = jspb.Message.toMap(
[p1, p2], proto.jspb.test.Simple1.prototype.getAString,
proto.jspb.test.Simple1.prototype.toObject);
assertEquals('k', soymap['k'].aString);
assertArrayEquals(['v'], soymap['k'].aRepeatedStringList);
var protomap = jspb.Message.toMap([p1, p2],
proto.jspb.test.Simple1.prototype.getAString);
var protomap = jspb.Message.toMap(
[p1, p2], proto.jspb.test.Simple1.prototype.getAString);
assertEquals('k', protomap['k'].getAString());
assertArrayEquals(['v'], protomap['k'].getARepeatedStringList());
});
@ -463,8 +462,12 @@ describe('Message test suite', function() {
extension.setExt('e1');
original.setExtension(proto.jspb.test.IsExtension.extField, extension);
var clone = original.clone();
assertArrayEquals(['v1',, ['x1', ['y1', 'z1']],,
[['x2', ['y2', 'z2']], ['x3', ['y3', 'z3']]], bytes1,, { 100: [, 'e1'] }],
assertArrayEquals(
[
'v1', , ['x1', ['y1', 'z1']], ,
[['x2', ['y2', 'z2']], ['x3', ['y3', 'z3']]], bytes1, ,
{100: [, 'e1']}
],
clone.toArray());
clone.setStr('v2');
var simple4 = new proto.jspb.test.Simple1(['a1', ['b1', 'c1']]);
@ -481,11 +484,19 @@ describe('Message test suite', function() {
var newExtension = new proto.jspb.test.CloneExtension();
newExtension.setExt('e2');
clone.setExtension(proto.jspb.test.CloneExtension.extField, newExtension);
assertArrayEquals(['v2',, ['a1', ['b1', 'c1']],,
[['a2', ['b2', 'c2']], ['a3', ['b3', 'c3']]], bytes2,, { 100: [, 'e2'] }],
assertArrayEquals(
[
'v2', , ['a1', ['b1', 'c1']], ,
[['a2', ['b2', 'c2']], ['a3', ['b3', 'c3']]], bytes2, ,
{100: [, 'e2']}
],
clone.toArray());
assertArrayEquals(['v1',, ['x1', ['y1', 'z1']],,
[['x2', ['y2', 'z2']], ['x3', ['y3', 'z3']]], bytes1,, { 100: [, 'e1'] }],
assertArrayEquals(
[
'v1', , ['x1', ['y1', 'z1']], ,
[['x2', ['y2', 'z2']], ['x3', ['y3', 'z3']]], bytes1, ,
{100: [, 'e1']}
],
original.toArray());
});
@ -517,11 +528,12 @@ describe('Message test suite', function() {
jspb.Message.copyInto(original, dest);
assertArrayEquals(original.toArray(), dest.toArray());
assertEquals('x1', dest.getSimple1().getAString());
assertEquals('e1',
assertEquals(
'e1',
dest.getExtension(proto.jspb.test.CloneExtension.extField).getExt());
dest.getSimple1().setAString('new value');
assertNotEquals(dest.getSimple1().getAString(),
original.getSimple1().getAString());
assertNotEquals(
dest.getSimple1().getAString(), original.getSimple1().getAString());
if (supportsUint8Array) {
dest.getBytesField()[0] = 7;
assertObjectEquals(bytes1, original.getBytesField());
@ -531,12 +543,12 @@ describe('Message test suite', function() {
assertObjectEquals(bytes1, original.getBytesField());
assertObjectEquals('789', dest.getBytesField());
}
dest.getExtension(proto.jspb.test.CloneExtension.extField).
setExt('new value');
dest.getExtension(proto.jspb.test.CloneExtension.extField)
.setExt('new value');
assertNotEquals(
dest.getExtension(proto.jspb.test.CloneExtension.extField).getExt(),
original.getExtension(
proto.jspb.test.CloneExtension.extField).getExt());
original.getExtension(proto.jspb.test.CloneExtension.extField)
.getExt());
});
it('testCopyInto_notSameType', function() {
@ -554,26 +566,32 @@ describe('Message test suite', function() {
var extension2 = new proto.jspb.test.Simple1(['str', ['s1', 's2']]);
var extendable = new proto.jspb.test.HasExtensions(['v1', 'v2', 'v3']);
extendable.setExtension(proto.jspb.test.IsExtension.extField, extension1);
extendable.setExtension(proto.jspb.test.IndirectExtension.simple,
extension2);
extendable.setExtension(
proto.jspb.test.IndirectExtension.simple, extension2);
extendable.setExtension(proto.jspb.test.IndirectExtension.str, 'xyzzy');
extendable.setExtension(proto.jspb.test.IndirectExtension.repeatedStrList,
['a', 'b']);
extendable.setExtension(
proto.jspb.test.IndirectExtension.repeatedStrList, ['a', 'b']);
var s1 = new proto.jspb.test.Simple1(['foo', ['s1', 's2']]);
var s2 = new proto.jspb.test.Simple1(['bar', ['t1', 't2']]);
extendable.setExtension(
proto.jspb.test.IndirectExtension.repeatedSimpleList,
[s1, s2]);
assertObjectEquals(extension1,
proto.jspb.test.IndirectExtension.repeatedSimpleList, [s1, s2]);
assertObjectEquals(
extension1,
extendable.getExtension(proto.jspb.test.IsExtension.extField));
assertObjectEquals(extension2,
assertObjectEquals(
extension2,
extendable.getExtension(proto.jspb.test.IndirectExtension.simple));
assertObjectEquals('xyzzy',
assertObjectEquals(
'xyzzy',
extendable.getExtension(proto.jspb.test.IndirectExtension.str));
assertObjectEquals(['a', 'b'], extendable.getExtension(
proto.jspb.test.IndirectExtension.repeatedStrList));
assertObjectEquals([s1, s2], extendable.getExtension(
proto.jspb.test.IndirectExtension.repeatedSimpleList));
assertObjectEquals(
['a', 'b'],
extendable.getExtension(
proto.jspb.test.IndirectExtension.repeatedStrList));
assertObjectEquals(
[s1, s2],
extendable.getExtension(
proto.jspb.test.IndirectExtension.repeatedSimpleList));
// Not supported yet, but it should work...
extendable.setExtension(proto.jspb.test.IndirectExtension.simple, null);
assertNull(
@ -592,29 +610,35 @@ describe('Message test suite', function() {
var extendable = new proto.jspb.test.HasExtensions(['v1', 'v2', 'v3']);
var extension = new proto.jspb.test.Simple1(['foo', ['s1', 's2']]);
extendable.setExtension(proto.jspb.test.simple1, extension);
assertObjectEquals(extension,
extendable.getExtension(proto.jspb.test.simple1));
assertObjectEquals(
extension, extendable.getExtension(proto.jspb.test.simple1));
// From _lib mode.
extension = new proto.jspb.test.ExtensionMessage(['s1']);
extendable = new proto.jspb.test.TestExtensionsMessage([16]);
extendable.setExtension(proto.jspb.test.floatingMsgField, extension);
extendable.setExtension(proto.jspb.test.floatingStrField, 's2');
assertObjectEquals(extension,
extendable.getExtension(proto.jspb.test.floatingMsgField));
assertObjectEquals('s2',
extendable.getExtension(proto.jspb.test.floatingStrField));
assertObjectEquals(
extension, extendable.getExtension(proto.jspb.test.floatingMsgField));
assertObjectEquals(
's2', extendable.getExtension(proto.jspb.test.floatingStrField));
assertNotUndefined(proto.jspb.exttest.floatingMsgField);
assertNotUndefined(proto.jspb.exttest.floatingMsgFieldTwo);
assertNotUndefined(proto.jspb.exttest.beta.floatingStrField);
});
it('testNestedExtensions', function() {
var extendable = new proto.jspb.exttest.nested.TestNestedExtensionsMessage();
var extension = new proto.jspb.exttest.nested.TestOuterMessage.NestedExtensionMessage(['s1']);
extendable.setExtension(proto.jspb.exttest.nested.TestOuterMessage.innerExtension, extension);
assertObjectEquals(extension,
extendable.getExtension(proto.jspb.exttest.nested.TestOuterMessage.innerExtension));
var extendable =
new proto.jspb.exttest.nested.TestNestedExtensionsMessage();
var extension =
new proto.jspb.exttest.nested.TestOuterMessage.NestedExtensionMessage(
['s1']);
extendable.setExtension(
proto.jspb.exttest.nested.TestOuterMessage.innerExtension, extension);
assertObjectEquals(
extension,
extendable.getExtension(
proto.jspb.exttest.nested.TestOuterMessage.innerExtension));
});
it('testToObject_extendedObject', function() {
@ -622,60 +646,72 @@ describe('Message test suite', function() {
var extension2 = new proto.jspb.test.Simple1(['str', ['s1', 's2'], true]);
var extendable = new proto.jspb.test.HasExtensions(['v1', 'v2', 'v3']);
extendable.setExtension(proto.jspb.test.IsExtension.extField, extension1);
extendable.setExtension(proto.jspb.test.IndirectExtension.simple,
extension2);
extendable.setExtension(
proto.jspb.test.IndirectExtension.simple, extension2);
extendable.setExtension(proto.jspb.test.IndirectExtension.str, 'xyzzy');
extendable.setExtension(proto.jspb.test.IndirectExtension.repeatedStrList,
['a', 'b']);
extendable.setExtension(
proto.jspb.test.IndirectExtension.repeatedStrList, ['a', 'b']);
var s1 = new proto.jspb.test.Simple1(['foo', ['s1', 's2'], true]);
var s2 = new proto.jspb.test.Simple1(['bar', ['t1', 't2'], false]);
extendable.setExtension(
proto.jspb.test.IndirectExtension.repeatedSimpleList,
[s1, s2]);
assertObjectEquals({
str1: 'v1', str2: 'v2', str3: 'v3',
extField: { ext1: 'ext1field' },
simple: {
aString: 'str', aRepeatedStringList: ['s1', 's2'], aBoolean: true
},
str: 'xyzzy',
repeatedStrList: ['a', 'b'],
repeatedSimpleList: [
{ aString: 'foo', aRepeatedStringList: ['s1', 's2'], aBoolean: true},
{ aString: 'bar', aRepeatedStringList: ['t1', 't2'], aBoolean: false}
]
}, extendable.toObject());
proto.jspb.test.IndirectExtension.repeatedSimpleList, [s1, s2]);
assertObjectEquals(
{
str1: 'v1',
str2: 'v2',
str3: 'v3',
extField: {ext1: 'ext1field'},
simple: {
aString: 'str',
aRepeatedStringList: ['s1', 's2'],
aBoolean: true
},
str: 'xyzzy',
repeatedStrList: ['a', 'b'],
repeatedSimpleList: [
{aString: 'foo', aRepeatedStringList: ['s1', 's2'], aBoolean: true},
{aString: 'bar', aRepeatedStringList: ['t1', 't2'], aBoolean: false}
]
},
extendable.toObject());
// Now, with instances included.
assertObjectEquals({
str1: 'v1', str2: 'v2', str3: 'v3',
extField: {
ext1: 'ext1field',
$jspbMessageInstance:
extendable.getExtension(proto.jspb.test.IsExtension.extField)
},
simple: {
aString: 'str',
aRepeatedStringList: ['s1', 's2'],
aBoolean: true,
$jspbMessageInstance:
extendable.getExtension(proto.jspb.test.IndirectExtension.simple)
},
str: 'xyzzy',
repeatedStrList: ['a', 'b'],
repeatedSimpleList: [{
aString: 'foo',
aRepeatedStringList: ['s1', 's2'],
aBoolean: true,
$jspbMessageInstance: s1
}, {
aString: 'bar',
aRepeatedStringList: ['t1', 't2'],
aBoolean: false,
$jspbMessageInstance: s2
}],
$jspbMessageInstance: extendable
}, extendable.toObject(true /* opt_includeInstance */));
assertObjectEquals(
{
str1: 'v1',
str2: 'v2',
str3: 'v3',
extField: {
ext1: 'ext1field',
$jspbMessageInstance:
extendable.getExtension(proto.jspb.test.IsExtension.extField)
},
simple: {
aString: 'str',
aRepeatedStringList: ['s1', 's2'],
aBoolean: true,
$jspbMessageInstance: extendable.getExtension(
proto.jspb.test.IndirectExtension.simple)
},
str: 'xyzzy',
repeatedStrList: ['a', 'b'],
repeatedSimpleList: [
{
aString: 'foo',
aRepeatedStringList: ['s1', 's2'],
aBoolean: true,
$jspbMessageInstance: s1
},
{
aString: 'bar',
aRepeatedStringList: ['t1', 't2'],
aBoolean: false,
$jspbMessageInstance: s2
}
],
$jspbMessageInstance: extendable
},
extendable.toObject(true /* opt_includeInstance */));
});
it('testInitialization_emptyArray', function() {
@ -708,7 +744,8 @@ describe('Message test suite', function() {
});
it('testToObject_hasExtensionField', function() {
var data = new proto.jspb.test.HasExtensions(['str1', {100: ['ext1'], 102: ''}]);
var data =
new proto.jspb.test.HasExtensions(['str1', {100: ['ext1'], 102: ''}]);
var obj = data.toObject();
assertEquals('str1', obj.str1);
assertEquals('ext1', obj.extField.ext1);
@ -728,8 +765,7 @@ describe('Message test suite', function() {
var extensionMessage = new proto.jspb.test.IsExtension(['is_extension']);
data.setExtension(proto.jspb.test.IsExtension.extField, extensionMessage);
var obj = data.toObject();
assertNotNull(
data.getExtension(proto.jspb.test.IsExtension.extField));
assertNotNull(data.getExtension(proto.jspb.test.IsExtension.extField));
assertEquals('is_extension', obj.extField.ext1);
});
@ -746,16 +782,18 @@ describe('Message test suite', function() {
var groups = group.getRepeatedGroupList();
assertEquals('g1', groups[0].getId());
assertObjectEquals([true, false], groups[0].getSomeBoolList());
assertObjectEquals({id: 'g1', someBoolList: [true, false]},
groups[0].toObject());
assertObjectEquals({
repeatedGroupList: [{id: 'g1', someBoolList: [true, false]}],
requiredGroup: {id: undefined},
optionalGroup: undefined,
requiredSimple: {aRepeatedStringList: [], aString: undefined},
optionalSimple: undefined,
id: undefined
}, group.toObject());
assertObjectEquals(
{id: 'g1', someBoolList: [true, false]}, groups[0].toObject());
assertObjectEquals(
{
repeatedGroupList: [{id: 'g1', someBoolList: [true, false]}],
requiredGroup: {id: undefined},
optionalGroup: undefined,
requiredSimple: {aRepeatedStringList: [], aString: undefined},
optionalSimple: undefined,
id: undefined
},
group.toObject());
var group1 = new proto.jspb.test.TestGroup1();
group1.setGroup(someGroup);
assertEquals(someGroup, group1.getGroup());
@ -772,28 +810,29 @@ describe('Message test suite', function() {
message.setExtension$(11);
message.setExtension(proto.jspb.test.TestReservedNamesExtension.foo, 12);
assertEquals(11, message.getExtension$());
assertEquals(12, message.getExtension(
proto.jspb.test.TestReservedNamesExtension.foo));
assertEquals(
12,
message.getExtension(proto.jspb.test.TestReservedNamesExtension.foo));
assertObjectEquals({extension: 11, foo: 12}, message.toObject());
});
it('testInitializeMessageWithUnsetOneof', function() {
var message = new proto.jspb.test.TestMessageWithOneof([]);
assertEquals(
proto.jspb.test.TestMessageWithOneof.PartialOneofCase.
PARTIAL_ONEOF_NOT_SET,
proto.jspb.test.TestMessageWithOneof.PartialOneofCase
.PARTIAL_ONEOF_NOT_SET,
message.getPartialOneofCase());
assertEquals(
proto.jspb.test.TestMessageWithOneof.RecursiveOneofCase.
RECURSIVE_ONEOF_NOT_SET,
proto.jspb.test.TestMessageWithOneof.RecursiveOneofCase
.RECURSIVE_ONEOF_NOT_SET,
message.getRecursiveOneofCase());
});
it('testUnsetsOneofCaseWhenFieldIsCleared', function() {
var message = new proto.jspb.test.TestMessageWithOneof;
assertEquals(
proto.jspb.test.TestMessageWithOneof.PartialOneofCase.
PARTIAL_ONEOF_NOT_SET,
proto.jspb.test.TestMessageWithOneof.PartialOneofCase
.PARTIAL_ONEOF_NOT_SET,
message.getPartialOneofCase());
message.setPone('hi');
@ -803,20 +842,20 @@ describe('Message test suite', function() {
message.clearPone();
assertEquals(
proto.jspb.test.TestMessageWithOneof.PartialOneofCase.
PARTIAL_ONEOF_NOT_SET,
proto.jspb.test.TestMessageWithOneof.PartialOneofCase
.PARTIAL_ONEOF_NOT_SET,
message.getPartialOneofCase());
});
it('testFloatingPointFieldsSupportNan', function() {
var assertNan = function(x) {
assertTrue('Expected ' + x + ' (' + goog.typeOf(x) + ') to be NaN.',
assertTrue(
'Expected ' + x + ' (' + goog.typeOf(x) + ') to be NaN.',
goog.isNumber(x) && isNaN(x));
};
var message = new proto.jspb.test.FloatingPointFields([
'NaN', 'NaN', ['NaN', 'NaN'], 'NaN',
'NaN', 'NaN', ['NaN', 'NaN'], 'NaN'
'NaN', 'NaN', ['NaN', 'NaN'], 'NaN', 'NaN', 'NaN', ['NaN', 'NaN'], 'NaN'
]);
assertNan(message.getOptionalFloatField());
assertNan(message.getRequiredFloatField());
@ -837,12 +876,9 @@ describe('Message test suite', function() {
message2.setExtension(
proto.jspb.exttest.reverse.TestExtensionReverseOrderMessage1.a, 233);
message2.setExtension(
proto
.jspb
.exttest
.reverse
.TestExtensionReverseOrderMessage1
.TestExtensionReverseOrderNestedMessage1.b, 2333);
proto.jspb.exttest.reverse.TestExtensionReverseOrderMessage1
.TestExtensionReverseOrderNestedMessage1.b,
2333);
message2.setExtension(proto.jspb.exttest.reverse.c, 23333);
assertEquals(
@ -852,15 +888,9 @@ describe('Message test suite', function() {
assertEquals(
2333,
message2.getExtension(
proto
.jspb
.exttest
.reverse
.TestExtensionReverseOrderMessage1
proto.jspb.exttest.reverse.TestExtensionReverseOrderMessage1
.TestExtensionReverseOrderNestedMessage1.b));
assertEquals(
23333,
message2.getExtension(proto.jspb.exttest.reverse.c));
assertEquals(23333, message2.getExtension(proto.jspb.exttest.reverse.c));
});
it('testCircularDepsBaseOnMessageField', function() {
@ -983,16 +1013,14 @@ describe('Message test suite', function() {
var package1Message = new proto.jspb.filenametest.package1.TestMessage;
var package2Message = new proto.jspb.filenametest.package2.TestMessage;
package1Message.setExtension(
proto.jspb.filenametest.package1.a, 10);
package1Message.setExtension(
proto.jspb.filenametest.package1.b, 11);
package1Message.setExtension(proto.jspb.filenametest.package1.a, 10);
package1Message.setExtension(proto.jspb.filenametest.package1.b, 11);
package2Message.setA(12);
assertEquals(10,
package1Message.getExtension(proto.jspb.filenametest.package1.a));
assertEquals(11,
package1Message.getExtension(proto.jspb.filenametest.package1.b));
assertEquals(
10, package1Message.getExtension(proto.jspb.filenametest.package1.a));
assertEquals(
11, package1Message.getExtension(proto.jspb.filenametest.package1.b));
assertEquals(12, package2Message.getA());
});

View File

@ -49,9 +49,11 @@ from google.protobuf import field_mask_pb2
from google.protobuf import struct_pb2
from google.protobuf import timestamp_pb2
from google.protobuf import wrappers_pb2
from google.protobuf import any_test_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf.internal import well_known_types
from google.protobuf import descriptor_pool
from google.protobuf import json_format
from google.protobuf.util import json_format_proto3_pb2
@ -202,8 +204,8 @@ class JsonFormatTest(JsonFormatBase):
def testJsonParseDictToAnyDoesNotAlterInput(self):
orig_dict = {
"int32Value": 20,
"@type": "type.googleapis.com/proto3.TestMessage"
'int32Value': 20,
'@type': 'type.googleapis.com/proto3.TestMessage'
}
copied_dict = json.loads(json.dumps(orig_dict))
parsed_message = any_pb2.Any()
@ -628,6 +630,19 @@ class JsonFormatTest(JsonFormatBase):
' "value": {\n'
' "@type": "type.googleapis.com/proto3.TestMessage"')
def testAnyMessageDescriptorPoolMissingType(self):
packed_message = unittest_pb2.OneString()
packed_message.data = 'string'
message = any_test_pb2.TestAny()
message.any_value.Pack(packed_message)
empty_pool = descriptor_pool.DescriptorPool()
with self.assertRaises(TypeError) as cm:
json_format.MessageToJson(message, True, descriptor_pool=empty_pool)
self.assertEqual(
'Can not find message descriptor by type_url:'
' type.googleapis.com/protobuf_unittest.OneString.',
str(cm.exception))
def testWellKnownInAnyMessage(self):
message = any_pb2.Any()
int32_value = wrappers_pb2.Int32Value()
@ -1016,6 +1031,32 @@ class JsonFormatTest(JsonFormatBase):
json_format.ParseDict(js_dict, message)
self.assertEqual(expected, message.int32_value)
def testParseDictAnyDescriptorPoolMissingType(self):
# Confirm that ParseDict does not raise ParseError with default pool
js_dict = {
'any_value': {
'@type': 'type.googleapis.com/proto3.MessageType',
'value': 1234
}
}
json_format.ParseDict(js_dict, any_test_pb2.TestAny())
# Check ParseDict raises ParseError with empty pool
js_dict = {
'any_value': {
'@type': 'type.googleapis.com/proto3.MessageType',
'value': 1234
}
}
with self.assertRaises(json_format.ParseError) as cm:
empty_pool = descriptor_pool.DescriptorPool()
json_format.ParseDict(js_dict,
any_test_pb2.TestAny(),
descriptor_pool=empty_pool)
self.assertEqual(
str(cm.exception),
'Failed to parse any_value field: Can not find message descriptor by'
' type_url: type.googleapis.com/proto3.MessageType..')
def testMessageToDict(self):
message = json_format_proto3_pb2.TestMessage()
message.int32_value = 12345

View File

@ -30,6 +30,7 @@
syntax = "proto2";
package google.protobuf.python.internal;
message TestEnumValues {

View File

@ -1406,7 +1406,11 @@ def _DiscardUnknownFields(self):
self._unknown_field_set = None # pylint: disable=protected-access
for field, value in self.ListFields():
if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
if field.label == _FieldDescriptor.LABEL_REPEATED:
if _IsMapField(field):
if _IsMessageMapField(field):
for key in value:
value[key].DiscardUnknownFields()
elif field.label == _FieldDescriptor.LABEL_REPEATED:
for sub_message in value:
sub_message.DiscardUnknownFields()
else:
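The map-handling branch above is what lets DiscardUnknownFields reach message-valued map entries; the unknown_fields test added later in this change exercises it. A minimal sketch of the behavior, assuming the map_unittest_pb2/unittest_pb2 test protos imported elsewhere in this diff:

from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_pb2

# optional_int64 is field number 2, which NestedMessage does not define, so
# the parsed bytes survive only as unknown fields on the map entry's value.
donor = unittest_pb2.TestAllTypes(optional_int64=42)
msg = map_unittest_pb2.TestMap()
msg.map_int32_all_types[1].optional_nested_message.ParseFromString(
    donor.SerializeToString())
assert msg.map_int32_all_types[1].optional_nested_message.SerializeToString() != b''

# With the new _IsMapField/_IsMessageMapField branch, the unknown bytes held
# inside the map value are discarded as well.
msg.DiscardUnknownFields()
assert msg.map_int32_all_types[1].optional_nested_message.SerializeToString() == b''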

View File

@ -715,6 +715,24 @@ class TextFormatParserTests(TextFormatBase):
self.assertEqual(m.optional_string, self._GOLDEN_UNICODE)
self.assertEqual(m.repeated_bytes[0], self._GOLDEN_BYTES)
def testParseDuplicateMessages(self, message_module):
message = message_module.TestAllTypes()
text = ('optional_nested_message { bb: 1 } '
'optional_nested_message { bb: 2 }')
six.assertRaisesRegex(self, text_format.ParseError, (
r'1:59 : Message type "\w+.TestAllTypes" '
r'should not have multiple "optional_nested_message" fields.'),
text_format.Parse, text,
message)
def testParseDuplicateScalars(self, message_module):
message = message_module.TestAllTypes()
text = ('optional_int32: 42 ' 'optional_int32: 67')
six.assertRaisesRegex(self, text_format.ParseError, (
r'1:36 : Message type "\w+.TestAllTypes" should not '
r'have multiple "optional_int32" fields.'), text_format.Parse, text,
message)
@_parameterized.parameters(unittest_pb2, unittest_proto3_arena_pb2)
class TextFormatMergeTests(TextFormatBase):
@ -1293,16 +1311,6 @@ class Proto2Tests(TextFormatBase):
'"protobuf_unittest.optional_int32_extension" extensions.'),
text_format.Parse, text, message)
def testParseDuplicateMessages(self):
message = unittest_pb2.TestAllTypes()
text = ('optional_nested_message { bb: 1 } '
'optional_nested_message { bb: 2 }')
six.assertRaisesRegex(self, text_format.ParseError, (
'1:59 : Message type "protobuf_unittest.TestAllTypes" '
'should not have multiple "optional_nested_message" fields.'),
text_format.Parse, text,
message)
def testParseDuplicateExtensionMessages(self):
message = unittest_pb2.TestAllExtensions()
text = ('[protobuf_unittest.optional_nested_message_extension]: {} '
@ -1313,14 +1321,6 @@ class Proto2Tests(TextFormatBase):
'"protobuf_unittest.optional_nested_message_extension" extensions.'),
text_format.Parse, text, message)
def testParseDuplicateScalars(self):
message = unittest_pb2.TestAllTypes()
text = ('optional_int32: 42 ' 'optional_int32: 67')
six.assertRaisesRegex(self, text_format.ParseError, (
'1:36 : Message type "protobuf_unittest.TestAllTypes" should not '
'have multiple "optional_int32" fields.'), text_format.Parse, text,
message)
def testParseGroupNotClosed(self):
message = unittest_pb2.TestAllTypes()
text = 'RepeatedGroup: <'

View File

@ -39,6 +39,7 @@ try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf import unittest_proto3_arena_pb2
@ -138,6 +139,18 @@ class UnknownFieldsTest(BaseTestCase):
self.assertEqual(
b'', message.repeated_nested_message[0].SerializeToString())
msg = map_unittest_pb2.TestMap()
msg.map_int32_all_types[1].optional_nested_message.ParseFromString(
other_message.SerializeToString())
msg.map_string_string['1'] = 'test'
self.assertNotEqual(
b'',
msg.map_int32_all_types[1].optional_nested_message.SerializeToString())
msg.DiscardUnknownFields()
self.assertEqual(
b'',
msg.map_int32_all_types[1].optional_nested_message.SerializeToString())
class UnknownFieldsAccessorsTest(BaseTestCase):

View File

@ -96,12 +96,14 @@ class ParseError(Error):
"""Thrown in case of parsing error."""
def MessageToJson(message,
including_default_value_fields=False,
preserving_proto_field_name=False,
indent=2,
sort_keys=False,
use_integers_for_enums=False):
def MessageToJson(
message,
including_default_value_fields=False,
preserving_proto_field_name=False,
indent=2,
sort_keys=False,
use_integers_for_enums=False,
descriptor_pool=None):
"""Converts protobuf message to JSON format.
Args:
@ -117,20 +119,26 @@ def MessageToJson(message,
An indent level of 0 or negative will only insert newlines.
sort_keys: If True, then the output will be sorted by field names.
use_integers_for_enums: If true, print integers instead of enum names.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
Returns:
A string containing the JSON formatted protocol buffer message.
"""
printer = _Printer(including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums)
printer = _Printer(
including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums,
descriptor_pool)
return printer.ToJsonString(message, indent, sort_keys)
def MessageToDict(message,
including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False):
def MessageToDict(
message,
including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
descriptor_pool=None):
"""Converts protobuf message to a dictionary.
When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
@ -145,13 +153,17 @@ def MessageToDict(message,
names as defined in the .proto file. If False, convert the field
names to lowerCamelCase.
use_integers_for_enums: If true, print integers instead of enum names.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
Returns:
A dict representation of the protocol buffer message.
"""
printer = _Printer(including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums)
printer = _Printer(
including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums,
descriptor_pool)
# pylint: disable=protected-access
return printer._MessageToJsonObject(message)
@ -165,13 +177,16 @@ def _IsMapEntry(field):
class _Printer(object):
"""JSON format printer for protocol message."""
def __init__(self,
including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False):
def __init__(
self,
including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
descriptor_pool=None):
self.including_default_value_fields = including_default_value_fields
self.preserving_proto_field_name = preserving_proto_field_name
self.use_integers_for_enums = use_integers_for_enums
self.descriptor_pool = descriptor_pool
def ToJsonString(self, message, indent, sort_keys):
js = self._MessageToJsonObject(message)
@ -300,7 +315,7 @@ class _Printer(object):
js = OrderedDict()
type_url = message.type_url
js['@type'] = type_url
sub_message = _CreateMessageFromTypeUrl(type_url)
sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
sub_message.ParseFromString(message.value)
message_descriptor = sub_message.DESCRIPTOR
full_name = message_descriptor.full_name
@ -366,13 +381,13 @@ def _DuplicateChecker(js):
return result
def _CreateMessageFromTypeUrl(type_url):
# TODO(jieluo): Should add a way that users can register the type resolver
# instead of the default one.
def _CreateMessageFromTypeUrl(type_url, descriptor_pool):
"""Creates a message from a type URL."""
db = symbol_database.Default()
pool = db.pool if descriptor_pool is None else descriptor_pool
type_name = type_url.split('/')[-1]
try:
message_descriptor = db.pool.FindMessageTypeByName(type_name)
message_descriptor = pool.FindMessageTypeByName(type_name)
except KeyError:
raise TypeError(
'Can not find message descriptor by type_url: {0}.'.format(type_url))
@ -380,13 +395,15 @@ def _CreateMessageFromTypeUrl(type_url):
return message_class()
def Parse(text, message, ignore_unknown_fields=False):
def Parse(text, message, ignore_unknown_fields=False, descriptor_pool=None):
"""Parses a JSON representation of a protocol message into a message.
Args:
text: Message JSON representation.
message: A protocol buffer message to merge into.
ignore_unknown_fields: If True, do not raise errors for unknown fields.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
Returns:
The same message passed as argument.
@ -399,21 +416,26 @@ def Parse(text, message, ignore_unknown_fields=False):
js = json.loads(text, object_pairs_hook=_DuplicateChecker)
except ValueError as e:
raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
return ParseDict(js, message, ignore_unknown_fields)
return ParseDict(js, message, ignore_unknown_fields, descriptor_pool)
def ParseDict(js_dict, message, ignore_unknown_fields=False):
def ParseDict(js_dict,
message,
ignore_unknown_fields=False,
descriptor_pool=None):
"""Parses a JSON dictionary representation into a message.
Args:
js_dict: Dict representation of a JSON message.
message: A protocol buffer message to merge into.
ignore_unknown_fields: If True, do not raise errors for unknown fields.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
Returns:
The same message passed as argument.
"""
parser = _Parser(ignore_unknown_fields)
parser = _Parser(ignore_unknown_fields, descriptor_pool)
parser.ConvertMessage(js_dict, message)
return message
@ -424,9 +446,9 @@ _INT_OR_FLOAT = six.integer_types + (float,)
class _Parser(object):
"""JSON format parser for protocol message."""
def __init__(self,
ignore_unknown_fields):
def __init__(self, ignore_unknown_fields, descriptor_pool):
self.ignore_unknown_fields = ignore_unknown_fields
self.descriptor_pool = descriptor_pool
def ConvertMessage(self, value, message):
"""Convert a JSON object into a message.
@ -562,7 +584,7 @@ class _Parser(object):
except KeyError:
raise ParseError('@type is missing when parsing any message.')
sub_message = _CreateMessageFromTypeUrl(type_url)
sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
message_descriptor = sub_message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
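Taken together, the json_format changes above thread an optional descriptor_pool from MessageToJson/MessageToDict and Parse/ParseDict down to _CreateMessageFromTypeUrl, so packed Any payloads are resolved against a caller-supplied pool instead of only the default one. A minimal sketch of the call pattern, assuming the any_test_pb2/unittest_pb2 test protos used by the new tests in this change:

from google.protobuf import any_test_pb2
from google.protobuf import descriptor_pool
from google.protobuf import json_format
from google.protobuf import unittest_pb2

message = any_test_pb2.TestAny()
message.any_value.Pack(unittest_pb2.OneString(data='string'))

# The default pool knows protobuf_unittest.OneString, so this succeeds and
# the resulting JSON round-trips through Parse.
json_text = json_format.MessageToJson(message)
json_format.Parse(json_text, any_test_pb2.TestAny())

# An empty pool cannot resolve the Any's type_url, so serialization fails.
empty_pool = descriptor_pool.DescriptorPool()
try:
    json_format.MessageToJson(message, descriptor_pool=empty_pool)
except TypeError as err:
    print(err)  # Can not find message descriptor by type_url: ...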

View File

@ -346,11 +346,11 @@ PyObject* MapReflectionFriend::MergeFrom(PyObject* _self, PyObject* arg) {
const Message* other_message = other_map->message;
const Reflection* reflection = message->GetReflection();
const Reflection* other_reflection = other_message->GetReflection();
internal::MapFieldBase* field = reflection->MapData(
internal::MapFieldBase* field = reflection->MutableMapData(
message, self->parent_field_descriptor);
internal::MapFieldBase* other_field =
other_reflection->MapData(const_cast<Message*>(other_message),
self->parent_field_descriptor);
const internal::MapFieldBase* other_field =
other_reflection->GetMapData(*other_message,
self->parent_field_descriptor);
field->MergeFrom(*other_field);
self->version++;
Py_RETURN_NONE;
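The switch above to the MutableMapData/GetMapData pair is a const-correctness cleanup in the C++ implementation of the Python map container; observable behavior is unchanged. What MapReflectionFriend::MergeFrom implements at the Python level is roughly the following sketch, assuming map_unittest_pb2 as used elsewhere in this change:

from google.protobuf import map_unittest_pb2

src = map_unittest_pb2.TestMap()
src.map_string_string['k1'] = 'v1'

dst = map_unittest_pb2.TestMap()
dst.map_string_string['k2'] = 'v2'

# Under the C++ runtime this call lands in MapReflectionFriend::MergeFrom.
dst.map_string_string.MergeFrom(src.map_string_string)
assert dict(dst.map_string_string) == {'k1': 'v1', 'k2': 'v2'}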

View File

@ -937,11 +937,9 @@ class _Parser(object):
else:
getattr(message, field.name).append(value)
else:
# Proto3 doesn't represent presence so we can't test if multiple scalars
# have occurred. We have to allow them.
can_check_presence = not self._IsProto3Syntax(message)
if field.is_extension:
if (not self._allow_multiple_scalars and can_check_presence and
if (not self._allow_multiple_scalars and
not self._IsProto3Syntax(message) and
message.HasExtension(field)):
raise tokenizer.ParseErrorPreviousToken(
'Message type "%s" should not have multiple "%s" extensions.' %
@ -949,8 +947,16 @@ class _Parser(object):
else:
message.Extensions[field] = value
else:
if (not self._allow_multiple_scalars and can_check_presence and
message.HasField(field.name)):
duplicate_error = False
if not self._allow_multiple_scalars:
if self._IsProto3Syntax(message):
          # Proto3 doesn't represent presence, so we make a best-effort check
          # for multiple scalars by comparing against default values.
duplicate_error = bool(getattr(message, field.name))
else:
duplicate_error = message.HasField(field.name)
if duplicate_error:
raise tokenizer.ParseErrorPreviousToken(
'Message type "%s" should not have multiple "%s" fields.' %
(message.DESCRIPTOR.full_name, field.name))
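Because proto3 messages carry no presence information, the branch above falls back to comparing the current value against the field default, so text_format.Parse now rejects duplicated non-default scalars for proto3 as well (the parameterized tests added earlier in this change cover both syntaxes). A hedged sketch, assuming the unittest_proto3_arena_pb2 test proto used by those tests; note that a duplicate whose first occurrence writes the default value still slips through, since the best-effort check can only see a non-default current value:

from google.protobuf import text_format
from google.protobuf import unittest_proto3_arena_pb2

message = unittest_proto3_arena_pb2.TestAllTypes()
try:
    text_format.Parse('optional_int32: 42 optional_int32: 67', message)
except text_format.ParseError as err:
    print(err)  # ... should not have multiple "optional_int32" fields.

# Not detected: the first occurrence sets the default (0), which is
# indistinguishable from the field being unset.
text_format.Parse('optional_int32: 0 optional_int32: 67',
                  unittest_proto3_arena_pb2.TestAllTypes())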

View File

@ -201,13 +201,13 @@ const char* Any::_InternalParse(const char* begin, const char* end, void* object
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string type_url = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Any.type_url");
auto str = msg->mutable_type_url();
@ -226,7 +226,7 @@ const char* Any::_InternalParse(const char* begin, const char* end, void* object
// bytes value = 2;
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
auto str = msg->mutable_value();
if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) {
@ -242,7 +242,7 @@ const char* Any::_InternalParse(const char* begin, const char* end, void* object
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -256,13 +256,9 @@ const char* Any::_InternalParse(const char* begin, const char* end, void* object
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Any::MergePartialFromCodedStream(
@ -352,8 +348,7 @@ void Any::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Any::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Any)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;

View File

@ -148,7 +148,7 @@ class PROTOBUF_EXPORT Any : public ::google::protobuf::Message /* @@protoc_inser
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

View File

@ -307,13 +307,13 @@ const char* Api::_InternalParse(const char* begin, const char* end, void* object
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string name = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Api.name");
auto str = msg->mutable_name();
@ -333,7 +333,7 @@ const char* Api::_InternalParse(const char* begin, const char* end, void* object
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Method::_InternalParse;
object = msg->add_methods();
@ -351,7 +351,7 @@ const char* Api::_InternalParse(const char* begin, const char* end, void* object
case 3: {
if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Option::_InternalParse;
object = msg->add_options();
@ -368,7 +368,7 @@ const char* Api::_InternalParse(const char* begin, const char* end, void* object
// string version = 4;
case 4: {
if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Api.version");
auto str = msg->mutable_version();
@ -387,7 +387,7 @@ const char* Api::_InternalParse(const char* begin, const char* end, void* object
// .google.protobuf.SourceContext source_context = 5;
case 5: {
if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::SourceContext::_InternalParse;
object = msg->mutable_source_context();
@ -403,7 +403,7 @@ const char* Api::_InternalParse(const char* begin, const char* end, void* object
case 6: {
if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Mixin::_InternalParse;
object = msg->add_mixins();
@ -421,14 +421,14 @@ const char* Api::_InternalParse(const char* begin, const char* end, void* object
case 7: {
if (static_cast<::google::protobuf::uint8>(tag) != 56) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::Syntax value = static_cast<::google::protobuf::Syntax>(val);
msg->set_syntax(value);
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -442,13 +442,9 @@ const char* Api::_InternalParse(const char* begin, const char* end, void* object
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Api::MergePartialFromCodedStream(
@ -643,8 +639,7 @@ void Api::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Api::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Api)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -665,7 +660,7 @@ void Api::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->methods_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
2, this->methods(static_cast<int>(i)), deterministic, target);
2, this->methods(static_cast<int>(i)), target);
}
// repeated .google.protobuf.Option options = 3;
@ -673,7 +668,7 @@ void Api::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->options_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
3, this->options(static_cast<int>(i)), deterministic, target);
3, this->options(static_cast<int>(i)), target);
}
// string version = 4;
@ -691,7 +686,7 @@ void Api::SerializeWithCachedSizes(
if (this->has_source_context()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
5, HasBitSetters::source_context(this), deterministic, target);
5, HasBitSetters::source_context(this), target);
}
// repeated .google.protobuf.Mixin mixins = 6;
@ -699,7 +694,7 @@ void Api::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->mixins_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
6, this->mixins(static_cast<int>(i)), deterministic, target);
6, this->mixins(static_cast<int>(i)), target);
}
// .google.protobuf.Syntax syntax = 7;
@ -983,13 +978,13 @@ const char* Method::_InternalParse(const char* begin, const char* end, void* obj
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string name = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Method.name");
auto str = msg->mutable_name();
@ -1008,7 +1003,7 @@ const char* Method::_InternalParse(const char* begin, const char* end, void* obj
// string request_type_url = 2;
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Method.request_type_url");
auto str = msg->mutable_request_type_url();
@ -1028,7 +1023,7 @@ const char* Method::_InternalParse(const char* begin, const char* end, void* obj
case 3: {
if (static_cast<::google::protobuf::uint8>(tag) != 24) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
bool value = val;
msg->set_request_streaming(value);
@ -1037,7 +1032,7 @@ const char* Method::_InternalParse(const char* begin, const char* end, void* obj
// string response_type_url = 4;
case 4: {
if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Method.response_type_url");
auto str = msg->mutable_response_type_url();
@ -1057,7 +1052,7 @@ const char* Method::_InternalParse(const char* begin, const char* end, void* obj
case 5: {
if (static_cast<::google::protobuf::uint8>(tag) != 40) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
bool value = val;
msg->set_response_streaming(value);
@ -1067,7 +1062,7 @@ const char* Method::_InternalParse(const char* begin, const char* end, void* obj
case 6: {
if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Option::_InternalParse;
object = msg->add_options();
@ -1085,14 +1080,14 @@ const char* Method::_InternalParse(const char* begin, const char* end, void* obj
case 7: {
if (static_cast<::google::protobuf::uint8>(tag) != 56) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::Syntax value = static_cast<::google::protobuf::Syntax>(val);
msg->set_syntax(value);
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -1106,13 +1101,9 @@ const char* Method::_InternalParse(const char* begin, const char* end, void* obj
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Method::MergePartialFromCodedStream(
@ -1311,8 +1302,7 @@ void Method::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Method::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Method)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -1365,7 +1355,7 @@ void Method::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->options_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
6, this->options(static_cast<int>(i)), deterministic, target);
6, this->options(static_cast<int>(i)), target);
}
// .google.protobuf.Syntax syntax = 7;
@ -1617,13 +1607,13 @@ const char* Mixin::_InternalParse(const char* begin, const char* end, void* obje
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string name = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Mixin.name");
auto str = msg->mutable_name();
@ -1642,7 +1632,7 @@ const char* Mixin::_InternalParse(const char* begin, const char* end, void* obje
// string root = 2;
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Mixin.root");
auto str = msg->mutable_root();
@ -1659,7 +1649,7 @@ const char* Mixin::_InternalParse(const char* begin, const char* end, void* obje
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -1673,13 +1663,9 @@ const char* Mixin::_InternalParse(const char* begin, const char* end, void* obje
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Mixin::MergePartialFromCodedStream(
@ -1777,8 +1763,7 @@ void Mixin::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Mixin::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Mixin)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;

View File

@ -142,7 +142,7 @@ class PROTOBUF_EXPORT Api : public ::google::protobuf::Message /* @@protoc_inser
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -332,7 +332,7 @@ class PROTOBUF_EXPORT Method : public ::google::protobuf::Message /* @@protoc_in
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -515,7 +515,7 @@ class PROTOBUF_EXPORT Mixin : public ::google::protobuf::Message /* @@protoc_ins
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

File diff suppressed because it is too large

View File

@ -271,6 +271,8 @@ inline bool IsWeak(const FieldDescriptor* field, const Options& options) {
return false;
}
bool IsStringInlined(const FieldDescriptor* descriptor, const Options& options);
// For a string field, returns the effective ctype. If the actual ctype is
// not supported, returns the default of STRING.
FieldOptions::CType EffectiveStringCType(const FieldDescriptor* field,

View File

@ -262,8 +262,7 @@ static void GenerateSerializationLoop(const Formatter& format,
if (to_array) {
format(
"target = ::$proto_ns$::internal::WireFormatLite::InternalWrite"
"$declared_type$NoVirtualToArray($number$, *entry, deterministic, "
"target);\n");
"$declared_type$NoVirtualToArray($number$, *entry, target);\n");
} else {
format(
"::$proto_ns$::internal::WireFormatLite::Write$stream_writer$($number$,"
@ -365,7 +364,7 @@ void MapFieldGenerator::GenerateSerializeWithCachedSizes(io::Printer* printer,
" items[static_cast<ptrdiff_t>(n)] = SortItem(&*it);\n"
" }\n"
" ::std::sort(&items[0], &items[static_cast<ptrdiff_t>(n)], Less());\n",
to_array ? "deterministic" : "output->IsSerializationDeterministic()");
to_array ? "false" : "output->IsSerializationDeterministic()");
format.Indent();
GenerateSerializationLoop(format, SupportsArenas(descriptor_), string_key,
string_value, to_array, true);

View File

@ -800,7 +800,8 @@ void MessageGenerator::GenerateSingularFieldHasBits(
} else {
format(
"inline bool $classname$::has_$name$() const {\n"
" return this != internal_default_instance() && $name$_ != nullptr;\n"
" return this != internal_default_instance() "
"&& $name$_ != nullptr;\n"
"}\n");
}
}
@ -941,7 +942,7 @@ void MessageGenerator::GenerateClassDefinition(io::Printer* printer) {
"public:\n"
"#if $GOOGLE_PROTOBUF$_ENABLE_EXPERIMENTAL_PARSER\n"
"static bool _ParseMap(const char* begin, const "
"char* end, void* object, ::google::protobuf::internal::ParseContext* ctx);\n"
"char* end, void* object, ::$proto_ns$::internal::ParseContext* ctx);\n"
"#endif // $GOOGLE_PROTOBUF$_ENABLE_EXPERIMENTAL_PARSER\n"
" typedef ::$proto_ns$::internal::MapEntry$lite$<$classname$, \n"
" $key_cpp$, $val_cpp$,\n"
@ -1190,7 +1191,7 @@ void MessageGenerator::GenerateClassDefinition(io::Printer* printer) {
if (HasFastArraySerialization(descriptor_->file(), options_)) {
format(
"$uint8$* InternalSerializeWithCachedSizesToArray(\n"
" bool deterministic, $uint8$* target) const final;\n");
" $uint8$* target) const final;\n");
}
}
@ -1654,8 +1655,10 @@ int MessageGenerator::GenerateFieldMetadata(io::Printer* printer) {
return 2;
}
format(
"{PROTOBUF_FIELD_OFFSET($classtype$, _cached_size_), 0, 0, 0, nullptr},\n");
"{PROTOBUF_FIELD_OFFSET($classtype$, _cached_size_),"
" 0, 0, 0, nullptr},\n");
std::vector<const Descriptor::ExtensionRange*> sorted_extensions;
sorted_extensions.reserve(descriptor_->extension_range_count());
for (int i = 0; i < descriptor_->extension_range_count(); ++i) {
sorted_extensions.push_back(descriptor_->extension_range(i));
}
@ -1864,8 +1867,8 @@ void MessageGenerator::GenerateClassMethods(io::Printer* printer) {
}
format(
"#if $GOOGLE_PROTOBUF$_ENABLE_EXPERIMENTAL_PARSER\n"
"bool $classname$::_ParseMap(const char* begin, const "
"char* end, void* object, ::google::protobuf::internal::ParseContext* ctx) {\n"
"bool $classname$::_ParseMap(const char* begin, const char* end, "
"void* object, ::$proto_ns$::internal::ParseContext* ctx) {\n"
" using MF = ::$proto_ns$::internal::MapField$1$<\n"
" $classname$, EntryKeyType, EntryValueType,\n"
" kEntryKeyFieldType, kEntryValueFieldType,\n"
@ -1885,7 +1888,8 @@ void MessageGenerator::GenerateClassMethods(io::Printer* printer) {
format(
" DO_(parser.ParseMapEnumValidation(\n"
" begin, end, ctx->extra_parse_data().field_number,\n"
" static_cast<::google::protobuf::internal::InternalMetadataWithArena$1$*>("
" static_cast<::$proto_ns$::internal::"
"InternalMetadataWithArena$1$*>("
"ctx->extra_parse_data().unknown_fields), $2$_IsValid));\n",
HasDescriptorMethods(descriptor_->file(), options_) ? "" : "Lite",
QualifiedClassName(val->enum_type()));
@ -3769,7 +3773,7 @@ void MessageGenerator::GenerateSerializeOneExtensionRange(
if (to_array) {
format(
"target = _extensions_.InternalSerializeWithCachedSizesToArray(\n"
" $start$, $end$, deterministic, target);\n\n");
" $start$, $end$, target);\n\n");
} else {
format(
"_extensions_.SerializeWithCachedSizes($start$, $end$, output);\n"
@ -3819,10 +3823,9 @@ void MessageGenerator::GenerateSerializeWithCachedSizesToArray(
// Special-case MessageSet.
format(
"$uint8$* $classname$::InternalSerializeWithCachedSizesToArray(\n"
" bool deterministic, $uint8$* target) const {\n"
" $uint8$* target) const {\n"
" target = _extensions_."
"InternalSerializeMessageSetWithCachedSizesToArray(\n"
" deterministic, target);\n");
"InternalSerializeMessageSetWithCachedSizesToArray(target);\n");
GOOGLE_CHECK(UseUnknownFieldSet(descriptor_->file(), options_));
std::map<string, string> vars;
SetUnknkownFieldsVariable(descriptor_, options_, &vars);
@ -3839,10 +3842,9 @@ void MessageGenerator::GenerateSerializeWithCachedSizesToArray(
format(
"$uint8$* $classname$::InternalSerializeWithCachedSizesToArray(\n"
" bool deterministic, $uint8$* target) const {\n");
" $uint8$* target) const {\n");
format.Indent();
format("(void)deterministic; // Unused\n");
format("// @@protoc_insertion_point(serialize_to_array_start:$full_name$)\n");
GenerateSerializeWithCachedSizesBody(printer, true);
@ -3937,6 +3939,7 @@ void MessageGenerator::GenerateSerializeWithCachedSizesBody(
SortFieldsByNumber(descriptor_);
std::vector<const Descriptor::ExtensionRange*> sorted_extensions;
sorted_extensions.reserve(descriptor_->extension_range_count());
for (int i = 0; i < descriptor_->extension_range_count(); ++i) {
sorted_extensions.push_back(descriptor_->extension_range(i));
}

View File

@ -477,7 +477,7 @@ GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const {
format(
"target = ::$proto_ns$::internal::WireFormatLite::\n"
" InternalWrite$declared_type$ToArray(\n"
" $number$, HasBitSetters::$name$(this), deterministic, target);\n");
" $number$, HasBitSetters::$name$(this), target);\n");
}
void MessageFieldGenerator::
@ -812,8 +812,7 @@ GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const {
" n = static_cast<unsigned int>(this->$name$_size()); i < n; i++) {\n"
" target = ::$proto_ns$::internal::WireFormatLite::\n"
" InternalWrite$declared_type$ToArray(\n"
" $number$, this->$name$(static_cast<int>(i)), deterministic, "
"target);\n"
" $number$, this->$name$(static_cast<int>(i)), target);\n"
"}\n");
}

View File

@ -43,8 +43,8 @@ class AccessInfoMap;
namespace cpp {
enum class EnforceOptimizeMode {
kNoEnforcement, // Use the runtime specified by the file specific options.
kSpeed, // This is the full runtime.
kNoEnforcement, // Use the runtime specified by the file specific options.
kSpeed, // This is the full runtime.
kLiteRuntime,
};

View File

@ -89,21 +89,7 @@ StringFieldGenerator::StringFieldGenerator(const FieldDescriptor* descriptor,
const Options& options)
: FieldGenerator(descriptor, options),
lite_(!HasDescriptorMethods(descriptor->file(), options)),
inlined_(false) {
// TODO(ckennelly): Handle inlining for any.proto.
if (IsAnyMessage(descriptor_->containing_type(), options_)) {
inlined_ = false;
}
if (descriptor_->containing_type()->options().map_entry()) {
inlined_ = false;
}
// Limit to proto2, as we rely on has bits to distinguish field presence for
// release_$name$. On proto3, we cannot use the address of the string
// instance when the field has been inlined.
inlined_ = inlined_ && HasFieldPresence(descriptor_->file());
inlined_(IsStringInlined(descriptor, options)) {
SetStringVariables(descriptor, &variables_, options);
}

View File

@ -147,6 +147,7 @@ enum ConflictingEnum { // NO_PROTO3
NOT_EQ = 1; // NO_PROTO3
volatile = 2; // NO_PROTO3
return = 3; // NO_PROTO3
NULL = 4; // NO_PROTO3
} // NO_PROTO3
message DummyMessage {}

View File

@ -107,12 +107,13 @@ TEST(GENERATED_MESSAGE_TEST_NAME, TestConflictingEnumNames) {
message.set_conflicting_enum(protobuf_unittest::TestConflictingEnumNames_NestedConflictingEnum_XOR);
EXPECT_EQ(5, message.conflicting_enum());
protobuf_unittest::ConflictingEnum conflicting_enum;
conflicting_enum = protobuf_unittest::NOT_EQ;
EXPECT_EQ(1, conflicting_enum);
conflicting_enum = protobuf_unittest::return_;
EXPECT_EQ(3, conflicting_enum);
conflicting_enum = protobuf_unittest::NULL_;
EXPECT_EQ(4, conflicting_enum);
}
} // namespace cpp_unittest

View File

@ -2154,6 +2154,7 @@ TEST(HELPERS_TEST_NAME, TestSCC) {
MessageSCCAnalyzer scc_analyzer((Options()));
const SCC* scc = scc_analyzer.GetSCC(a.GetDescriptor());
std::vector<string> names;
names.reserve(scc->descriptors.size());
for (int i = 0; i < scc->descriptors.size(); i++) {
names.push_back(scc->descriptors[i]->full_name());
}

View File

@ -2430,6 +2430,9 @@ void Generator::GenerateClassFromObject(const GeneratorOptions& options,
" * @param {!Object} obj The object representation of this proto to\n"
" * load the data from.\n"
" * @return {!$classname$}\n"
" * @suppress {missingProperties} To prevent JSCompiler errors at "
"the\n"
" * `goog.isDef(obj.<fieldName>)` lookups.\n"
" */\n"
"$classname$.fromObject = function(obj) {\n"
" var msg = new $classname$();\n",
@ -2437,7 +2440,9 @@ void Generator::GenerateClassFromObject(const GeneratorOptions& options,
for (int i = 0; i < desc->field_count(); i++) {
const FieldDescriptor* field = desc->field(i);
GenerateClassFieldFromObject(options, printer, field);
if (!IgnoreField(field)) {
GenerateClassFieldFromObject(options, printer, field);
}
}
printer->Print(
@ -2479,9 +2484,8 @@ void Generator::GenerateClassFieldFromObject(
printer->Print(
" goog.isDef(obj.$name$) && "
"jspb.Message.setRepeatedWrapperField(\n"
" msg, $index$, goog.array.map(obj.$name$, function(i) {\n"
" return $fieldclass$.fromObject(i);\n"
" }));\n",
" msg, $index$, obj.$name$.map(\n"
" $fieldclass$.fromObject));\n",
"name", JSObjectFieldName(options, field),
"index", JSFieldIndex(field),
"fieldclass", SubmessageTypeRef(options, field));

View File

@ -1564,15 +1564,18 @@ bool Parser::ParseExtensions(DescriptorProto* message,
// name literals.
bool Parser::ParseReserved(DescriptorProto* message,
const LocationRecorder& message_location) {
io::Tokenizer::Token start_token = input_->current();
// Parse the declaration.
DO(Consume("reserved"));
if (LookingAtType(io::Tokenizer::TYPE_STRING)) {
LocationRecorder location(message_location,
DescriptorProto::kReservedNameFieldNumber);
location.StartAt(start_token);
return ParseReservedNames(message, location);
} else {
LocationRecorder location(message_location,
DescriptorProto::kReservedRangeFieldNumber);
location.StartAt(start_token);
return ParseReservedNumbers(message, location);
}
}
@ -1638,16 +1641,19 @@ bool Parser::ParseReservedNumbers(DescriptorProto* message,
}
bool Parser::ParseReserved(EnumDescriptorProto* message,
const LocationRecorder& message_location) {
const LocationRecorder& message_location) {
io::Tokenizer::Token start_token = input_->current();
// Parse the declaration.
DO(Consume("reserved"));
if (LookingAtType(io::Tokenizer::TYPE_STRING)) {
LocationRecorder location(message_location,
DescriptorProto::kReservedNameFieldNumber);
location.StartAt(start_token);
return ParseReservedNames(message, location);
} else {
LocationRecorder location(message_location,
DescriptorProto::kReservedRangeFieldNumber);
location.StartAt(start_token);
return ParseReservedNumbers(message, location);
}
}

View File

@ -2739,6 +2739,33 @@ TEST_F(SourceInfoTest, ExtensionRanges) {
EXPECT_TRUE(HasSpan(file_.message_type(0), "name"));
}
TEST_F(SourceInfoTest, ReservedRanges) {
EXPECT_TRUE(
Parse("message Message {\n"
" $a$reserved $b$1$c$ to $d$4$e$, $f$6$g$;$h$\n"
"}\n"));
const DescriptorProto::ReservedRange& range1 =
file_.message_type(0).reserved_range(0);
const DescriptorProto::ReservedRange& range2 =
file_.message_type(0).reserved_range(1);
EXPECT_TRUE(HasSpan('a', 'h', file_.message_type(0), "reserved_range"));
EXPECT_TRUE(HasSpan('b', 'e', range1));
EXPECT_TRUE(HasSpan('b', 'c', range1, "start"));
EXPECT_TRUE(HasSpan('d', 'e', range1, "end"));
EXPECT_TRUE(HasSpan('f', 'g', range2));
EXPECT_TRUE(HasSpan('f', 'g', range2, "start"));
EXPECT_TRUE(HasSpan('f', 'g', range2, "end"));
// Ignore these.
EXPECT_TRUE(HasSpan(file_));
EXPECT_TRUE(HasSpan(file_.message_type(0)));
EXPECT_TRUE(HasSpan(file_.message_type(0), "name"));
}
TEST_F(SourceInfoTest, Oneofs) {
EXPECT_TRUE(Parse(
"message Foo {\n"

View File

@ -320,14 +320,14 @@ const char* Version::_InternalParse(const char* begin, const char* end, void* ob
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// optional int32 major = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 8) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int32 value = val;
msg->set_major(value);
@ -337,7 +337,7 @@ const char* Version::_InternalParse(const char* begin, const char* end, void* ob
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 16) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int32 value = val;
msg->set_minor(value);
@ -347,7 +347,7 @@ const char* Version::_InternalParse(const char* begin, const char* end, void* ob
case 3: {
if (static_cast<::google::protobuf::uint8>(tag) != 24) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int32 value = val;
msg->set_patch(value);
@ -356,7 +356,7 @@ const char* Version::_InternalParse(const char* begin, const char* end, void* ob
// optional string suffix = 4;
case 4: {
if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.compiler.Version.suffix");
auto str = msg->mutable_suffix();
@ -373,7 +373,7 @@ const char* Version::_InternalParse(const char* begin, const char* end, void* ob
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -387,13 +387,9 @@ const char* Version::_InternalParse(const char* begin, const char* end, void* ob
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Version::MergePartialFromCodedStream(
@ -521,8 +517,7 @@ void Version::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Version::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.compiler.Version)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -804,14 +799,14 @@ const char* CodeGeneratorRequest::_InternalParse(const char* begin, const char*
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// repeated string file_to_generate = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.compiler.CodeGeneratorRequest.file_to_generate");
auto str = msg->add_file_to_generate();
@ -832,7 +827,7 @@ const char* CodeGeneratorRequest::_InternalParse(const char* begin, const char*
// optional string parameter = 2;
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.compiler.CodeGeneratorRequest.parameter");
auto str = msg->mutable_parameter();
@ -851,7 +846,7 @@ const char* CodeGeneratorRequest::_InternalParse(const char* begin, const char*
// optional .google.protobuf.compiler.Version compiler_version = 3;
case 3: {
if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::compiler::Version::_InternalParse;
object = msg->mutable_compiler_version();
@ -867,7 +862,7 @@ const char* CodeGeneratorRequest::_InternalParse(const char* begin, const char*
case 15: {
if (static_cast<::google::protobuf::uint8>(tag) != 122) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::FileDescriptorProto::_InternalParse;
object = msg->add_proto_file();
@ -882,7 +877,7 @@ const char* CodeGeneratorRequest::_InternalParse(const char* begin, const char*
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -896,13 +891,9 @@ const char* CodeGeneratorRequest::_InternalParse(const char* begin, const char*
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool CodeGeneratorRequest::MergePartialFromCodedStream(
@ -1039,8 +1030,7 @@ void CodeGeneratorRequest::SerializeWithCachedSizes(
}
::google::protobuf::uint8* CodeGeneratorRequest::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.compiler.CodeGeneratorRequest)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -1071,7 +1061,7 @@ void CodeGeneratorRequest::SerializeWithCachedSizes(
if (cached_has_bits & 0x00000002u) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
3, HasBitSetters::compiler_version(this), deterministic, target);
3, HasBitSetters::compiler_version(this), target);
}
// repeated .google.protobuf.FileDescriptorProto proto_file = 15;
@ -1079,7 +1069,7 @@ void CodeGeneratorRequest::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->proto_file_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
15, this->proto_file(static_cast<int>(i)), deterministic, target);
15, this->proto_file(static_cast<int>(i)), target);
}
if (_internal_metadata_.have_unknown_fields()) {
@ -1328,13 +1318,13 @@ const char* CodeGeneratorResponse_File::_InternalParse(const char* begin, const
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// optional string name = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.compiler.CodeGeneratorResponse.File.name");
auto str = msg->mutable_name();
@ -1353,7 +1343,7 @@ const char* CodeGeneratorResponse_File::_InternalParse(const char* begin, const
// optional string insertion_point = 2;
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.compiler.CodeGeneratorResponse.File.insertion_point");
auto str = msg->mutable_insertion_point();
@ -1372,7 +1362,7 @@ const char* CodeGeneratorResponse_File::_InternalParse(const char* begin, const
// optional string content = 15;
case 15: {
if (static_cast<::google::protobuf::uint8>(tag) != 122) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.compiler.CodeGeneratorResponse.File.content");
auto str = msg->mutable_content();
@ -1389,7 +1379,7 @@ const char* CodeGeneratorResponse_File::_InternalParse(const char* begin, const
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -1403,13 +1393,9 @@ const char* CodeGeneratorResponse_File::_InternalParse(const char* begin, const
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool CodeGeneratorResponse_File::MergePartialFromCodedStream(
@ -1533,8 +1519,7 @@ void CodeGeneratorResponse_File::SerializeWithCachedSizes(
}
::google::protobuf::uint8* CodeGeneratorResponse_File::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.compiler.CodeGeneratorResponse.File)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -1785,13 +1770,13 @@ const char* CodeGeneratorResponse::_InternalParse(const char* begin, const char*
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// optional string error = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.compiler.CodeGeneratorResponse.error");
auto str = msg->mutable_error();
@ -1811,7 +1796,7 @@ const char* CodeGeneratorResponse::_InternalParse(const char* begin, const char*
case 15: {
if (static_cast<::google::protobuf::uint8>(tag) != 122) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::compiler::CodeGeneratorResponse_File::_InternalParse;
object = msg->add_file();
@ -1826,7 +1811,7 @@ const char* CodeGeneratorResponse::_InternalParse(const char* begin, const char*
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -1840,13 +1825,9 @@ const char* CodeGeneratorResponse::_InternalParse(const char* begin, const char*
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool CodeGeneratorResponse::MergePartialFromCodedStream(
@ -1940,8 +1921,7 @@ void CodeGeneratorResponse::SerializeWithCachedSizes(
}
::google::protobuf::uint8* CodeGeneratorResponse::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.compiler.CodeGeneratorResponse)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -1963,7 +1943,7 @@ void CodeGeneratorResponse::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->file_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
15, this->file(static_cast<int>(i)), deterministic, target);
15, this->file(static_cast<int>(i)), target);
}
if (_internal_metadata_.have_unknown_fields()) {

View File

@ -161,7 +161,7 @@ class PROTOC_EXPORT Version : public ::google::protobuf::Message /* @@protoc_ins
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -313,7 +313,7 @@ class PROTOC_EXPORT CodeGeneratorRequest : public ::google::protobuf::Message /*
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -487,7 +487,7 @@ class PROTOC_EXPORT CodeGeneratorResponse_File : public ::google::protobuf::Mess
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -647,7 +647,7 @@ class PROTOC_EXPORT CodeGeneratorResponse : public ::google::protobuf::Message /
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

View File

@ -359,6 +359,22 @@ bool Generator::Generate(const FileDescriptor* file,
return !printer.failed();
}
// BEGIN GOOGLE-INTERNAL
// Strip the google3.third_party.py. prefix off of a module name as we
// NEVER want that invalid module import path to be generated in google3.
// Our sys.path has google3/third_party/py/ in it. All modules from
// that tree need to be imported using just their own name.
// See http://go/ThirdPartyPython
void StripThirdPartyPy(string* module_name) {
const string third_party_py_prefix = "google3.third_party.py.";
int len = third_party_py_prefix.length();
if (module_name->compare(0, len,
third_party_py_prefix, 0,
len) == 0) {
*module_name = module_name->erase(0, len);
}
}
// END GOOGLE-INTERNAL
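For reference, a minimal sketch of how the helper above behaves on typical module names (hypothetical snippet, not part of this change; std::string stands in for the repo's string alias):

#include <string>

// Hypothetical check of StripThirdPartyPy: only the exact google3 prefix is removed.
void StripThirdPartyPyExample() {
  std::string stripped = "google3.third_party.py.foo.bar_pb2";
  StripThirdPartyPy(&stripped);   // now "foo.bar_pb2"
  std::string untouched = "some.other.module_pb2";
  StripThirdPartyPy(&untouched);  // prefix absent, value left unchanged
}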
// Prints Python imports for all modules imported by |file|.
void Generator::PrintImports() const {
@ -367,6 +383,9 @@ void Generator::PrintImports() const {
string module_name = ModuleName(filename);
string module_alias = ModuleAlias(filename);
// BEGIN GOOGLE-INTERNAL
StripThirdPartyPy(&module_name);
// END GOOGLE-INTERNAL
if (ContainsPythonKeyword(module_name)) {
// If the module path contains a Python keyword, we have to quote the
// module name and import it using importlib. Otherwise the usual kind of
@ -397,6 +416,9 @@ void Generator::PrintImports() const {
// Print public imports.
for (int i = 0; i < file_->public_dependency_count(); ++i) {
string module_name = ModuleName(file_->public_dependency(i)->name());
// BEGIN GOOGLE-INTERNAL
StripThirdPartyPy(&module_name);
// END GOOGLE-INTERNAL
printer_->Print("from $module$ import *\n", "module", module_name);
}
printer_->Print("\n");

View File

@ -60,9 +60,6 @@
// Author: kenton@google.com (Kenton Varda)
#ifndef GOOGLE_PROTOBUF_COMPILER_ZIP_WRITER_H__
#define GOOGLE_PROTOBUF_COMPILER_ZIP_WRITER_H__
#include <vector>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/io/zero_copy_stream.h>
@ -94,5 +91,3 @@ class ZipWriter {
} // namespace compiler
} // namespace protobuf
} // namespace google
#endif // GOOGLE_PROTOBUF_COMPILER_ZIP_WRITER_H__

View File

@ -5073,10 +5073,12 @@ void DescriptorBuilder::CheckEnumValueUniqueness(
if (!inserted && insert_result.first->second->name() != value->name() &&
insert_result.first->second->number() != value->number()) {
string error_message =
"When enum name is stripped and label is PascalCased (" + stripped +
"), this value label conflicts with " + values[stripped]->name() +
". This will make the proto fail to compile for some languages, such "
"as C#.";
"Enum name " + value->name() + " has the same name as " +
values[stripped]->name() +
" if you ignore case and strip out the enum name prefix (if any). "
"This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please "
"assign the same numeric value to both enums.";
// There are proto2 enums out there with conflicting names, so to preserve
// compatibility we issue only a warning for proto2.
if (result->file()->syntax() == FileDescriptor::SYNTAX_PROTO2) {

View File

@ -232,6 +232,8 @@ class PROTOBUF_EXPORT LazyDescriptor {
// Use DescriptorPool to construct your own descriptors.
class PROTOBUF_EXPORT Descriptor {
public:
typedef DescriptorProto Proto;
// The name of the message type, not including its scope.
const std::string& name() const;
@ -345,6 +347,8 @@ class PROTOBUF_EXPORT Descriptor {
// A range of field numbers which are designated for third-party
// extensions.
struct ExtensionRange {
typedef DescriptorProto_ExtensionRange Proto;
typedef ExtensionRangeOptions OptionsType;
// See Descriptor::CopyTo().
@ -511,6 +515,8 @@ class PROTOBUF_EXPORT Descriptor {
// Use DescriptorPool to construct your own descriptors.
class PROTOBUF_EXPORT FieldDescriptor {
public:
typedef FieldDescriptorProto Proto;
// Identifies a field type. 0 is reserved for errors. The order is weird
// for historical reasons. Types 12 and up are new in proto2.
enum Type {
@ -821,6 +827,8 @@ class PROTOBUF_EXPORT FieldDescriptor {
// Describes a oneof defined in a message type.
class PROTOBUF_EXPORT OneofDescriptor {
public:
typedef OneofDescriptorProto Proto;
const std::string& name() const; // Name of this oneof.
const std::string& full_name() const; // Fully-qualified name of the oneof.
@ -895,6 +903,8 @@ class PROTOBUF_EXPORT OneofDescriptor {
// to construct your own descriptors.
class PROTOBUF_EXPORT EnumDescriptor {
public:
typedef EnumDescriptorProto Proto;
// The name of this enum type in the containing scope.
const std::string& name() const;
@ -1051,6 +1061,8 @@ class PROTOBUF_EXPORT EnumDescriptor {
// your own descriptors.
class PROTOBUF_EXPORT EnumValueDescriptor {
public:
typedef EnumValueDescriptorProto Proto;
const std::string& name() const; // Name of this enum constant.
int index() const; // Index within the enums's Descriptor.
int number() const; // Numeric value of this enum constant.
@ -1129,6 +1141,8 @@ class PROTOBUF_EXPORT EnumValueDescriptor {
// ServiceDescriptor. Use DescriptorPool to construct your own descriptors.
class PROTOBUF_EXPORT ServiceDescriptor {
public:
typedef ServiceDescriptorProto Proto;
// The name of the service, not including its containing scope.
const std::string& name() const;
// The fully-qualified name of the service, scope delimited by periods.
@ -1209,6 +1223,8 @@ class PROTOBUF_EXPORT ServiceDescriptor {
// own descriptors.
class PROTOBUF_EXPORT MethodDescriptor {
public:
typedef MethodDescriptorProto Proto;
// Name of this method, not including containing scope.
const std::string& name() const;
// The fully-qualified name of the method, scope delimited by periods.
@ -1294,6 +1310,8 @@ class PROTOBUF_EXPORT MethodDescriptor {
// descriptor->file(). Use DescriptorPool to construct your own descriptors.
class PROTOBUF_EXPORT FileDescriptor {
public:
typedef FileDescriptorProto Proto;
// The filename, relative to the source tree.
// e.g. "foo/bar/baz.proto"
const std::string& name() const;
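The Proto typedefs added above give template code a way to name the wire-format counterpart of any descriptor class. A minimal sketch of the kind of generic helper this enables (the helper itself is illustrative and not part of this change; it relies only on the existing CopyTo() methods):

#include <google/protobuf/descriptor.h>
#include <google/protobuf/descriptor.pb.h>

// Illustrative only: works for Descriptor, FieldDescriptor, EnumDescriptor,
// ServiceDescriptor, FileDescriptor, etc., because each now exposes a Proto
// typedef and already provides a matching CopyTo(Proto*).
template <typename DescriptorT>
typename DescriptorT::Proto DescriptorToProto(const DescriptorT& descriptor) {
  typename DescriptorT::Proto proto;
  descriptor.CopyTo(&proto);
  return proto;
}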

File diff suppressed because it is too large

View File

@ -386,7 +386,7 @@ class PROTOBUF_EXPORT FileDescriptorSet : public ::google::protobuf::Message /*
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -526,7 +526,7 @@ class PROTOBUF_EXPORT FileDescriptorProto : public ::google::protobuf::Message /
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -855,7 +855,7 @@ class PROTOBUF_EXPORT DescriptorProto_ExtensionRange : public ::google::protobuf
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -1011,7 +1011,7 @@ class PROTOBUF_EXPORT DescriptorProto_ReservedRange : public ::google::protobuf:
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -1154,7 +1154,7 @@ class PROTOBUF_EXPORT DescriptorProto : public ::google::protobuf::Message /* @@
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -1436,7 +1436,7 @@ class PROTOBUF_EXPORT ExtensionRangeOptions : public ::google::protobuf::Message
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -1579,7 +1579,7 @@ class PROTOBUF_EXPORT FieldDescriptorProto : public ::google::protobuf::Message
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -1962,7 +1962,7 @@ class PROTOBUF_EXPORT OneofDescriptorProto : public ::google::protobuf::Message
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -2127,7 +2127,7 @@ class PROTOBUF_EXPORT EnumDescriptorProto_EnumReservedRange : public ::google::p
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -2270,7 +2270,7 @@ class PROTOBUF_EXPORT EnumDescriptorProto : public ::google::protobuf::Message /
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -2486,7 +2486,7 @@ class PROTOBUF_EXPORT EnumValueDescriptorProto : public ::google::protobuf::Mess
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -2659,7 +2659,7 @@ class PROTOBUF_EXPORT ServiceDescriptorProto : public ::google::protobuf::Messag
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -2837,7 +2837,7 @@ class PROTOBUF_EXPORT MethodDescriptorProto : public ::google::protobuf::Message
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -3068,7 +3068,7 @@ class PROTOBUF_EXPORT FileOptions : public ::google::protobuf::Message /* @@prot
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -3569,7 +3569,7 @@ class PROTOBUF_EXPORT MessageOptions : public ::google::protobuf::Message /* @@p
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -3744,7 +3744,7 @@ class PROTOBUF_EXPORT FieldOptions : public ::google::protobuf::Message /* @@pro
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -3991,7 +3991,7 @@ class PROTOBUF_EXPORT OneofOptions : public ::google::protobuf::Message /* @@pro
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -4134,7 +4134,7 @@ class PROTOBUF_EXPORT EnumOptions : public ::google::protobuf::Message /* @@prot
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -4293,7 +4293,7 @@ class PROTOBUF_EXPORT EnumValueOptions : public ::google::protobuf::Message /* @
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -4444,7 +4444,7 @@ class PROTOBUF_EXPORT ServiceOptions : public ::google::protobuf::Message /* @@p
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -4595,7 +4595,7 @@ class PROTOBUF_EXPORT MethodOptions : public ::google::protobuf::Message /* @@pr
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -4782,7 +4782,7 @@ class PROTOBUF_EXPORT UninterpretedOption_NamePart : public ::google::protobuf::
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -4945,7 +4945,7 @@ class PROTOBUF_EXPORT UninterpretedOption : public ::google::protobuf::Message /
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -5186,7 +5186,7 @@ class PROTOBUF_EXPORT SourceCodeInfo_Location : public ::google::protobuf::Messa
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -5414,7 +5414,7 @@ class PROTOBUF_EXPORT SourceCodeInfo : public ::google::protobuf::Message /* @@p
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -5556,7 +5556,7 @@ class PROTOBUF_EXPORT GeneratedCodeInfo_Annotation : public ::google::protobuf::
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -5738,7 +5738,7 @@ class PROTOBUF_EXPORT GeneratedCodeInfo : public ::google::protobuf::Message /*
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

View File

@ -6005,6 +6005,34 @@ TEST_F(ValidationErrorTest, MapEntryConflictsWithEnum) {
"with an existing enum type.\n");
}
TEST_F(ValidationErrorTest, EnumValuesConflictWithDifferentCasing) {
BuildFileWithErrors(
"syntax: 'proto3'"
"name: 'foo.proto' "
"enum_type {"
" name: 'FooEnum' "
" value { name: 'BAR' number: 0 }"
" value { name: 'bar' number: 1 }"
"}",
"foo.proto: bar: NAME: Enum name bar has the same name as BAR "
"if you ignore case and strip out the enum name prefix (if any). "
"This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please assign "
"the same numeric value to both enums.\n");
// Not an error because both enums are mapped to the same value.
BuildFile(
"syntax: 'proto3'"
"name: 'foo.proto' "
"enum_type {"
" name: 'FooEnum' "
" options { allow_alias: true }"
" value { name: 'UNKNOWN' number: 0 }"
" value { name: 'BAR' number: 1 }"
" value { name: 'bar' number: 1 }"
"}");
}
TEST_F(ValidationErrorTest, EnumValuesConflictWhenPrefixesStripped) {
BuildFileWithErrors(
"syntax: 'proto3'"
@ -6014,9 +6042,11 @@ TEST_F(ValidationErrorTest, EnumValuesConflictWhenPrefixesStripped) {
" value { name: 'FOO_ENUM_BAZ' number: 0 }"
" value { name: 'BAZ' number: 1 }"
"}",
"foo.proto: BAZ: NAME: When enum name is stripped and label is "
"PascalCased (Baz), this value label conflicts with FOO_ENUM_BAZ. This "
"will make the proto fail to compile for some languages, such as C#.\n");
"foo.proto: BAZ: NAME: Enum name BAZ has the same name as FOO_ENUM_BAZ "
"if you ignore case and strip out the enum name prefix (if any). "
"This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please assign "
"the same numeric value to both enums.\n");
BuildFileWithErrors(
"syntax: 'proto3'"
@ -6026,9 +6056,11 @@ TEST_F(ValidationErrorTest, EnumValuesConflictWhenPrefixesStripped) {
" value { name: 'FOOENUM_BAZ' number: 0 }"
" value { name: 'BAZ' number: 1 }"
"}",
"foo.proto: BAZ: NAME: When enum name is stripped and label is "
"PascalCased (Baz), this value label conflicts with FOOENUM_BAZ. This "
"will make the proto fail to compile for some languages, such as C#.\n");
"foo.proto: BAZ: NAME: Enum name BAZ has the same name as FOOENUM_BAZ "
"if you ignore case and strip out the enum name prefix (if any). "
"This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please assign "
"the same numeric value to both enums.\n");
BuildFileWithErrors(
"syntax: 'proto3'"
@ -6038,10 +6070,11 @@ TEST_F(ValidationErrorTest, EnumValuesConflictWhenPrefixesStripped) {
" value { name: 'FOO_ENUM_BAR_BAZ' number: 0 }"
" value { name: 'BAR__BAZ' number: 1 }"
"}",
"foo.proto: BAR__BAZ: NAME: When enum name is stripped and label is "
"PascalCased (BarBaz), this value label conflicts with "
"FOO_ENUM_BAR_BAZ. This will make the proto fail to compile for some "
"languages, such as C#.\n");
"foo.proto: BAR__BAZ: NAME: Enum name BAR__BAZ has the same name as "
"FOO_ENUM_BAR_BAZ if you ignore case and strip out the enum name prefix "
"(if any). This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please assign "
"the same numeric value to both enums.\n");
BuildFileWithErrors(
"syntax: 'proto3'"
@ -6051,10 +6084,11 @@ TEST_F(ValidationErrorTest, EnumValuesConflictWhenPrefixesStripped) {
" value { name: 'FOO_ENUM__BAR_BAZ' number: 0 }"
" value { name: 'BAR_BAZ' number: 1 }"
"}",
"foo.proto: BAR_BAZ: NAME: When enum name is stripped and label is "
"PascalCased (BarBaz), this value label conflicts with "
"FOO_ENUM__BAR_BAZ. This will make the proto fail to compile for some "
"languages, such as C#.\n");
"foo.proto: BAR_BAZ: NAME: Enum name BAR_BAZ has the same name as "
"FOO_ENUM__BAR_BAZ if you ignore case and strip out the enum name prefix "
"(if any). This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please assign "
"the same numeric value to both enums.\n");
// This isn't an error because the underscore will cause the PascalCase to
// differ by case (BarBaz vs. Barbaz).
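The rule exercised by these tests can be sketched as follows (a simplified stand-in, with names of my own, for the check in descriptor.cc; the real code also handles the value map and the proto2 warning path):

#include <cctype>
#include <string>

// PascalCase a name: drop underscores, uppercase the letter that follows each one.
std::string ToPascalCase(const std::string& name) {
  std::string out;
  bool next_upper = true;
  for (char c : name) {
    if (c == '_') { next_upper = true; continue; }
    const unsigned char uc = static_cast<unsigned char>(c);
    out.push_back(static_cast<char>(next_upper ? std::toupper(uc) : std::tolower(uc)));
    next_upper = false;
  }
  return out;
}

// Two values conflict if they normalize to the same string once the PascalCased
// enum name is stripped as a prefix: in FooEnum, FOO_ENUM_BAZ and BAZ both
// become "Baz", while BAR_BAZ (BarBaz) and BARBAZ (Barbaz) differ by case only
// and therefore do not collide.
bool ValuesConflict(const std::string& enum_name, const std::string& a,
                    const std::string& b) {
  const std::string prefix = ToPascalCase(enum_name);
  auto normalize = [&](const std::string& v) {
    std::string n = ToPascalCase(v);
    if (n.compare(0, prefix.size(), prefix) == 0) n.erase(0, prefix.size());
    return n;
  };
  return normalize(a) == normalize(b);
}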

View File

@ -182,14 +182,14 @@ const char* Duration::_InternalParse(const char* begin, const char* end, void* o
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// int64 seconds = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 8) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int64 value = val;
msg->set_seconds(value);
@ -199,14 +199,14 @@ const char* Duration::_InternalParse(const char* begin, const char* end, void* o
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 16) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int32 value = val;
msg->set_nanos(value);
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -220,13 +220,6 @@ const char* Duration::_InternalParse(const char* begin, const char* end, void* o
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Duration::MergePartialFromCodedStream(
@ -310,8 +303,7 @@ void Duration::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Duration::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Duration)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;

View File

@ -139,7 +139,7 @@ class PROTOBUF_EXPORT Duration : public ::google::protobuf::Message /* @@protoc_
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

View File

@ -168,11 +168,10 @@ const char* Empty::_InternalParse(const char* begin, const char* end, void* obje
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
default: {
handle_unusual: (void)&&handle_unusual;
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -186,13 +185,6 @@ const char* Empty::_InternalParse(const char* begin, const char* end, void* obje
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Empty::MergePartialFromCodedStream(
@ -235,8 +227,7 @@ void Empty::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Empty::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Empty)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;

View File

@ -139,7 +139,7 @@ class PROTOBUF_EXPORT Empty : public ::google::protobuf::Message /* @@protoc_ins
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

View File

@ -48,6 +48,7 @@
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/parse_context.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/port.h>
#include <google/protobuf/repeated_field.h>
#include <google/protobuf/wire_format_lite.h>
@ -461,13 +462,11 @@ class PROTOBUF_EXPORT ExtensionSet {
// Returns a pointer past the last written byte.
uint8* InternalSerializeWithCachedSizesToArray(int start_field_number,
int end_field_number,
bool deterministic,
uint8* target) const;
// Like above but serializes in MessageSet format.
void SerializeMessageSetWithCachedSizes(io::CodedOutputStream* output) const;
uint8* InternalSerializeMessageSetWithCachedSizesToArray(bool deterministic,
uint8* target) const;
uint8* InternalSerializeMessageSetWithCachedSizesToArray(uint8* target) const;
// For backward-compatibility, versions of two of the above methods that
// serialize deterministically iff SetDefaultSerializationDeterministic()
@ -531,12 +530,6 @@ class PROTOBUF_EXPORT ExtensionSet {
virtual void WriteMessage(int number,
io::CodedOutputStream* output) const = 0;
virtual uint8* WriteMessageToArray(int number, uint8* target) const = 0;
virtual uint8* InternalWriteMessageToArray(int number, bool,
uint8* target) const {
// TODO(gpike): make this pure virtual. This is a placeholder because we
// need to update third_party/upb, for example.
return WriteMessageToArray(number, target);
}
private:
virtual void UnusedKeyMethod(); // Dummy key method to avoid weak vtable.
@ -606,12 +599,11 @@ class PROTOBUF_EXPORT ExtensionSet {
void SerializeFieldWithCachedSizes(int number,
io::CodedOutputStream* output) const;
uint8* InternalSerializeFieldWithCachedSizesToArray(int number,
bool deterministic,
uint8* target) const;
void SerializeMessageSetItemWithCachedSizes(
int number, io::CodedOutputStream* output) const;
uint8* InternalSerializeMessageSetItemWithCachedSizesToArray(
int number, bool deterministic, uint8* target) const;
int number, uint8* target) const;
size_t ByteSize(int number) const;
size_t MessageSetItemByteSize(int number) const;
void Clear();
@ -819,11 +811,10 @@ const char* ParseMessageSet(const char* begin, const char* end, Msg* msg,
ExtensionSet* ext, Metadata* metadata,
internal::ParseContext* ctx) {
auto ptr = begin;
int depth;
(void)depth;
int depth = 0;
while (ptr < end) {
uint32 tag;
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
if (tag == WireFormatLite::kMessageSetItemStartTag) {
ctx->extra_parse_data().payload.clear();
@ -848,6 +839,7 @@ const char* ParseMessageSet(const char* begin, const char* end, Msg* msg,
}
return ptr;
}
#endif
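For readers following ParseMessageSet above, the tags it matches against come from the standard MessageSet item layout; a short reference sketch (constant names are mine, values are ordinary wire-format arithmetic and mirror WireFormatLite's kMessageSetItemStartTag and friends):

#include <cstdint>

// A MessageSet item is a group (field 1) carrying the extension's type_id
// (field 2, varint) and its serialized payload (field 3, length-delimited).
constexpr uint32_t kItemStartTag = (1 << 3) | 3;  // WIRETYPE_START_GROUP      -> 11
constexpr uint32_t kItemEndTag   = (1 << 3) | 4;  // WIRETYPE_END_GROUP        -> 12
constexpr uint32_t kTypeIdTag    = (2 << 3) | 0;  // WIRETYPE_VARINT           -> 16
constexpr uint32_t kMessageTag   = (3 << 3) | 2;  // WIRETYPE_LENGTH_DELIMITED -> 26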
// These are just for convenience...

View File

@ -399,7 +399,7 @@ const char* ExtensionSet::ParseMessageSetItem(
uint32 tag = *ptr++;
if (tag == WireFormatLite::kMessageSetTypeIdTag) {
uint32 type_id;
ptr = Varint::Parse32(ptr, &type_id);
ptr = io::Parse32(ptr, &type_id);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
if (ctx->extra_parse_data().payload.empty()) {
@ -439,7 +439,7 @@ const char* ExtensionSet::ParseMessageSetItem(
break;
} else if (tag == WireFormatLite::kMessageSetMessageTag) {
uint32 size;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ParseClosure child = {internal::StringParser,
&ctx->extra_parse_data().payload};
@ -452,7 +452,7 @@ const char* ExtensionSet::ParseMessageSetItem(
}
} else {
ptr--;
ptr = Varint::Parse32(ptr, &tag);
ptr = io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
auto res =
ParseField(tag, parent, ptr, end, containing_type, metadata, ctx);
@ -555,26 +555,23 @@ size_t ExtensionSet::Extension::SpaceUsedExcludingSelfLong() const {
uint8* ExtensionSet::SerializeWithCachedSizesToArray(int start_field_number,
int end_field_number,
uint8* target) const {
return InternalSerializeWithCachedSizesToArray(
start_field_number, end_field_number,
io::CodedOutputStream::IsDefaultSerializationDeterministic(), target);
return InternalSerializeWithCachedSizesToArray(start_field_number,
end_field_number, target);
}
uint8* ExtensionSet::SerializeMessageSetWithCachedSizesToArray(
uint8* target) const {
return InternalSerializeMessageSetWithCachedSizesToArray(
io::CodedOutputStream::IsDefaultSerializationDeterministic(), target);
return InternalSerializeMessageSetWithCachedSizesToArray(target);
}
uint8* ExtensionSet::InternalSerializeWithCachedSizesToArray(
int start_field_number, int end_field_number, bool deterministic,
uint8* target) const {
int start_field_number, int end_field_number, uint8* target) const {
if (PROTOBUF_PREDICT_FALSE(is_large())) {
const auto& end = map_.large->end();
for (auto it = map_.large->lower_bound(start_field_number);
it != end && it->first < end_field_number; ++it) {
target = it->second.InternalSerializeFieldWithCachedSizesToArray(
it->first, deterministic, target);
it->first, target);
}
return target;
}
@ -582,23 +579,23 @@ uint8* ExtensionSet::InternalSerializeWithCachedSizesToArray(
for (const KeyValue* it = std::lower_bound(
flat_begin(), end, start_field_number, KeyValue::FirstComparator());
it != end && it->first < end_field_number; ++it) {
target = it->second.InternalSerializeFieldWithCachedSizesToArray(
it->first, deterministic, target);
target = it->second.InternalSerializeFieldWithCachedSizesToArray(it->first,
target);
}
return target;
}
uint8* ExtensionSet::InternalSerializeMessageSetWithCachedSizesToArray(
bool deterministic, uint8* target) const {
ForEach([deterministic, &target](int number, const Extension& ext) {
target = ext.InternalSerializeMessageSetItemWithCachedSizesToArray(
number, deterministic, target);
uint8* target) const {
ForEach([&target](int number, const Extension& ext) {
target = ext.InternalSerializeMessageSetItemWithCachedSizesToArray(number,
target);
});
return target;
}
uint8* ExtensionSet::Extension::InternalSerializeFieldWithCachedSizesToArray(
int number, bool deterministic, uint8* target) const {
int number, uint8* target) const {
if (is_repeated) {
if (is_packed) {
if (cached_size == 0) return target;
@ -666,14 +663,13 @@ uint8* ExtensionSet::Extension::InternalSerializeFieldWithCachedSizesToArray(
HANDLE_TYPE( BYTES, Bytes, string);
HANDLE_TYPE( ENUM, Enum, enum);
#undef HANDLE_TYPE
#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \
case FieldDescriptor::TYPE_##UPPERCASE: \
for (int i = 0; i < repeated_##LOWERCASE##_value->size(); i++) { \
target = WireFormatLite::InternalWrite##CAMELCASE##ToArray( \
number, repeated_##LOWERCASE##_value->Get(i), \
deterministic, target); \
} \
break
#define HANDLE_TYPE(UPPERCASE, CAMELCASE, LOWERCASE) \
case FieldDescriptor::TYPE_##UPPERCASE: \
for (int i = 0; i < repeated_##LOWERCASE##_value->size(); i++) { \
target = WireFormatLite::InternalWrite##CAMELCASE##ToArray( \
number, repeated_##LOWERCASE##_value->Get(i), target); \
} \
break
HANDLE_TYPE( GROUP, Group, message);
HANDLE_TYPE( MESSAGE, Message, message);
@ -708,11 +704,10 @@ uint8* ExtensionSet::Extension::InternalSerializeFieldWithCachedSizesToArray(
#undef HANDLE_TYPE
case FieldDescriptor::TYPE_MESSAGE:
if (is_lazy) {
target = lazymessage_value->InternalWriteMessageToArray(
number, deterministic, target);
target = lazymessage_value->WriteMessageToArray(number, target);
} else {
target = WireFormatLite::InternalWriteMessageToArray(
number, *message_value, deterministic, target);
number, *message_value, target);
}
break;
}
@ -722,12 +717,11 @@ uint8* ExtensionSet::Extension::InternalSerializeFieldWithCachedSizesToArray(
uint8*
ExtensionSet::Extension::InternalSerializeMessageSetItemWithCachedSizesToArray(
int number, bool deterministic, uint8* target) const {
int number, uint8* target) const {
if (type != WireFormatLite::TYPE_MESSAGE || is_repeated) {
// Not a valid MessageSet extension, but serialize it the normal way.
GOOGLE_LOG(WARNING) << "Invalid message set extension.";
return InternalSerializeFieldWithCachedSizesToArray(number, deterministic,
target);
return InternalSerializeFieldWithCachedSizesToArray(number, target);
}
if (is_cleared) return target;
@ -740,12 +734,11 @@ ExtensionSet::Extension::InternalSerializeMessageSetItemWithCachedSizesToArray(
WireFormatLite::kMessageSetTypeIdNumber, number, target);
// Write message.
if (is_lazy) {
target = lazymessage_value->InternalWriteMessageToArray(
WireFormatLite::kMessageSetMessageNumber, deterministic, target);
target = lazymessage_value->WriteMessageToArray(
WireFormatLite::kMessageSetMessageNumber, target);
} else {
target = WireFormatLite::InternalWriteMessageToArray(
WireFormatLite::kMessageSetMessageNumber, *message_value, deterministic,
target);
WireFormatLite::kMessageSetMessageNumber, *message_value, target);
}
// End group.
target = io::CodedOutputStream::WriteTagToArray(

View File

@ -92,7 +92,7 @@ std::pair<const char*, bool> ExtensionSet::ParseFieldWithExtensionInfo(
#define HANDLE_VARINT_TYPE(UPPERCASE, CPP_CAMELCASE) \
case WireFormatLite::TYPE_##UPPERCASE: { \
uint64 value; \
ptr = Varint::Parse64(ptr, &value); \
ptr = io::Parse64(ptr, &value); \
GOOGLE_PROTOBUF_ASSERT_RETURN(ptr, std::make_pair(nullptr, true)); \
if (extension.is_repeated) { \
Add##CPP_CAMELCASE(number, WireFormatLite::TYPE_##UPPERCASE, \
@ -111,7 +111,7 @@ std::pair<const char*, bool> ExtensionSet::ParseFieldWithExtensionInfo(
#define HANDLE_SVARINT_TYPE(UPPERCASE, CPP_CAMELCASE, SIZE) \
case WireFormatLite::TYPE_##UPPERCASE: { \
uint64 val; \
ptr = Varint::Parse64(ptr, &val); \
ptr = io::Parse64(ptr, &val); \
GOOGLE_PROTOBUF_ASSERT_RETURN(ptr, std::make_pair(nullptr, true)); \
auto value = WireFormatLite::ZigZagDecode##SIZE(val); \
if (extension.is_repeated) { \
@ -151,7 +151,7 @@ std::pair<const char*, bool> ExtensionSet::ParseFieldWithExtensionInfo(
case WireFormatLite::TYPE_ENUM: {
uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_ASSERT_RETURN(ptr, std::make_pair(nullptr, true));
int value = val;
@ -221,7 +221,7 @@ std::pair<const char*, bool> ExtensionSet::ParseFieldWithExtensionInfo(
length_delim:
uint32 size;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_ASSERT_RETURN(ptr, std::make_pair(nullptr, true));
if (size > end - ptr) goto len_delim_till_end;
{

View File

@ -176,14 +176,14 @@ const char* FieldMask::_InternalParse(const char* begin, const char* end, void*
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// repeated string paths = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.FieldMask.paths");
auto str = msg->add_paths();
@ -202,7 +202,7 @@ const char* FieldMask::_InternalParse(const char* begin, const char* end, void*
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -216,13 +216,9 @@ const char* FieldMask::_InternalParse(const char* begin, const char* end, void*
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool FieldMask::MergePartialFromCodedStream(
@ -296,8 +292,7 @@ void FieldMask::SerializeWithCachedSizes(
}
::google::protobuf::uint8* FieldMask::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.FieldMask)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;

View File

@ -139,7 +139,7 @@ class PROTOBUF_EXPORT FieldMask : public ::google::protobuf::Message /* @@protoc
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

View File

@ -1706,8 +1706,6 @@ void* GeneratedMessageReflection::MutableRawRepeatedField(
if (field->cpp_type() != cpptype)
ReportReflectionUsageTypeError(descriptor_,
field, "MutableRawRepeatedField", cpptype);
if (ctype >= 0 && !field->is_extension())
GOOGLE_CHECK_EQ(field->options().ctype(), ctype) << "subtype mismatch";
if (desc != NULL)
GOOGLE_CHECK_EQ(field->message_type(), desc) << "wrong submessage type";
if (field->is_extension()) {
@ -2224,7 +2222,7 @@ void* GeneratedMessageReflection::RepeatedFieldData(
}
}
MapFieldBase* GeneratedMessageReflection::MapData(
MapFieldBase* GeneratedMessageReflection::MutableMapData(
Message* message, const FieldDescriptor* field) const {
USAGE_CHECK(IsMapFieldInApi(field),
"GetMapData",
@ -2232,6 +2230,14 @@ MapFieldBase* GeneratedMessageReflection::MapData(
return MutableRaw<MapFieldBase>(message, field);
}
const MapFieldBase* GeneratedMessageReflection::GetMapData(
const Message& message, const FieldDescriptor* field) const {
USAGE_CHECK(IsMapFieldInApi(field),
"GetMapData",
"Field is not a map field.");
return &(GetRaw<MapFieldBase>(message, field));
}
namespace {
// Helper function to transform migration schema into reflection schema.

View File

@ -670,8 +670,11 @@ class GeneratedMessageReflection final : public Reflection {
Message* sub_message,
const FieldDescriptor* field) const;
internal::MapFieldBase* MapData(Message* message,
const FieldDescriptor* field) const override;
internal::MapFieldBase* MutableMapData(
Message* message, const FieldDescriptor* field) const override;
const internal::MapFieldBase* GetMapData(
const Message& message, const FieldDescriptor* field) const override;
friend inline // inline so nobody can call this function.
void

View File

@ -52,6 +52,8 @@
#error "You cannot SWIG proto headers"
#endif
#include <google/protobuf/port_def.inc>
namespace google {
namespace protobuf {
namespace internal {
@ -79,6 +81,41 @@ enum ProcessingTypes {
static_assert(TYPE_MAP < kRepeatedMask, "Invalid enum");
struct PROTOBUF_EXPORT FieldMetadata {
uint32 offset; // offset of this field in the struct
uint32 tag; // field * 8 + wire_type
// byte offset * 8 + bit_offset;
// if the high bit is set then this is the byte offset of the oneof_case
// for this field.
uint32 has_offset;
uint32 type; // the type of this field.
const void* ptr; // auxiliary data
// From the serializer point of view each fundamental type can occur in
// 4 different ways. For simplicity we treat all combinations as a cartesion
// product although not all combinations are allowed.
enum FieldTypeClass {
kPresence,
kNoPresence,
kRepeated,
kPacked,
kOneOf,
kNumTypeClasses // must be last enum
};
// C++ protobuf has 20 fundamental types, were we added Cord and StringPiece
// and also distinquish the same types if they have different wire format.
enum {
kCordType = 19,
kStringPieceType = 20,
kInlinedType = 21,
kNumTypes = 21,
kSpecial = kNumTypes * kNumTypeClasses,
};
static int CalculateType(int fundamental_type, FieldTypeClass type_class);
};
// TODO(ckennelly): Add a static assertion to ensure that these masks do not
// conflict with wiretypes.
@ -199,8 +236,108 @@ bool ParseMap(io::CodedInputStream* input, void* map_field) {
return WireFormatLite::ReadMessageNoVirtual(input, &parser);
}
struct SerializationTable {
int num_fields;
const FieldMetadata* field_table;
};
PROTOBUF_EXPORT void SerializeInternal(const uint8* base,
const FieldMetadata* table,
int32 num_fields,
io::CodedOutputStream* output);
inline void TableSerialize(const MessageLite& msg,
const SerializationTable* table,
io::CodedOutputStream* output) {
const FieldMetadata* field_table = table->field_table;
int num_fields = table->num_fields - 1;
const uint8* base = reinterpret_cast<const uint8*>(&msg);
// TODO(gerbens) This skips the first test if we could use the fast
// array serialization path, we should make this
// int cached_size =
// *reinterpret_cast<const int32*>(base + field_table->offset);
// SerializeWithCachedSize(msg, field_table + 1, num_fields, cached_size, ...)
// But we keep conformance with the old way for now.
SerializeInternal(base, field_table + 1, num_fields, output);
}
uint8* SerializeInternalToArray(const uint8* base, const FieldMetadata* table,
int32 num_fields, bool is_deterministic,
uint8* buffer);
inline uint8* TableSerializeToArray(const MessageLite& msg,
const SerializationTable* table,
bool is_deterministic, uint8* buffer) {
const uint8* base = reinterpret_cast<const uint8*>(&msg);
const FieldMetadata* field_table = table->field_table + 1;
int num_fields = table->num_fields - 1;
return SerializeInternalToArray(base, field_table, num_fields,
is_deterministic, buffer);
}
template <typename T>
struct CompareHelper {
bool operator()(const T& a, const T& b) { return a < b; }
};
template <>
struct CompareHelper<ArenaStringPtr> {
bool operator()(const ArenaStringPtr& a, const ArenaStringPtr& b) {
return a.Get() < b.Get();
}
};
struct CompareMapKey {
template <typename T>
bool operator()(const MapEntryHelper<T>& a, const MapEntryHelper<T>& b) {
return Compare(a.key_, b.key_);
}
template <typename T>
bool Compare(const T& a, const T& b) {
return CompareHelper<T>()(a, b);
}
};
template <typename MapFieldType, const SerializationTable* table>
void MapFieldSerializer(const uint8* base, uint32 offset, uint32 tag,
uint32 has_offset, io::CodedOutputStream* output) {
typedef MapEntryHelper<typename MapFieldType::EntryTypeTrait> Entry;
typedef typename MapFieldType::MapType::const_iterator Iter;
const MapFieldType& map_field =
*reinterpret_cast<const MapFieldType*>(base + offset);
const SerializationTable* t =
table +
has_offset; // has_offset is overloaded for maps to mean table offset
if (!output->IsSerializationDeterministic()) {
for (Iter it = map_field.GetMap().begin(); it != map_field.GetMap().end();
++it) {
Entry map_entry(*it);
output->WriteVarint32(tag);
output->WriteVarint32(map_entry._cached_size_);
SerializeInternal(reinterpret_cast<const uint8*>(&map_entry),
t->field_table, t->num_fields, output);
}
} else {
std::vector<Entry> v;
for (Iter it = map_field.GetMap().begin(); it != map_field.GetMap().end();
++it) {
v.push_back(Entry(*it));
}
std::sort(v.begin(), v.end(), CompareMapKey());
for (int i = 0; i < v.size(); i++) {
output->WriteVarint32(tag);
output->WriteVarint32(v[i]._cached_size_);
SerializeInternal(reinterpret_cast<const uint8*>(&v[i]), t->field_table,
t->num_fields, output);
}
}
}
} // namespace internal
} // namespace protobuf
} // namespace google
#include <google/protobuf/port_undef.inc>
#endif // GOOGLE_PROTOBUF_GENERATED_MESSAGE_TABLE_DRIVEN_H__

View File

@ -44,6 +44,7 @@
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/extension_set.h>
#include <google/protobuf/generated_message_table_driven.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/metadata_lite.h>
#include <google/protobuf/stubs/mutex.h>
@ -293,8 +294,15 @@ void SerializeMessageNoTable(const MessageLite* msg,
}
void SerializeMessageNoTable(const MessageLite* msg, ArrayOutput* output) {
output->ptr = msg->InternalSerializeWithCachedSizesToArray(
output->is_deterministic, output->ptr);
if (output->is_deterministic) {
io::ArrayOutputStream array_stream(output->ptr, INT_MAX);
io::CodedOutputStream o(&array_stream);
o.SetSerializationDeterministic(true);
msg->SerializeWithCachedSizes(&o);
output->ptr += o.ByteCount();
} else {
output->ptr = msg->InternalSerializeWithCachedSizesToArray(output->ptr);
}
}
// Helper to branch to fast path if possible
@ -303,14 +311,15 @@ void SerializeMessageDispatch(const MessageLite& msg,
int32 cached_size,
io::CodedOutputStream* output) {
const uint8* base = reinterpret_cast<const uint8*>(&msg);
// Try the fast path
uint8* ptr = output->GetDirectBufferForNBytesAndAdvance(cached_size);
if (ptr) {
// We use virtual dispatch to enable dedicated generated code for the
// fast path.
msg.InternalSerializeWithCachedSizesToArray(
output->IsSerializationDeterministic(), ptr);
return;
if (!output->IsSerializationDeterministic()) {
// Try the fast path
uint8* ptr = output->GetDirectBufferForNBytesAndAdvance(cached_size);
if (ptr) {
// We use virtual dispatch to enable dedicated generated code for the
// fast path.
msg.InternalSerializeWithCachedSizesToArray(ptr);
return;
}
}
SerializeInternal(base, field_table, num_fields, output);
}
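With the deterministic flag gone from the to-array fast path, callers that need deterministic output request it on the stream and take the CodedOutputStream route, exactly as the dispatch above does. A minimal sketch of that call pattern (hypothetical helper, using only public CodedOutputStream and MessageLite API):

#include <string>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <google/protobuf/message_lite.h>

// Hypothetical helper: serialize with deterministic (sorted) map ordering.
std::string SerializeDeterministically(const google::protobuf::MessageLite& msg) {
  std::string out;
  {
    google::protobuf::io::StringOutputStream raw(&out);
    google::protobuf::io::CodedOutputStream coded(&raw);
    // Disables the direct-buffer fast path above and routes map fields
    // through the sorted branch of MapFieldSerializer.
    coded.SetSerializationDeterministic(true);
    msg.SerializeToCodedStream(&coded);
  }  // CodedOutputStream flushes to `out` on destruction.
  return out;
}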
@ -647,7 +656,7 @@ void SerializeInternal(const uint8* base,
// Special cases
case FieldMetadata::kSpecial:
func = reinterpret_cast<SpecialSerializer>(
func = reinterpret_cast<SpecialSerializer>(
const_cast<void*>(field_metadata.ptr));
func (base, field_metadata.offset, field_metadata.tag,
field_metadata.has_offset, output);
@ -694,9 +703,9 @@ uint8* SerializeInternalToArray(const uint8* base,
io::ArrayOutputStream array_stream(array_output.ptr, INT_MAX);
io::CodedOutputStream output(&array_stream);
output.SetSerializationDeterministic(is_deterministic);
func = reinterpret_cast<SpecialSerializer>(
func = reinterpret_cast<SpecialSerializer>(
const_cast<void*>(field_metadata.ptr));
func (base, field_metadata.offset, field_metadata.tag,
func (base, field_metadata.offset, field_metadata.tag,
field_metadata.has_offset, &output);
array_output.ptr += output.ByteCount();
} break;

View File

@ -49,7 +49,6 @@
#include <google/protobuf/parse_context.h>
#include <google/protobuf/has_bits.h>
#include <google/protobuf/implicit_weak_message.h>
#include <google/protobuf/map_entry_lite.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/stubs/once.h> // Add direct dep on port for pb.cc
#include <google/protobuf/port.h>
@ -108,40 +107,6 @@ bool AllAreInitializedWeak(const RepeatedPtrField<T>& t) {
return true;
}
struct PROTOBUF_EXPORT FieldMetadata {
uint32 offset; // offset of this field in the struct
uint32 tag; // field * 8 + wire_type
// byte offset * 8 + bit_offset;
// if the high bit is set then this is the byte offset of the oneof_case
// for this field.
uint32 has_offset;
uint32 type; // the type of this field.
const void* ptr; // auxiliary data
// From the serializer point of view each fundamental type can occur in
// 4 different ways. For simplicity we treat all combinations as a cartesion
// product although not all combinations are allowed.
enum FieldTypeClass {
kPresence,
kNoPresence,
kRepeated,
kPacked,
kOneOf,
kNumTypeClasses // must be last enum
};
// C++ protobuf has 20 fundamental types, were we added Cord and StringPiece
// and also distinquish the same types if they have different wire format.
enum {
kCordType = 19,
kStringPieceType = 20,
kInlinedType = 21,
kNumTypes = 21,
kSpecial = kNumTypes * kNumTypeClasses,
};
static int CalculateType(int fundamental_type, FieldTypeClass type_class);
};
inline bool IsPresent(const void* base, uint32 hasbit) {
const uint32* has_bits_array = static_cast<const uint32*>(base);
return (has_bits_array[hasbit / 32] & (1u << (hasbit & 31))) != 0;
@ -165,104 +130,6 @@ PROTOBUF_EXPORT void UnknownFieldSerializerLite(const uint8* base,
uint32 has_offset,
io::CodedOutputStream* output);
struct SerializationTable {
int num_fields;
const FieldMetadata* field_table;
};
PROTOBUF_EXPORT void SerializeInternal(const uint8* base,
const FieldMetadata* table,
int32 num_fields,
io::CodedOutputStream* output);
inline void TableSerialize(const MessageLite& msg,
const SerializationTable* table,
io::CodedOutputStream* output) {
const FieldMetadata* field_table = table->field_table;
int num_fields = table->num_fields - 1;
const uint8* base = reinterpret_cast<const uint8*>(&msg);
// TODO(gerbens) This skips the first test if we could use the fast
// array serialization path, we should make this
// int cached_size =
// *reinterpret_cast<const int32*>(base + field_table->offset);
// SerializeWithCachedSize(msg, field_table + 1, num_fields, cached_size, ...)
// But we keep conformance with the old way for now.
SerializeInternal(base, field_table + 1, num_fields, output);
}
uint8* SerializeInternalToArray(const uint8* base, const FieldMetadata* table,
int32 num_fields, bool is_deterministic,
uint8* buffer);
inline uint8* TableSerializeToArray(const MessageLite& msg,
const SerializationTable* table,
bool is_deterministic, uint8* buffer) {
const uint8* base = reinterpret_cast<const uint8*>(&msg);
const FieldMetadata* field_table = table->field_table + 1;
int num_fields = table->num_fields - 1;
return SerializeInternalToArray(base, field_table, num_fields,
is_deterministic, buffer);
}
template <typename T>
struct CompareHelper {
bool operator()(const T& a, const T& b) { return a < b; }
};
template <>
struct CompareHelper<ArenaStringPtr> {
bool operator()(const ArenaStringPtr& a, const ArenaStringPtr& b) {
return a.Get() < b.Get();
}
};
struct CompareMapKey {
template <typename T>
bool operator()(const MapEntryHelper<T>& a, const MapEntryHelper<T>& b) {
return Compare(a.key_, b.key_);
}
template <typename T>
bool Compare(const T& a, const T& b) {
return CompareHelper<T>()(a, b);
}
};
template <typename MapFieldType, const SerializationTable* table>
void MapFieldSerializer(const uint8* base, uint32 offset, uint32 tag,
uint32 has_offset, io::CodedOutputStream* output) {
typedef MapEntryHelper<typename MapFieldType::EntryTypeTrait> Entry;
typedef typename MapFieldType::MapType::const_iterator Iter;
const MapFieldType& map_field =
*reinterpret_cast<const MapFieldType*>(base + offset);
const SerializationTable* t =
table +
has_offset; // has_offset is overloaded for maps to mean table offset
if (!output->IsSerializationDeterministic()) {
for (Iter it = map_field.GetMap().begin(); it != map_field.GetMap().end();
++it) {
Entry map_entry(*it);
output->WriteVarint32(tag);
output->WriteVarint32(map_entry._cached_size_);
SerializeInternal(reinterpret_cast<const uint8*>(&map_entry),
t->field_table, t->num_fields, output);
}
} else {
std::vector<Entry> v;
for (Iter it = map_field.GetMap().begin(); it != map_field.GetMap().end();
++it) {
v.push_back(Entry(*it));
}
std::sort(v.begin(), v.end(), CompareMapKey());
for (int i = 0; i < v.size(); i++) {
output->WriteVarint32(tag);
output->WriteVarint32(v[i]._cached_size_);
SerializeInternal(reinterpret_cast<const uint8*>(&v[i]), t->field_table,
t->num_fields, output);
}
}
}
PROTOBUF_EXPORT MessageLite* DuplicateIfNonNullInternal(MessageLite* message);
PROTOBUF_EXPORT MessageLite* GetOwnedMessageInternal(Arena* message_arena,
MessageLite* submessage,

View File

@ -34,7 +34,6 @@
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <google/protobuf/stubs/once.h>
#include <google/protobuf/wire_format_lite.h>
#include <google/protobuf/wire_format_lite_inl.h>
#include <google/protobuf/port_def.inc>

View File

@ -139,10 +139,6 @@
#include <google/protobuf/port_def.inc>
#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
#include "util/coding/varint.h"
#endif
namespace google {
namespace protobuf {
@ -168,6 +164,35 @@ T UnalignedLoad(const void* p) {
return res;
}
// TODO(gerbens) Experiment with best implementation.
// Clang unrolls the loop and generates pretty good code at O2; gcc doesn't.
// It is unclear whether we want the 64-bit parse loop unrolled, inlined, or as an
// opaque function call, hence experimentation is needed.
// The important guarantee is that it doesn't read more than size bytes from p.
template <int size, typename T>
const char* VarintParse(const char* p, T* out) {
T res = 0;
T extra = 0;
for (int i = 0; i < size; i++) {
T byte = static_cast<uint8>(p[i]);
res += byte << (i * 7);
int j = i + 1;
if (PROTOBUF_PREDICT_TRUE(byte < 128)) {
*out = res - extra;
return p + j;
}
extra += 128ull << (i * 7);
}
return nullptr;
}
inline const char* Parse32(const char* p, uint32* out) {
return VarintParse<5>(p, out);
}
inline const char* Parse64(const char* p, uint64* out) {
return VarintParse<10>(p, out);
}
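A standalone sketch of the decode loop above, written so it compiles in isolation; the name ParseVarint32 and the example bytes are illustrative only.
#include <cstdint>
// Decodes a base-128 varint of at most 5 bytes, mirroring VarintParse<5, uint32>.
const char* ParseVarint32(const char* p, uint32_t* out) {
  uint32_t res = 0;
  uint32_t extra = 0;
  for (int i = 0; i < 5; i++) {
    uint32_t byte = static_cast<uint8_t>(p[i]);
    res += byte << (i * 7);      // accumulate, continuation bit included
    if (byte < 128) {
      *out = res - extra;        // subtract the accumulated continuation bits
      return p + i + 1;
    }
    extra += 128u << (i * 7);
  }
  return nullptr;                // malformed: no terminating byte within 5 bytes
}
// Example: the bytes 0x96 0x01 decode to 150.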
// Class which reads and decodes binary data which is composed of varint-
// encoded integers and fixed-width pieces. Wraps a ZeroCopyInputStream.
// Most users will not need to deal with CodedInputStream.
@ -175,6 +200,8 @@ T UnalignedLoad(const void* p) {
// Most methods of CodedInputStream that return a bool return false if an
// underlying I/O error occurs or if the data is malformed. Once such a
// failure occurs, the CodedInputStream is broken and is no longer useful.
// After a failure, callers also should assume writes to "out" args may have
// occurred, though nothing useful can be determined from those writes.
class PROTOBUF_EXPORT CodedInputStream {
public:
// Create a CodedInputStream that reads from the given ZeroCopyInputStream.
@ -418,6 +445,7 @@ class PROTOBUF_EXPORT CodedInputStream {
void SetRecursionLimit(int limit);
int RecursionBudget() { return recursion_budget_; }
static int GetDefaultRecursionLimit() { return default_recursion_limit_; }
// Increments the current recursion depth. Returns true if the depth is
// under the limit, false if it has gone over.
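A brief usage sketch of the recursion-limit API described above, assuming only the public CodedInputStream interface; MyMessage is a hypothetical generated type.
#include <google/protobuf/io/coded_stream.h>
bool ParseDeeplyNested(const void* data, int size, MyMessage* msg) {
  google::protobuf::io::CodedInputStream input(
      static_cast<const google::protobuf::uint8*>(data), size);
  input.SetRecursionLimit(256);  // the default limit is 100
  return msg->ParseFromCodedStream(&input) && input.ConsumedEntireMessage();
}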

View File

@ -249,11 +249,9 @@ class MapEntryImpl : public Base {
}
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* output) const override {
output = KeyTypeHandler::InternalWriteToArray(kKeyFieldNumber, key(),
deterministic, output);
output = ValueTypeHandler::InternalWriteToArray(kValueFieldNumber, value(),
deterministic, output);
::google::protobuf::uint8* output) const override {
output = KeyTypeHandler::WriteToArray(kKeyFieldNumber, key(), output);
output = ValueTypeHandler::WriteToArray(kValueFieldNumber, value(), output);
return output;
}

View File

@ -742,7 +742,7 @@ class PROTOBUF_EXPORT MapIterator {
public:
MapIterator(Message* message, const FieldDescriptor* field) {
const Reflection* reflection = message->GetReflection();
map_ = reflection->MapData(message, field);
map_ = reflection->MutableMapData(message, field);
key_.SetType(field->message_type()->FindFieldByName("key")->cpp_type());
value_.SetType(field->message_type()->FindFieldByName("value")->cpp_type());
map_->InitializeIterator(this);

View File

@ -2042,19 +2042,30 @@ TEST(GeneratedMapFieldTest, DynamicMessageMergeFromDynamicMessage) {
unittest::TestMap::descriptor());
reflection_tester.SetMapFieldsViaMapReflection(message1.get());
// message2 is created by same factory.
std::unique_ptr<Message> message2;
message2.reset(
factory.GetPrototype(unittest::TestMap::descriptor())->New());
reflection_tester.SetMapFieldsViaMapReflection(message2.get());
// message3 is created by different factory.
DynamicMessageFactory factory3;
std::unique_ptr<Message> message3;
message3.reset(
factory3.GetPrototype(unittest::TestMap::descriptor())->New());
reflection_tester.SetMapFieldsViaMapReflection(message3.get());
message2->MergeFrom(*message1);
message3->MergeFrom(*message1);
// Test that MergeFrom does not sync to repeated fields and that
// there are no duplicate keys in text format.
string output1, output2;
string output1, output2, output3;
TextFormat::PrintToString(*message1, &output1);
TextFormat::PrintToString(*message2, &output2);
TextFormat::PrintToString(*message3, &output3);
EXPECT_EQ(output1, output2);
EXPECT_EQ(output1, output3);
}
TEST(GeneratedMapFieldTest, DynamicMessageCopyFrom) {

View File

@ -167,9 +167,6 @@ class MapTypeHandler<WireFormatLite::TYPE_MESSAGE, Type> {
MapEntryAccessorType* value);
static inline void Write(int field, const MapEntryAccessorType& value,
io::CodedOutputStream* output);
static inline uint8* InternalWriteToArray(int field,
const MapEntryAccessorType& value,
bool deterministic, uint8* target);
static inline uint8* WriteToArray(int field,
const MapEntryAccessorType& value,
uint8* target);
@ -226,14 +223,9 @@ class MapTypeHandler<WireFormatLite::TYPE_MESSAGE, Type> {
MapEntryAccessorType* value); \
static inline void Write(int field, const MapEntryAccessorType& value, \
io::CodedOutputStream* output); \
static inline uint8* InternalWriteToArray( \
int field, const MapEntryAccessorType& value, bool deterministic, \
uint8* target); \
static inline uint8* WriteToArray(int field, \
const MapEntryAccessorType& value, \
uint8* target) { \
return InternalWriteToArray(field, value, false, target); \
} \
uint8* target); \
static inline const MapEntryAccessorType& GetExternalReference( \
const TypeOnMemory& value); \
static inline void DeleteNoArena(const TypeOnMemory& x); \
@ -374,12 +366,9 @@ inline void MapTypeHandler<WireFormatLite::TYPE_MESSAGE, Type>::Write(
}
template <typename Type>
inline uint8*
MapTypeHandler<WireFormatLite::TYPE_MESSAGE, Type>::InternalWriteToArray(
int field, const MapEntryAccessorType& value, bool deterministic,
uint8* target) {
return WireFormatLite::InternalWriteMessageToArray(field, value,
deterministic, target);
inline uint8* MapTypeHandler<WireFormatLite::TYPE_MESSAGE, Type>::WriteToArray(
int field, const MapEntryAccessorType& value, uint8* target) {
return WireFormatLite::InternalWriteMessageToArray(field, value, target);
}
#define WRITE_METHOD(FieldType, DeclaredType) \
@ -391,9 +380,8 @@ MapTypeHandler<WireFormatLite::TYPE_MESSAGE, Type>::InternalWriteToArray(
} \
template <typename Type> \
inline uint8* \
MapTypeHandler<WireFormatLite::TYPE_##FieldType, \
Type>::InternalWriteToArray( \
int field, const MapEntryAccessorType& value, bool, uint8* target) { \
MapTypeHandler<WireFormatLite::TYPE_##FieldType, Type>::WriteToArray( \
int field, const MapEntryAccessorType& value, uint8* target) { \
return WireFormatLite::Write##DeclaredType##ToArray(field, value, target); \
}

View File

@ -290,7 +290,6 @@ ParseClosure GetPackedField(const FieldDescriptor* field, Message* msg,
if (field->file()->syntax() == FileDescriptor::SYNTAX_PROTO3) {
return {internal::PackedEnumParser, object};
} else {
GOOGLE_CHECK_EQ(field->file()->options().cc_api_version(), 2);
ctx->extra_parse_data().SetEnumValidatorArg(
ReflectiveValidator, field->enum_type(),
reflection->MutableUnknownFields(msg), field->number());
@ -307,6 +306,7 @@ ParseClosure GetPackedField(const FieldDescriptor* field, Message* msg,
default:
GOOGLE_LOG(FATAL) << "Type is not packable " << field->type();
return {}; // Make compiler happy
}
}
@ -315,19 +315,13 @@ ParseClosure GetLenDelim(int field_number, const FieldDescriptor* field,
internal::ParseContext* ctx) {
if (WireFormat::WireTypeForFieldType(field->type()) !=
WireFormatLite::WIRETYPE_LENGTH_DELIMITED) {
ABSL_ASSERT(field->is_packable());
GOOGLE_DCHECK(field->is_packable());
return GetPackedField(field, msg, reflection, ctx);
}
enum { kNone = 0, kVerify, kStrict } utf8_level = kNone;
internal::ParseFunc string_parsers[] = {internal::StringParser,
internal::StringParserUTF8Verify,
internal::StringParserUTF8};
internal::ParseFunc cord_parsers[] = {internal::CordParser,
internal::CordParserUTF8Verify,
internal::CordParserUTF8};
internal::ParseFunc string_piece_parsers[] = {
internal::StringPieceParser, internal::StringPieceParserUTF8Verify,
internal::StringPieceParserUTF8};
switch (field->type()) {
case FieldDescriptor::TYPE_STRING:
if (field->file()->syntax() == FileDescriptor::SYNTAX_PROTO3
@ -339,7 +333,7 @@ ParseClosure GetLenDelim(int field_number, const FieldDescriptor* field,
ctx->extra_parse_data().SetFieldName(field->full_name().c_str());
utf8_level = kVerify;
}
FALLTHROUGH_INTENDED;
PROTOBUF_FALLTHROUGH_INTENDED;
case FieldDescriptor::TYPE_BYTES: {
if (field->is_repeated()) {
int index = reflection->FieldSize(*msg, field);
@ -350,17 +344,10 @@ ParseClosure GetLenDelim(int field_number, const FieldDescriptor* field,
auto object = reflection->MutableRepeatedPtrField<string>(msg, field)
->Mutable(index);
return {string_parsers[utf8_level], object};
} else if (field->options().ctype() == FieldOptions::CORD) {
auto object = reflection->MutableRepeatedField<Cord>(msg, field)
} else {
auto object = reflection->MutableRepeatedPtrField<string>(msg, field)
->Mutable(index);
return {cord_parsers[utf8_level], object};
} else if (field->options().ctype() == FieldOptions::STRING_PIECE) {
auto object =
reflection
->MutableRepeatedPtrField<internal::StringPieceField>(msg,
field)
->Mutable(index);
return {string_piece_parsers[utf8_level], object};
return {string_parsers[utf8_level], object};
}
} else {
// Clear value and make sure it's set.
@ -372,16 +359,10 @@ ParseClosure GetLenDelim(int field_number, const FieldDescriptor* field,
reflection->GetStringReference(*msg, field, nullptr));
return {string_parsers[utf8_level], object};
} else {
void* object =
internal::ReflectionAccessor::GetOffset(msg, field, reflection);
if (field->containing_oneof()) {
object = *static_cast<Cord**>(object);
}
if (field->options().ctype() == FieldOptions::CORD) {
return {cord_parsers[utf8_level], object};
} else if (field->options().ctype() == FieldOptions::STRING_PIECE) {
return {string_piece_parsers[utf8_level], object};
}
// HACK around inability to get mutable_string in reflection
string* object = &const_cast<string&>(
reflection->GetStringReference(*msg, field, nullptr));
return {string_parsers[utf8_level], object};
}
}
GOOGLE_LOG(FATAL) << "No other type than string supported";
@ -399,6 +380,7 @@ ParseClosure GetLenDelim(int field_number, const FieldDescriptor* field,
default:
GOOGLE_LOG(FATAL) << "Wrong type for length delim " << field->type();
}
return {}; // Make compiler happy.
}
ParseClosure GetGroup(int field_number, const FieldDescriptor* field,
@ -507,8 +489,8 @@ const char* Message::_InternalParse(const char* begin, const char* end,
reflection_(msg->GetReflection()),
ctx_(ctx),
is_item_(is_item) {
GOOGLE_CHECK(descriptor_) << typeid(*this).name();
GOOGLE_CHECK(reflection_) << descriptor_->name() << " " << typeid(*this).name();
GOOGLE_CHECK(descriptor_) << msg->GetTypeName();
GOOGLE_CHECK(reflection_) << msg->GetTypeName();
}
const FieldDescriptor* Field(int num, int wire_type) {

View File

@ -1049,11 +1049,16 @@ class PROTOBUF_EXPORT Reflection {
// Helper method for MapIterator.
friend class MapIterator;
virtual internal::MapFieldBase* MapData(
virtual internal::MapFieldBase* MutableMapData(
Message* /* message */, const FieldDescriptor* /* field */) const {
return NULL;
}
virtual const internal::MapFieldBase* GetMapData(
const Message& /* message */, const FieldDescriptor* /* field */) const {
return NULL;
}
GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Reflection);
};

View File

@ -45,6 +45,7 @@
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/generated_message_table_driven.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/repeated_field.h>
#include <google/protobuf/stubs/strutil.h>
@ -287,8 +288,9 @@ bool MessageLite::MergePartialFromCodedStream(io::CodedInputStream* input) {
}
range = next();
}
if (!parser.Done()) return false;
input->SetConsumed();
return parser.Done();
return true;
}
#endif
@ -355,8 +357,27 @@ bool MessageLite::ParsePartialFromArray(const void* data, int size) {
// ===================================================================
uint8* MessageLite::SerializeWithCachedSizesToArray(uint8* target) const {
return InternalSerializeWithCachedSizesToArray(
io::CodedOutputStream::IsDefaultSerializationDeterministic(), target);
const internal::SerializationTable* table =
static_cast<const internal::SerializationTable*>(InternalGetTable());
auto deterministic =
io::CodedOutputStream::IsDefaultSerializationDeterministic();
if (table) {
return internal::TableSerializeToArray(*this, table, deterministic, target);
} else {
if (deterministic) {
// We only optimize this when using optimize_for = SPEED. In other cases
// we just use the CodedOutputStream path.
int size = GetCachedSize();
io::ArrayOutputStream out(target, size);
io::CodedOutputStream coded_out(&out);
coded_out.SetSerializationDeterministic(true);
SerializeWithCachedSizes(&coded_out);
GOOGLE_CHECK(!coded_out.HadError());
return target + size;
} else {
return InternalSerializeWithCachedSizesToArray(target);
}
}
}
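For reference, a sketch of the public calls that reach the function above once sizes are cached; the helper name is hypothetical and msg stands for any generated message.
#include <string>
std::string SerializeViaCachedSizes(const google::protobuf::MessageLite& msg) {
  std::string buf;
  buf.resize(msg.ByteSizeLong());  // computes and caches all sizes
  msg.SerializeWithCachedSizesToArray(
      reinterpret_cast<google::protobuf::uint8*>(&buf[0]));
  return buf;
}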
bool MessageLite::SerializeToCodedStream(io::CodedOutputStream* output) const {
@ -373,29 +394,29 @@ bool MessageLite::SerializePartialToCodedStream(
return false;
}
uint8* buffer = output->GetDirectBufferForNBytesAndAdvance(size);
if (buffer != NULL) {
uint8* end = InternalSerializeWithCachedSizesToArray(
output->IsSerializationDeterministic(), buffer);
if (end - buffer != size) {
ByteSizeConsistencyError(size, ByteSizeLong(), end - buffer, *this);
if (!output->IsSerializationDeterministic()) {
uint8* buffer = output->GetDirectBufferForNBytesAndAdvance(size);
if (buffer != nullptr) {
uint8* end = InternalSerializeWithCachedSizesToArray(buffer);
if (end - buffer != size) {
ByteSizeConsistencyError(size, ByteSizeLong(), end - buffer, *this);
}
return true;
}
return true;
} else {
int original_byte_count = output->ByteCount();
SerializeWithCachedSizes(output);
if (output->HadError()) {
return false;
}
int final_byte_count = output->ByteCount();
if (final_byte_count - original_byte_count != size) {
ByteSizeConsistencyError(size, ByteSizeLong(),
final_byte_count - original_byte_count, *this);
}
return true;
}
int original_byte_count = output->ByteCount();
SerializeWithCachedSizes(output);
if (output->HadError()) {
return false;
}
int final_byte_count = output->ByteCount();
if (final_byte_count - original_byte_count != size) {
ByteSizeConsistencyError(size, ByteSizeLong(),
final_byte_count - original_byte_count, *this);
}
return true;
}
bool MessageLite::SerializeToZeroCopyStream(
@ -496,7 +517,7 @@ void MessageLite::SerializeWithCachedSizes(
// generated code for maximum speed. If the proto is optimized for size or
// is lite, then we need to specialize this to avoid infinite recursion.
uint8* MessageLite::InternalSerializeWithCachedSizesToArray(
bool deterministic, uint8* target) const {
uint8* target) const {
const internal::SerializationTable* table =
static_cast<const internal::SerializationTable*>(InternalGetTable());
if (table == NULL) {
@ -505,12 +526,11 @@ uint8* MessageLite::InternalSerializeWithCachedSizesToArray(
int size = GetCachedSize();
io::ArrayOutputStream out(target, size);
io::CodedOutputStream coded_out(&out);
coded_out.SetSerializationDeterministic(deterministic);
SerializeWithCachedSizes(&coded_out);
GOOGLE_CHECK(!coded_out.HadError());
return target + size;
} else {
return internal::TableSerializeToArray(*this, table, deterministic, target);
return internal::TableSerializeToArray(*this, table, false, target);
}
}

View File

@ -400,12 +400,9 @@ class PROTOBUF_EXPORT MessageLite {
// method.)
virtual int GetCachedSize() const = 0;
virtual uint8* InternalSerializeWithCachedSizesToArray(bool deterministic,
uint8* target) const;
#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
virtual internal::ParseFunc _ParseFunc() const {
GOOGLE_LOG(FATAL) << "Type " << typeid(*this).name()
GOOGLE_LOG(FATAL) << "Type " << GetTypeName()
<< " doesn't implement _InternalParse";
return nullptr;
}
@ -451,6 +448,11 @@ class PROTOBUF_EXPORT MessageLite {
// TODO(gerbens) make this a pure abstract function
virtual const void* InternalGetTable() const { return NULL; }
// Fast path when conditions match (i.e. non-deterministic)
public:
virtual uint8* InternalSerializeWithCachedSizesToArray(uint8* target) const;
private:
friend class internal::WireFormatLite;
friend class Message;
friend class internal::WeakFieldMap;

View File

@ -30,18 +30,12 @@
#include <google/protobuf/parse_context.h>
#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
#include <google/protobuf/stubs/stringprintf.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/repeated_field.h>
#include <google/protobuf/string_piece_field_support.h>
#include <google/protobuf/wire_format_lite.h>
#include "third_party/absl/strings/str_format.h"
#include <google/protobuf/stubs/strutil.h>
#include "util/coding/varint.h"
#include "util/utf8/public/unilib.h"
#include <google/protobuf/port_def.inc>
@ -50,8 +44,8 @@ namespace protobuf {
namespace internal {
bool ParseContext::ParseEndsInSlopRegion(const char* begin, int overrun) const {
ABSL_ASSERT(overrun >= 0);
ABSL_ASSERT(overrun <= kSlopBytes);
GOOGLE_DCHECK(overrun >= 0);
GOOGLE_DCHECK(overrun <= kSlopBytes);
auto ptr = begin + overrun;
auto end = begin + kSlopBytes;
int n = end - ptr;
@ -60,7 +54,7 @@ bool ParseContext::ParseEndsInSlopRegion(const char* begin, int overrun) const {
// bytes (or more if there are further limits on the stack)
int d = depth_;
if (limit_ != -1) {
ABSL_ASSERT(d < start_depth_); // Top-level never has a limit.
GOOGLE_DCHECK(d < start_depth_); // Top-level never has a limit.
// rewind the stack until all limits disappear.
int limit = limit_;
if (limit >= n) return false;
@ -93,7 +87,7 @@ bool ParseContext::ParseEndsInSlopRegion(const char* begin, int overrun) const {
// any way so we make no attempt to leave the stream at a well specified pos.
while (ptr < end) {
uint32 tag;
ptr = Varint::Parse32(ptr, &tag);
ptr = io::Parse32(ptr, &tag);
if (ptr == nullptr || ptr > end) return false;
// ending on 0 tag is allowed and is the major reason for the necessity of
// this function.
@ -101,7 +95,7 @@ bool ParseContext::ParseEndsInSlopRegion(const char* begin, int overrun) const {
switch (tag & 7) {
case 0: { // Varint
uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = io::Parse64(ptr, &val);
if (ptr == nullptr) return false;
break;
}
@ -111,7 +105,7 @@ bool ParseContext::ParseEndsInSlopRegion(const char* begin, int overrun) const {
}
case 2: { // len delim
uint32 size;
ptr = Varint::Parse32(ptr, &size);
ptr = io::Parse32(ptr, &size);
if (ptr == nullptr) return false;
ptr += size;
break;
@ -145,12 +139,12 @@ std::pair<bool, int> ParseContext::ParseRangeWithLimit(const char* begin,
const char* end) {
auto ptr = begin;
do {
ABSL_ASSERT(ptr < end);
GOOGLE_DCHECK(ptr < end);
const char* limited_end;
if (limit_ == -1) {
limited_end = end;
} else {
ABSL_ASSERT(limit_ > 0);
GOOGLE_DCHECK(limit_ > 0);
limited_end = ptr + std::min(static_cast<int32>(end - ptr), limit_);
limit_ -= limited_end - ptr;
}
@ -159,7 +153,7 @@ std::pair<bool, int> ParseContext::ParseRangeWithLimit(const char* begin,
// an end-group. If this is the case we continue parsing the range with
// the parent parser.
do {
ABSL_ASSERT(ptr < limited_end);
GOOGLE_DCHECK(ptr < limited_end);
ptr = parser_(ptr, limited_end, this);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
// Clear last_tag_minus_1_ so that the hard error encountered is not
@ -170,16 +164,16 @@ std::pair<bool, int> ParseContext::ParseRangeWithLimit(const char* begin,
if (!EndedOnTag()) {
// The parser ended still parsing the initial message. This can only
// happen because it crossed the end.
ABSL_ASSERT(ptr >= limited_end);
GOOGLE_DCHECK(ptr >= limited_end);
break;
}
// Child parser terminated on an end-group / 0 tag.
ABSL_ASSERT(depth_ <= start_depth_);
GOOGLE_DCHECK(depth_ <= start_depth_);
if (depth_ == start_depth_) {
// The parse was already at the top-level and there is no parent.
// This can happen due to encountering 0 or due to this parser being
// called for parsing a sub-group message in custom parsing code.
return {false, ptr - end};
return {false, static_cast<int>(ptr - end)};
}
auto state = Pop();
// Verify the ending tag is correct and continue parsing the range with
@ -192,8 +186,8 @@ std::pair<bool, int> ParseContext::ParseRangeWithLimit(const char* begin,
parser_ = state.parser; // Load parent parser
} while (ptr < limited_end);
int overrun = ptr - limited_end;
ABSL_ASSERT(overrun >= 0);
ABSL_ASSERT(overrun <= kSlopBytes); // wireformat guarantees this limit
GOOGLE_DCHECK(overrun >= 0);
GOOGLE_DCHECK(overrun <= kSlopBytes); // wireformat guarantees this limit
if (limit_ != -1) {
limit_ -= overrun; // Adjust limit relative to new position.
if (limit_ < 0) return {}; // We overrun the limit
@ -201,7 +195,7 @@ std::pair<bool, int> ParseContext::ParseRangeWithLimit(const char* begin,
// We are at an actual ending of a length delimited field.
// The top level has no limit (i.e. limit_ == -1) so we can assert
// that the stack is non-empty.
ABSL_ASSERT(depth_ < start_depth_);
GOOGLE_DCHECK(depth_ < start_depth_);
// else continue parsing the parent message.
auto state = Pop();
parser_ = state.parser;
@ -212,7 +206,7 @@ std::pair<bool, int> ParseContext::ParseRangeWithLimit(const char* begin,
}
}
} while (ptr < end);
return {true, ptr - end};
return {true, static_cast<int>(ptr - end)};
}
const char* StringParser(const char* begin, const char* end, void* object,
@ -222,41 +216,6 @@ const char* StringParser(const char* begin, const char* end, void* object,
return end;
}
const char* CordParser(const char* begin, const char* end, void* object,
ParseContext* ctx) {
auto cord = static_cast<Cord*>(object);
cord->Append(StringPiece(begin, end - begin));
return end;
}
void StringPieceField::Append(const char *begin, size_t chunk_size, int limit) {
if (size_ == 0) {
auto tot = chunk_size + limit;
if (tot > scratch_size_) {
auto old_scratch_size = scratch_size_;
scratch_size_ = tot;
// TODO(gerbens) Security against big
if (arena_ != NULL) {
scratch_ = ::google::protobuf::Arena::CreateArray<char>(arena_, scratch_size_);
} else {
std::allocator<char>().deallocate(scratch_, old_scratch_size);
scratch_ = std::allocator<char>().allocate(scratch_size_);
}
}
data_ = scratch_;
}
std::memcpy(scratch_ + size_, begin, chunk_size);
size_ += chunk_size;
}
const char* StringPieceParser(const char* begin, const char* end, void* object,
ParseContext* ctx) {
auto s = static_cast<StringPieceField*>(object);
auto limit = ctx->CurrentLimit();
s->Append(begin, end - begin, limit);
return end;
}
// Defined in wire_format_lite.cc
void PrintUTF8ErrorLog(const char* field_name, const char* operation_str,
bool emit_stacktrace);
@ -269,14 +228,6 @@ bool VerifyUTF8(StringPiece str, ParseContext* ctx) {
return true;
}
bool VerifyUTF8Cord(const Cord& value, ParseContext* ctx) {
if (!UniLib::CordIsStructurallyValid(value)) {
PrintUTF8ErrorLog(ctx->extra_parse_data().FieldName(), "parsing", false);
return false;
}
return true;
}
const char* StringParserUTF8(const char* begin, const char* end, void* object,
ParseContext* ctx) {
StringParser(begin, end, object, ctx);
@ -287,26 +238,6 @@ const char* StringParserUTF8(const char* begin, const char* end, void* object,
return end;
}
const char* CordParserUTF8(const char* begin, const char* end, void* object,
ParseContext* ctx) {
CordParser(begin, end, object, ctx);
if (ctx->AtLimit()) {
auto str = static_cast<Cord*>(object);
GOOGLE_PROTOBUF_PARSER_ASSERT(VerifyUTF8Cord(*str, ctx));
}
return end;
}
const char* StringPieceParserUTF8(const char* begin, const char* end,
void* object, ParseContext* ctx) {
StringPieceParser(begin, end, object, ctx);
if (ctx->AtLimit()) {
auto s = static_cast<StringPieceField*>(object);
GOOGLE_PROTOBUF_PARSER_ASSERT(VerifyUTF8(s->Get(), ctx));
}
return end;
}
const char* StringParserUTF8Verify(const char* begin, const char* end,
void* object, ParseContext* ctx) {
StringParser(begin, end, object, ctx);
@ -319,35 +250,12 @@ const char* StringParserUTF8Verify(const char* begin, const char* end,
return end;
}
const char* CordParserUTF8Verify(const char* begin, const char* end,
void* object, ParseContext* ctx) {
CordParser(begin, end, object, ctx);
#ifndef NDEBUG
if (ctx->AtLimit()) {
auto str = static_cast<Cord*>(object);
VerifyUTF8Cord(*str, ctx);
}
#endif
return end;
}
const char* StringPieceParserUTF8Verify(const char* begin, const char* end,
void* object, ParseContext* ctx) {
return StringPieceParser(begin, end, object, ctx);
#ifndef NDEBUG
if (ctx->AtLimit()) {
auto s = static_cast<StringPieceField*>(object);
VerifyUTF8(s->Get(), ctx);
}
#endif
return end;
}
const char* GreedyStringParser(const char* begin, const char* end, void* object,
ParseContext* ctx) {
auto str = static_cast<string*>(object);
auto limit = ctx->CurrentLimit();
ABSL_ASSERT(limit != -1); // Always length delimited
GOOGLE_DCHECK(limit != -1); // Always length delimited
end += std::min<int>(limit, ParseContext::kSlopBytes);
str->append(begin, end - begin);
return end;
@ -356,7 +264,7 @@ const char* GreedyStringParser(const char* begin, const char* end, void* object,
const char* GreedyStringParserUTF8(const char* begin, const char* end, void* object,
ParseContext* ctx) {
auto limit = ctx->CurrentLimit();
ABSL_ASSERT(limit != -1); // Always length delimited
GOOGLE_DCHECK(limit != -1); // Always length delimited
bool at_end;
if (limit <= ParseContext::kSlopBytes) {
end += limit;
@ -376,7 +284,7 @@ const char* GreedyStringParserUTF8(const char* begin, const char* end, void* obj
const char* GreedyStringParserUTF8Verify(const char* begin, const char* end, void* object,
ParseContext* ctx) {
auto limit = ctx->CurrentLimit();
ABSL_ASSERT(limit != -1); // Always length delimited
GOOGLE_DCHECK(limit != -1); // Always length delimited
bool at_end;
if (limit <= ParseContext::kSlopBytes) {
end += limit;
@ -402,7 +310,7 @@ const char* VarintParser(const char* begin, const char* end, void* object,
auto ptr = begin;
while (ptr < end) {
uint64 varint;
ptr = Varint::Parse64(ptr, &varint);
ptr = io::Parse64(ptr, &varint);
if (!ptr) return nullptr;
T val;
if (sign) {
@ -467,7 +375,7 @@ const char* PackedValidEnumParserLite(const char* begin, const char* end,
auto ptr = begin;
while (ptr < end) {
uint64 varint;
ptr = Varint::Parse64(ptr, &varint);
ptr = io::Parse64(ptr, &varint);
if (!ptr) return nullptr;
int val = varint;
if (ctx->extra_parse_data().ValidateEnum<string>(val))
@ -482,7 +390,7 @@ const char* PackedValidEnumParserLiteArg(const char* begin, const char* end,
auto ptr = begin;
while (ptr < end) {
uint64 varint;
ptr = Varint::Parse64(ptr, &varint);
ptr = io::Parse64(ptr, &varint);
if (!ptr) return nullptr;
int val = varint;
if (ctx->extra_parse_data().ValidateEnumArg<string>(val))
@ -623,5 +531,3 @@ const char* SlowMapEntryParser(const char* begin, const char* end, void* object,
} // namespace internal
} // namespace protobuf
} // namespace google
#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER

View File

@ -31,17 +31,13 @@
#ifndef GOOGLE_PROTOBUF_PARSE_CONTEXT_H__
#define GOOGLE_PROTOBUF_PARSE_CONTEXT_H__
#include <cstring>
#include <string>
#include <google/protobuf/port.h>
#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/port.h>
#include <google/protobuf/wire_format_lite.h>
#include <google/protobuf/stubs/common.h>
#include "third_party/absl/base/optimization.h"
#include <google/protobuf/stubs/strutil.h>
#include "util/coding/varint.h"
#include <google/protobuf/port_def.inc>
@ -55,7 +51,8 @@ class MessageFactory;
namespace internal {
// Template code below needs to know about the existence of these functions.
void WriteVarint(uint32 num, uint64 val, std::string* s);
PROTOBUF_EXPORT void WriteVarint(uint32 num, uint64 val, std::string* s);
PROTOBUF_EXPORT
void WriteLengthDelimited(uint32 num, StringPiece val, std::string* s);
// Inline because it is just forwarding to s->WriteVarint
inline void WriteVarint(uint32 num, uint64 val, UnknownFieldSet* s);
@ -141,7 +138,7 @@ struct ParseClosure {
// All tag/value pairs between in [begin, retval) are parsed and retval
// points to start of a tag.
const char* operator()(const char* ptr, const char* end, ParseContext* ctx) {
ABSL_ASSERT(ptr < end);
GOOGLE_DCHECK(ptr < end);
return func(ptr, end, object, ctx);
}
};
@ -157,7 +154,7 @@ struct ParseClosure {
// all the parser code that deals with seams is located in what would otherwise
// be error paths of a parser that wouldn't need to deal with seams.
class ParseContext {
class PROTOBUF_EXPORT ParseContext {
public:
enum {
// A tag is at most 5 bytes, a varint at most 10, resulting in 15 bytes. We
@ -243,7 +240,7 @@ class ParseContext {
inlined_depth_(std::max(0, rec_limit - kInlinedDepth)) {}
~ParseContext() {
if (inlined_depth_ == -1) delete stack_;
if (inlined_depth_ == -1) delete[] stack_;
}
void StartParse(ParseClosure parser) { parser_ = parser; }
@ -261,9 +258,10 @@ class ParseContext {
// EndedOnTag() to find if the parse failed due to an error or ended on
// terminating tag.
bool ParseRange(StringPiece chunk, int* overrun_ptr) {
ABSL_ASSERT(!chunk.empty());
GOOGLE_DCHECK(!chunk.empty());
int& overrun = *overrun_ptr;
if (overrun >= chunk.size()) {
GOOGLE_DCHECK(overrun >= 0);
if (overrun >= static_cast<int>(chunk.size())) {
// This case can easily happen in patch buffers and we like to inline
// this case.
overrun -= chunk.size();
@ -317,7 +315,7 @@ class ParseContext {
if (!EndedOnTag()) {
// The group hasn't been terminated by an end-group and thus continues,
// hence it must have ended because it crossed "end".
ABSL_ASSERT(ptr >= end);
GOOGLE_DCHECK(ptr >= end);
return {ptr, true};
}
// Verify that the terminating tag matches the start group tag. As an extra
@ -332,7 +330,7 @@ class ParseContext {
}
void EndGroup(uint32 tag) {
ABSL_ASSERT(tag == 0 || (tag & 7) == 4);
GOOGLE_DCHECK(tag == 0 || (tag & 7) == 4);
// Because of the above assert, last_tag_minus_1 is never set to 0, and the
// caller can verify the child parser was terminated by comparing it to 0.
last_tag_minus_1_ = tag - 1;
@ -355,7 +353,7 @@ class ParseContext {
// overflow.
int64 safe_new_limit = size - static_cast<int64>(end - ptr);
if (safe_new_limit > INT_MAX) return nullptr;
ABSL_ASSERT(safe_new_limit > 0); // only call this if it's crossing end
GOOGLE_DCHECK(safe_new_limit > 0); // only call this if it's crossing end
int32 new_limit = static_cast<int32>(safe_new_limit);
int32 delta;
if (limit_ != -1) {
@ -373,6 +371,10 @@ class ParseContext {
}
// Helper function for a child group that has crossed the boundary.
#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif // defined(__GNUC__) && !defined(__clang__)
bool StoreGroup(ParseClosure current_parser, ParseClosure child_parser,
int depth, uint32 tag) {
// The group must still read an end-group tag, so it can't be at a limit.
@ -386,10 +388,13 @@ class ParseContext {
// parse context in this case. We need to make the child parser active.
parser_ = child_parser;
}
if (ABSL_PREDICT_FALSE(depth < inlined_depth_)) SwitchStack();
if (PROTOBUF_PREDICT_FALSE(depth < inlined_depth_)) SwitchStack();
stack_[depth] = {current_parser, static_cast<int32>(~(tag >> 3))};
return true;
}
#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic pop
#endif // defined(__GNUC__) && !defined(__clang__)
private:
// This is the "active" or current parser.
@ -428,9 +433,9 @@ class ParseContext {
int inlined_depth_;
bool Push(ParseClosure parser, int32 delta) {
ABSL_ASSERT(delta >= -1); // Make sure it's a valid len-delim
GOOGLE_DCHECK(delta >= -1); // Make sure it's a valid len-delim
if (PROTOBUF_PREDICT_FALSE(--depth_ < 0)) return false;
if (ABSL_PREDICT_FALSE(depth_ < inlined_depth_)) SwitchStack();
if (PROTOBUF_PREDICT_FALSE(depth_ < inlined_depth_)) SwitchStack();
stack_[depth_] = {parser, delta};
return true;
}
@ -537,7 +542,7 @@ class EpsCopyParser {
// EndedOnTag() on the underlying ParseContext to find out if the parse ended
// correctly on a terminating tag.
bool Parse(StringPiece range) {
ABSL_ASSERT(!range.empty());
GOOGLE_DCHECK(!range.empty());
auto size = range.size();
if (size > kSlopBytes) {
// The buffer is large enough to be able to parse the (size - kSlopBytes)
@ -563,7 +568,7 @@ class EpsCopyParser {
// We care about leaving the stream at the right place and the stream will
// indeed terminate, so just parse it.
auto res = ParseRange({buffer_, kSlopBytes}, size);
ABSL_ASSERT(!res);
GOOGLE_DCHECK(!res);
return false;
}
return true;
@ -582,7 +587,7 @@ class EpsCopyParser {
// The reason for ensure_non_negative_skip and ParseEndsInSlopRegion is that
// the following assert holds, which implies the stream doesn't need to
// back up.
ABSL_ASSERT(!ensure_non_negative_skip || overrun_ >= 0);
GOOGLE_DCHECK(!ensure_non_negative_skip || overrun_ >= 0);
return overrun_;
}
@ -623,7 +628,7 @@ std::pair<const char*, bool> FieldParser(uint64 tag, ParseClosure parent,
const char* end, ParseContext* ctx) {
auto ptr = begin;
uint32 number = tag >> 3;
if (ABSL_PREDICT_FALSE(number == 0)) {
if (PROTOBUF_PREDICT_FALSE(number == 0)) {
GOOGLE_PROTOBUF_ASSERT_RETURN(tag == 0, {});
// Special case scenario of 0 termination.
ctx->EndGroup(tag);
@ -633,7 +638,7 @@ std::pair<const char*, bool> FieldParser(uint64 tag, ParseClosure parent,
switch (tag & 7) {
case WireType::WIRETYPE_VARINT: {
uint64 value;
ptr = Varint::Parse64(ptr, &value);
ptr = io::Parse64(ptr, &value);
GOOGLE_PROTOBUF_ASSERT_RETURN(ptr != nullptr, {});
field_parser.AddVarint(number, value);
break;
@ -646,7 +651,7 @@ std::pair<const char*, bool> FieldParser(uint64 tag, ParseClosure parent,
}
case WireType::WIRETYPE_LENGTH_DELIMITED: {
uint32 size;
ptr = Varint::Parse32(ptr, &size);
ptr = io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_ASSERT_RETURN(ptr != nullptr, {});
ParseClosure child = field_parser.AddLengthDelimited(number, size);
if (size > end - ptr) {
@ -685,7 +690,7 @@ std::pair<const char*, bool> FieldParser(uint64 tag, ParseClosure parent,
default:
GOOGLE_PROTOBUF_ASSERT_RETURN(false, {});
}
ABSL_ASSERT(ptr != nullptr);
GOOGLE_DCHECK(ptr != nullptr);
return {ptr, false};
}
@ -696,7 +701,7 @@ const char* WireFormatParser(ParseClosure parent, T field_parser,
auto ptr = begin;
while (ptr < end) {
uint32 tag;
ptr = Varint::Parse32(ptr, &tag);
ptr = io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr);
auto res = FieldParser(tag, parent, field_parser, ptr, end, ctx);
ptr = res.first;
@ -714,45 +719,61 @@ const char* WireFormatParser(ParseClosure parent, T field_parser,
// caller needs to set prior to the call.
// The null parser does not do anything, but is useful as a substitute.
PROTOBUF_EXPORT
const char* NullParser(const char* begin, const char* end, void* object,
ParseContext*);
// Helper for verification of utf8
PROTOBUF_EXPORT
bool VerifyUTF8(StringPiece s, ParseContext* ctx);
// All the string parsers with or without UTF checking and for all CTypes.
PROTOBUF_EXPORT
const char* StringParser(const char* begin, const char* end, void* object,
ParseContext*);
PROTOBUF_EXPORT
const char* CordParser(const char* begin, const char* end, void* object,
ParseContext*);
PROTOBUF_EXPORT
const char* StringPieceParser(const char* begin, const char* end, void* object,
ParseContext*);
PROTOBUF_EXPORT
const char* StringParserUTF8(const char* begin, const char* end, void* object,
ParseContext*);
PROTOBUF_EXPORT
const char* CordParserUTF8(const char* begin, const char* end, void* object,
ParseContext*);
PROTOBUF_EXPORT
const char* StringPieceParserUTF8(const char* begin, const char* end,
void* object, ParseContext*);
PROTOBUF_EXPORT
const char* StringParserUTF8Verify(const char* begin, const char* end,
void* object, ParseContext*);
PROTOBUF_EXPORT
const char* CordParserUTF8Verify(const char* begin, const char* end,
void* object, ParseContext*);
PROTOBUF_EXPORT
const char* StringPieceParserUTF8Verify(const char* begin, const char* end,
void* object, ParseContext*);
// Parsers that also eat the slopbytes if possible. Can only be called in a
// ParseContext where limit_ is set properly.
PROTOBUF_EXPORT
const char* GreedyStringParser(const char* begin, const char* end, void* object,
ParseContext*);
PROTOBUF_EXPORT
const char* GreedyStringParserUTF8(const char* begin, const char* end, void* object,
ParseContext*);
PROTOBUF_EXPORT
const char* GreedyStringParserUTF8Verify(const char* begin, const char* end,
void* object, ParseContext*);
// This is the only recursive parser.
PROTOBUF_EXPORT
const char* UnknownGroupLiteParse(const char* begin, const char* end,
void* object, ParseContext* ctx);
// This is a helper for UnknownGroupLiteParse, but it is actually also
// useful in the generated code. It uses overloading on string* vs
// UnknownFieldSet* to make the generated code isomorphic between full and lite.
PROTOBUF_EXPORT
std::pair<const char*, bool> UnknownFieldParse(uint32 tag, ParseClosure parent,
const char* begin,
const char* end, std::string* unknown,
@ -762,44 +783,60 @@ std::pair<const char*, bool> UnknownFieldParse(uint32 tag, ParseClosure parent,
// corresponding field
// These are packed varints
PROTOBUF_EXPORT
const char* PackedInt32Parser(const char* begin, const char* end, void* object,
ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedUInt32Parser(const char* begin, const char* end, void* object,
ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedInt64Parser(const char* begin, const char* end, void* object,
ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedUInt64Parser(const char* begin, const char* end, void* object,
ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedSInt32Parser(const char* begin, const char* end, void* object,
ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedSInt64Parser(const char* begin, const char* end, void* object,
ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedBoolParser(const char* begin, const char* end, void* object,
ParseContext* ctx);
// Enums in proto3 do not require verification
PROTOBUF_EXPORT
const char* PackedEnumParser(const char* begin, const char* end, void* object,
ParseContext* ctx);
// Enums in proto2 require verification, so an additional verification function
// needs to be passed into ExtraParseData.
// If it's a generated verification function we only need the function pointer.
PROTOBUF_EXPORT
const char* PackedValidEnumParserLite(const char* begin, const char* end,
void* object, ParseContext* ctx);
// If it's reflective we need a function that takes an additional argument.
PROTOBUF_EXPORT
const char* PackedValidEnumParserLiteArg(const char* begin, const char* end,
void* object, ParseContext* ctx);
// These are the packed fixed field parsers.
PROTOBUF_EXPORT
const char* PackedFixed32Parser(const char* begin, const char* end,
void* object, ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedSFixed32Parser(const char* begin, const char* end,
void* object, ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedFixed64Parser(const char* begin, const char* end,
void* object, ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedSFixed64Parser(const char* begin, const char* end,
void* object, ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedFloatParser(const char* begin, const char* end, void* object,
ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedDoubleParser(const char* begin, const char* end, void* object,
ParseContext* ctx);
@ -808,6 +845,7 @@ const char* PackedDoubleParser(const char* begin, const char* end, void* object,
// to a MapField in which we parse the payload upon done (we detect this when
// this function is called with limit_ == 0), by calling parse_map (also stored
// in ctx) on the resulting string.
PROTOBUF_EXPORT
const char* SlowMapEntryParser(const char* begin, const char* end, void* object,
internal::ParseContext* ctx);
@ -817,5 +855,4 @@ const char* SlowMapEntryParser(const char* begin, const char* end, void* object,
#include <google/protobuf/port_undef.inc>
#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
#endif // GOOGLE_PROTOBUF_PARSE_CONTEXT_H__

View File

@ -48,10 +48,10 @@
// detect/prohibit anytime it is #included twice without a corresponding
// #undef.
// These macros are private and should always be #undef'd from headers.
// If any of these errors fire, you should either properly #include
// port_undef.h at the end of your header that #includes port.h, or
// don't #include port.h twice in a .cc file.
// These macros are private and should always be
// #undef'd from headers. If any of these errors fire, you
// should either properly #include port_undef.h at the end of your header that
// #includes port.h, or don't #include port.h twice in a .cc file.
#ifdef PROTOBUF_NAMESPACE
#error PROTOBUF_NAMESPACE was previously defined
#endif
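A sketch of the include discipline the comment above describes; my_header.h is a hypothetical header that uses the PROTOBUF_* macros.
// my_header.h
#include <google/protobuf/port_def.inc>
// ... declarations that use PROTOBUF_EXPORT, PROTOBUF_PREDICT_TRUE, etc. ...
#include <google/protobuf/port_undef.inc>  // restores macro state for other headers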

View File

@ -77,6 +77,10 @@ void ReflectionOps::Merge(const Message& from, Message* to) {
const Reflection* from_reflection = GetReflectionOrDie(from);
const Reflection* to_reflection = GetReflectionOrDie(*to);
bool is_from_generated = (from_reflection->GetMessageFactory() ==
google::protobuf::MessageFactory::generated_factory());
bool is_to_generated = (to_reflection->GetMessageFactory() ==
google::protobuf::MessageFactory::generated_factory());
std::vector<const FieldDescriptor*> fields;
from_reflection->ListFields(from, &fields);
@ -84,15 +88,17 @@ void ReflectionOps::Merge(const Message& from, Message* to) {
const FieldDescriptor* field = fields[i];
if (field->is_repeated()) {
if (field->is_map()) {
MapFieldBase* from_field =
from_reflection->MapData(const_cast<Message*>(&from), field);
// Use map reflection if both are in map status and have the
// same map type to avoid sync with repeated field.
// Note: As from and to messages have the same descriptor, the
// map field types are the same if they are both generated
// messages or both dynamic messages.
if (is_from_generated == is_to_generated && field->is_map()) {
const MapFieldBase* from_field =
from_reflection->GetMapData(from, field);
MapFieldBase* to_field =
to_reflection->MapData(const_cast<Message*>(to), field);
// Use map reflection if both are in map status and have the
// same map type to avoid sync with repeated field.
if (to_field->IsMapValid() && from_field->IsMapValid()
&& typeid(*from_field) == typeid(*to_field)) {
to_reflection->MutableMapData(to, field);
if (to_field->IsMapValid() && from_field->IsMapValid()) {
to_field->MergeFrom(*from_field);
continue;
}
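A minimal sketch of the message-level call that ends up in the map-aware branch above; MyMap is a hypothetical generated message with a map<string, int32> field named values.
void MergeMaps() {
  MyMap from, to;
  (*from.mutable_values())["a"] = 1;
  (*to.mutable_values())["b"] = 2;
  to.MergeFrom(from);  // merged key-by-key via the map reflection path above
  // to.values() now holds both "a" and "b".
}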
@ -189,8 +195,8 @@ bool ReflectionOps::IsInitialized(const Message& message) {
if (field->is_map()) {
const FieldDescriptor* value_field = field->message_type()->field(1);
if (value_field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
MapFieldBase* map_field =
reflection->MapData(const_cast<Message*>(&message), field);
const MapFieldBase* map_field =
reflection->GetMapData(message, field);
if (map_field->IsMapValid()) {
MapIterator iter(const_cast<Message*>(&message), field);
MapIterator end(const_cast<Message*>(&message), field);
@ -238,6 +244,25 @@ void ReflectionOps::DiscardUnknownFields(Message* message) {
const FieldDescriptor* field = fields[i];
if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
if (field->is_repeated()) {
if (field->is_map()) {
const FieldDescriptor* value_field = field->message_type()->field(1);
if (value_field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
const MapFieldBase* map_field =
reflection->MutableMapData(message, field);
if (map_field->IsMapValid()) {
MapIterator iter(message, field);
MapIterator end(message, field);
for (map_field->MapBegin(&iter), map_field->MapEnd(&end);
iter != end; ++iter) {
iter.MutableValueRef()->MutableMessageValue()
->DiscardUnknownFields();
}
continue;
}
} else {
continue;
}
}
int size = reflection->FieldSize(*message, field);
for (int j = 0; j < size; j++) {
reflection->MutableRepeatedMessage(message, field, j)

View File

@ -167,13 +167,13 @@ const char* SourceContext::_InternalParse(const char* begin, const char* end, vo
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string file_name = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.SourceContext.file_name");
auto str = msg->mutable_file_name();
@ -190,7 +190,7 @@ const char* SourceContext::_InternalParse(const char* begin, const char* end, vo
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -204,13 +204,9 @@ const char* SourceContext::_InternalParse(const char* begin, const char* end, vo
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool SourceContext::MergePartialFromCodedStream(
@ -283,8 +279,7 @@ void SourceContext::SerializeWithCachedSizes(
}
::google::protobuf::uint8* SourceContext::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.SourceContext)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;

View File

@ -132,7 +132,7 @@ class PROTOBUF_EXPORT SourceContext : public ::google::protobuf::Message /* @@pr
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

View File

@ -306,14 +306,14 @@ const char* Struct::_InternalParse(const char* begin, const char* end, void* obj
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// map<string, .google.protobuf.Value> fields = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::internal::SlowMapEntryParser;
auto parse_map = ::google::protobuf::Struct_FieldsEntry_DoNotUse::_ParseMap;
@ -329,7 +329,7 @@ const char* Struct::_InternalParse(const char* begin, const char* end, void* obj
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -343,13 +343,9 @@ const char* Struct::_InternalParse(const char* begin, const char* end, void* obj
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Struct::MergePartialFromCodedStream(
@ -470,8 +466,7 @@ void Struct::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Struct::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Struct)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -491,7 +486,7 @@ void Struct::SerializeWithCachedSizes(
}
};
if (deterministic &&
if (false &&
this->fields().size() > 1) {
::std::unique_ptr<SortItem[]> items(
new SortItem[this->fields().size()]);
@ -506,7 +501,7 @@ void Struct::SerializeWithCachedSizes(
::std::unique_ptr<Struct_FieldsEntry_DoNotUse> entry;
for (size_type i = 0; i < n; i++) {
entry.reset(fields_.NewEntryWrapper(items[static_cast<ptrdiff_t>(i)]->first, items[static_cast<ptrdiff_t>(i)]->second));
target = ::google::protobuf::internal::WireFormatLite::InternalWriteMessageNoVirtualToArray(1, *entry, deterministic, target);
target = ::google::protobuf::internal::WireFormatLite::InternalWriteMessageNoVirtualToArray(1, *entry, target);
if (entry->GetArena() != nullptr) {
entry.release();
}
@ -518,7 +513,7 @@ void Struct::SerializeWithCachedSizes(
it = this->fields().begin();
it != this->fields().end(); ++it) {
entry.reset(fields_.NewEntryWrapper(it->first, it->second));
target = ::google::protobuf::internal::WireFormatLite::InternalWriteMessageNoVirtualToArray(1, *entry, deterministic, target);
target = ::google::protobuf::internal::WireFormatLite::InternalWriteMessageNoVirtualToArray(1, *entry, target);
if (entry->GetArena() != nullptr) {
entry.release();
}
@ -855,14 +850,14 @@ const char* Value::_InternalParse(const char* begin, const char* end, void* obje
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// .google.protobuf.NullValue null_value = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 8) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::NullValue value = static_cast<::google::protobuf::NullValue>(val);
msg->set_null_value(value);
@ -880,7 +875,7 @@ const char* Value::_InternalParse(const char* begin, const char* end, void* obje
// string string_value = 3;
case 3: {
if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Value.string_value");
auto str = msg->mutable_string_value();
@ -900,7 +895,7 @@ const char* Value::_InternalParse(const char* begin, const char* end, void* obje
case 4: {
if (static_cast<::google::protobuf::uint8>(tag) != 32) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
bool value = val;
msg->set_bool_value(value);
@ -909,7 +904,7 @@ const char* Value::_InternalParse(const char* begin, const char* end, void* obje
// .google.protobuf.Struct struct_value = 5;
case 5: {
if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Struct::_InternalParse;
object = msg->mutable_struct_value();
@ -924,7 +919,7 @@ const char* Value::_InternalParse(const char* begin, const char* end, void* obje
// .google.protobuf.ListValue list_value = 6;
case 6: {
if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::ListValue::_InternalParse;
object = msg->mutable_list_value();
@ -937,7 +932,7 @@ const char* Value::_InternalParse(const char* begin, const char* end, void* obje
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -951,13 +946,9 @@ const char* Value::_InternalParse(const char* begin, const char* end, void* obje
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Value::MergePartialFromCodedStream(
@ -1122,8 +1113,7 @@ void Value::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Value::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Value)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -1159,14 +1149,14 @@ void Value::SerializeWithCachedSizes(
if (has_struct_value()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
5, HasBitSetters::struct_value(this), deterministic, target);
5, HasBitSetters::struct_value(this), target);
}
// .google.protobuf.ListValue list_value = 6;
if (has_list_value()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
6, HasBitSetters::list_value(this), deterministic, target);
6, HasBitSetters::list_value(this), target);
}
if (_internal_metadata_.have_unknown_fields()) {
@ -1422,14 +1412,14 @@ const char* ListValue::_InternalParse(const char* begin, const char* end, void*
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// repeated .google.protobuf.Value values = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Value::_InternalParse;
object = msg->add_values();
@ -1444,7 +1434,7 @@ const char* ListValue::_InternalParse(const char* begin, const char* end, void*
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -1458,13 +1448,9 @@ const char* ListValue::_InternalParse(const char* begin, const char* end, void*
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool ListValue::MergePartialFromCodedStream(
@ -1532,8 +1518,7 @@ void ListValue::SerializeWithCachedSizes(
}
::google::protobuf::uint8* ListValue::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.ListValue)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -1543,7 +1528,7 @@ void ListValue::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->values_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
1, this->values(static_cast<int>(i)), deterministic, target);
1, this->values(static_cast<int>(i)), target);
}
if (_internal_metadata_.have_unknown_fields()) {

View File

@ -199,7 +199,7 @@ class PROTOBUF_EXPORT Struct : public ::google::protobuf::Message /* @@protoc_in
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -344,7 +344,7 @@ class PROTOBUF_EXPORT Value : public ::google::protobuf::Message /* @@protoc_ins
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -562,7 +562,7 @@ class PROTOBUF_EXPORT ListValue : public ::google::protobuf::Message /* @@protoc
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

View File

@ -2049,7 +2049,7 @@ bool MapFieldPrinterHelper::SortMap(
std::vector<const Message*>* sorted_map_field) {
bool need_release = false;
const MapFieldBase& base =
*reflection->MapData(const_cast<Message*>(&message), field);
*reflection->GetMapData(message, field);
if (base.IsRepeatedFieldValid()) {
const RepeatedPtrField<Message>& map_field =

View File

@ -182,14 +182,14 @@ const char* Timestamp::_InternalParse(const char* begin, const char* end, void*
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// int64 seconds = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 8) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int64 value = val;
msg->set_seconds(value);
@ -199,14 +199,14 @@ const char* Timestamp::_InternalParse(const char* begin, const char* end, void*
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 16) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int32 value = val;
msg->set_nanos(value);
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -220,13 +220,6 @@ const char* Timestamp::_InternalParse(const char* begin, const char* end, void*
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Timestamp::MergePartialFromCodedStream(
@ -310,8 +303,7 @@ void Timestamp::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Timestamp::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Timestamp)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
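
The hunks above swap the experimental parser's Varint::Parse32Inline / Varint::Parse64 helpers for the shared ::google::protobuf::io::Parse32 / Parse64 entry points. Both families read base-128 varints off the wire. The standalone sketch below is only an illustration of that encoding; it is not the library's implementation and skips the buffer-boundary and error handling the real helpers perform.

#include <cstdint>
#include <cstdio>

// Standalone sketch of base-128 varint decoding -- the wire format the
// Parse32/Parse64 calls above consume. Illustration only: the real helpers
// also handle buffer boundaries and report errors through the parse context.
const char* DecodeVarint64(const char* ptr, uint64_t* out) {
  uint64_t result = 0;
  for (int shift = 0; shift < 64; shift += 7) {
    uint8_t byte = static_cast<uint8_t>(*ptr++);
    result |= static_cast<uint64_t>(byte & 0x7F) << shift;
    if ((byte & 0x80) == 0) {   // high bit clear: this was the last byte
      *out = result;
      return ptr;
    }
  }
  return nullptr;               // malformed: varint longer than 10 bytes
}

int main() {
  const char encoded[] = "\xAC\x02";  // 300 encoded as two bytes
  uint64_t value = 0;
  DecodeVarint64(encoded, &value);
  std::printf("%llu\n", static_cast<unsigned long long>(value));  // prints 300
}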

View File

@ -139,7 +139,7 @@ class PROTOBUF_EXPORT Timestamp : public ::google::protobuf::Message /* @@protoc
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

View File

@ -509,13 +509,13 @@ const char* Type::_InternalParse(const char* begin, const char* end, void* objec
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string name = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Type.name");
auto str = msg->mutable_name();
@ -535,7 +535,7 @@ const char* Type::_InternalParse(const char* begin, const char* end, void* objec
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Field::_InternalParse;
object = msg->add_fields();
@ -553,7 +553,7 @@ const char* Type::_InternalParse(const char* begin, const char* end, void* objec
case 3: {
if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Type.oneofs");
auto str = msg->add_oneofs();
@ -575,7 +575,7 @@ const char* Type::_InternalParse(const char* begin, const char* end, void* objec
case 4: {
if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Option::_InternalParse;
object = msg->add_options();
@ -592,7 +592,7 @@ const char* Type::_InternalParse(const char* begin, const char* end, void* objec
// .google.protobuf.SourceContext source_context = 5;
case 5: {
if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::SourceContext::_InternalParse;
object = msg->mutable_source_context();
@ -608,14 +608,14 @@ const char* Type::_InternalParse(const char* begin, const char* end, void* objec
case 6: {
if (static_cast<::google::protobuf::uint8>(tag) != 48) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::Syntax value = static_cast<::google::protobuf::Syntax>(val);
msg->set_syntax(value);
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -629,13 +629,9 @@ const char* Type::_InternalParse(const char* begin, const char* end, void* objec
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Type::MergePartialFromCodedStream(
@ -811,8 +807,7 @@ void Type::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Type::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Type)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -833,7 +828,7 @@ void Type::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->fields_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
2, this->fields(static_cast<int>(i)), deterministic, target);
2, this->fields(static_cast<int>(i)), target);
}
// repeated string oneofs = 3;
@ -851,14 +846,14 @@ void Type::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->options_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
4, this->options(static_cast<int>(i)), deterministic, target);
4, this->options(static_cast<int>(i)), target);
}
// .google.protobuf.SourceContext source_context = 5;
if (this->has_source_context()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
5, HasBitSetters::source_context(this), deterministic, target);
5, HasBitSetters::source_context(this), target);
}
// .google.protobuf.Syntax syntax = 6;
@ -1166,14 +1161,14 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// .google.protobuf.Field.Kind kind = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 8) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::Field_Kind value = static_cast<::google::protobuf::Field_Kind>(val);
msg->set_kind(value);
@ -1183,7 +1178,7 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 16) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::Field_Cardinality value = static_cast<::google::protobuf::Field_Cardinality>(val);
msg->set_cardinality(value);
@ -1193,7 +1188,7 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
case 3: {
if (static_cast<::google::protobuf::uint8>(tag) != 24) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int32 value = val;
msg->set_number(value);
@ -1202,7 +1197,7 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
// string name = 4;
case 4: {
if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Field.name");
auto str = msg->mutable_name();
@ -1221,7 +1216,7 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
// string type_url = 6;
case 6: {
if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Field.type_url");
auto str = msg->mutable_type_url();
@ -1241,7 +1236,7 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
case 7: {
if (static_cast<::google::protobuf::uint8>(tag) != 56) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int32 value = val;
msg->set_oneof_index(value);
@ -1251,7 +1246,7 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
case 8: {
if (static_cast<::google::protobuf::uint8>(tag) != 64) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
bool value = val;
msg->set_packed(value);
@ -1261,7 +1256,7 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
case 9: {
if (static_cast<::google::protobuf::uint8>(tag) != 74) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Option::_InternalParse;
object = msg->add_options();
@ -1278,7 +1273,7 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
// string json_name = 10;
case 10: {
if (static_cast<::google::protobuf::uint8>(tag) != 82) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Field.json_name");
auto str = msg->mutable_json_name();
@ -1297,7 +1292,7 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
// string default_value = 11;
case 11: {
if (static_cast<::google::protobuf::uint8>(tag) != 90) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Field.default_value");
auto str = msg->mutable_default_value();
@ -1314,7 +1309,7 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -1328,13 +1323,9 @@ const char* Field::_InternalParse(const char* begin, const char* end, void* obje
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Field::MergePartialFromCodedStream(
@ -1596,8 +1587,7 @@ void Field::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Field::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Field)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -1656,7 +1646,7 @@ void Field::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->options_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
9, this->options(static_cast<int>(i)), deterministic, target);
9, this->options(static_cast<int>(i)), target);
}
// string json_name = 10;
@ -2029,13 +2019,13 @@ const char* Enum::_InternalParse(const char* begin, const char* end, void* objec
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string name = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Enum.name");
auto str = msg->mutable_name();
@ -2055,7 +2045,7 @@ const char* Enum::_InternalParse(const char* begin, const char* end, void* objec
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::EnumValue::_InternalParse;
object = msg->add_enumvalue();
@ -2073,7 +2063,7 @@ const char* Enum::_InternalParse(const char* begin, const char* end, void* objec
case 3: {
if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Option::_InternalParse;
object = msg->add_options();
@ -2090,7 +2080,7 @@ const char* Enum::_InternalParse(const char* begin, const char* end, void* objec
// .google.protobuf.SourceContext source_context = 4;
case 4: {
if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::SourceContext::_InternalParse;
object = msg->mutable_source_context();
@ -2106,14 +2096,14 @@ const char* Enum::_InternalParse(const char* begin, const char* end, void* objec
case 5: {
if (static_cast<::google::protobuf::uint8>(tag) != 40) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::Syntax value = static_cast<::google::protobuf::Syntax>(val);
msg->set_syntax(value);
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -2127,13 +2117,9 @@ const char* Enum::_InternalParse(const char* begin, const char* end, void* objec
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Enum::MergePartialFromCodedStream(
@ -2283,8 +2269,7 @@ void Enum::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Enum::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Enum)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -2305,7 +2290,7 @@ void Enum::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->enumvalue_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
2, this->enumvalue(static_cast<int>(i)), deterministic, target);
2, this->enumvalue(static_cast<int>(i)), target);
}
// repeated .google.protobuf.Option options = 3;
@ -2313,14 +2298,14 @@ void Enum::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->options_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
3, this->options(static_cast<int>(i)), deterministic, target);
3, this->options(static_cast<int>(i)), target);
}
// .google.protobuf.SourceContext source_context = 4;
if (this->has_source_context()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
4, HasBitSetters::source_context(this), deterministic, target);
4, HasBitSetters::source_context(this), target);
}
// .google.protobuf.Syntax syntax = 5;
@ -2581,13 +2566,13 @@ const char* EnumValue::_InternalParse(const char* begin, const char* end, void*
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string name = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.EnumValue.name");
auto str = msg->mutable_name();
@ -2607,7 +2592,7 @@ const char* EnumValue::_InternalParse(const char* begin, const char* end, void*
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 16) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int32 value = val;
msg->set_number(value);
@ -2617,7 +2602,7 @@ const char* EnumValue::_InternalParse(const char* begin, const char* end, void*
case 3: {
if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual;
do {
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Option::_InternalParse;
object = msg->add_options();
@ -2632,7 +2617,7 @@ const char* EnumValue::_InternalParse(const char* begin, const char* end, void*
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -2646,13 +2631,9 @@ const char* EnumValue::_InternalParse(const char* begin, const char* end, void*
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool EnumValue::MergePartialFromCodedStream(
@ -2763,8 +2744,7 @@ void EnumValue::SerializeWithCachedSizes(
}
::google::protobuf::uint8* EnumValue::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.EnumValue)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -2790,7 +2770,7 @@ void EnumValue::SerializeWithCachedSizes(
n = static_cast<unsigned int>(this->options_size()); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
3, this->options(static_cast<int>(i)), deterministic, target);
3, this->options(static_cast<int>(i)), target);
}
if (_internal_metadata_.have_unknown_fields()) {
@ -3052,13 +3032,13 @@ const char* Option::_InternalParse(const char* begin, const char* end, void* obj
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string name = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Option.name");
auto str = msg->mutable_name();
@ -3077,7 +3057,7 @@ const char* Option::_InternalParse(const char* begin, const char* end, void* obj
// .google.protobuf.Any value = 2;
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
parser_till_end = ::google::protobuf::Any::_InternalParse;
object = msg->mutable_value();
@ -3090,7 +3070,7 @@ const char* Option::_InternalParse(const char* begin, const char* end, void* obj
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
@ -3104,13 +3084,9 @@ const char* Option::_InternalParse(const char* begin, const char* end, void* obj
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Option::MergePartialFromCodedStream(
@ -3200,8 +3176,7 @@ void Option::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Option::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Option)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
@ -3221,7 +3196,7 @@ void Option::SerializeWithCachedSizes(
if (this->has_value()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageToArray(
2, HasBitSetters::value(this), deterministic, target);
2, HasBitSetters::value(this), target);
}
if (_internal_metadata_.have_unknown_fields()) {

View File

@ -240,7 +240,7 @@ class PROTOBUF_EXPORT Type : public ::google::protobuf::Message /* @@protoc_inse
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -452,7 +452,7 @@ class PROTOBUF_EXPORT Field : public ::google::protobuf::Message /* @@protoc_ins
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -805,7 +805,7 @@ class PROTOBUF_EXPORT Enum : public ::google::protobuf::Message /* @@protoc_inse
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -994,7 +994,7 @@ class PROTOBUF_EXPORT EnumValue : public ::google::protobuf::Message /* @@protoc
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
@ -1157,7 +1157,7 @@ class PROTOBUF_EXPORT Option : public ::google::protobuf::Message /* @@protoc_in
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:

View File

@ -54,22 +54,21 @@ const UnknownFieldSet* UnknownFieldSet::default_instance() {
}
void UnknownFieldSet::ClearFallback() {
GOOGLE_DCHECK(fields_ != NULL && fields_->size() > 0);
int n = fields_->size();
GOOGLE_DCHECK(!fields_.empty());
int n = fields_.size();
do {
(*fields_)[--n].Delete();
(fields_)[--n].Delete();
} while (n > 0);
delete fields_;
fields_ = NULL;
fields_.clear();
}
void UnknownFieldSet::InternalMergeFrom(const UnknownFieldSet& other) {
int other_field_count = other.field_count();
if (other_field_count > 0) {
fields_ = new std::vector<UnknownField>();
fields_.reserve(fields_.size() + other_field_count);
for (int i = 0; i < other_field_count; i++) {
fields_->push_back((*other.fields_)[i]);
fields_->back().DeepCopy((*other.fields_)[i]);
fields_.push_back((other.fields_)[i]);
fields_.back().DeepCopy((other.fields_)[i]);
}
}
}
@ -77,10 +76,10 @@ void UnknownFieldSet::InternalMergeFrom(const UnknownFieldSet& other) {
void UnknownFieldSet::MergeFrom(const UnknownFieldSet& other) {
int other_field_count = other.field_count();
if (other_field_count > 0) {
if (fields_ == NULL) fields_ = new std::vector<UnknownField>();
fields_.reserve(fields_.size() + other_field_count);
for (int i = 0; i < other_field_count; i++) {
fields_->push_back((*other.fields_)[i]);
fields_->back().DeepCopy((*other.fields_)[i]);
fields_.push_back((other.fields_)[i]);
fields_.back().DeepCopy((other.fields_)[i]);
}
}
}
@ -88,16 +87,14 @@ void UnknownFieldSet::MergeFrom(const UnknownFieldSet& other) {
// A specialized MergeFrom for performance when we are merging from an UFS that
// is temporary and can be destroyed in the process.
void UnknownFieldSet::MergeFromAndDestroy(UnknownFieldSet* other) {
int other_field_count = other->field_count();
if (other_field_count > 0) {
if (fields_ == NULL) fields_ = new std::vector<UnknownField>();
for (int i = 0; i < other_field_count; i++) {
fields_->push_back((*other->fields_)[i]);
(*other->fields_)[i].Reset();
}
if (fields_.empty()) {
fields_ = std::move(other->fields_);
} else {
fields_.insert(fields_.end(),
std::make_move_iterator(other->fields_.begin()),
std::make_move_iterator(other->fields_.end()));
}
delete other->fields_;
other->fields_ = NULL;
other->fields_.clear();
}
void UnknownFieldSet::MergeToInternalMetdata(
@ -107,12 +104,12 @@ void UnknownFieldSet::MergeToInternalMetdata(
}
size_t UnknownFieldSet::SpaceUsedExcludingSelfLong() const {
if (fields_ == NULL) return 0;
if (fields_.empty()) return 0;
size_t total_size = sizeof(*fields_) + sizeof(UnknownField) * fields_->size();
size_t total_size = sizeof(fields_) + sizeof(UnknownField) * fields_.size();
for (int i = 0; i < fields_->size(); i++) {
const UnknownField& field = (*fields_)[i];
for (int i = 0; i < fields_.size(); i++) {
const UnknownField& field = (fields_)[i];
switch (field.type()) {
case UnknownField::TYPE_LENGTH_DELIMITED:
total_size += sizeof(*field.data_.length_delimited_.string_value_) +
@ -138,8 +135,7 @@ void UnknownFieldSet::AddVarint(int number, uint64 value) {
field.number_ = number;
field.SetType(UnknownField::TYPE_VARINT);
field.data_.varint_ = value;
if (fields_ == NULL) fields_ = new std::vector<UnknownField>();
fields_->push_back(field);
fields_.push_back(field);
}
void UnknownFieldSet::AddFixed32(int number, uint32 value) {
@ -147,8 +143,7 @@ void UnknownFieldSet::AddFixed32(int number, uint32 value) {
field.number_ = number;
field.SetType(UnknownField::TYPE_FIXED32);
field.data_.fixed32_ = value;
if (fields_ == NULL) fields_ = new std::vector<UnknownField>();
fields_->push_back(field);
fields_.push_back(field);
}
void UnknownFieldSet::AddFixed64(int number, uint64 value) {
@ -156,8 +151,7 @@ void UnknownFieldSet::AddFixed64(int number, uint64 value) {
field.number_ = number;
field.SetType(UnknownField::TYPE_FIXED64);
field.data_.fixed64_ = value;
if (fields_ == NULL) fields_ = new std::vector<UnknownField>();
fields_->push_back(field);
fields_.push_back(field);
}
string* UnknownFieldSet::AddLengthDelimited(int number) {
@ -165,8 +159,7 @@ string* UnknownFieldSet::AddLengthDelimited(int number) {
field.number_ = number;
field.SetType(UnknownField::TYPE_LENGTH_DELIMITED);
field.data_.length_delimited_.string_value_ = new string;
if (fields_ == NULL) fields_ = new std::vector<UnknownField>();
fields_->push_back(field);
fields_.push_back(field);
return field.data_.length_delimited_.string_value_;
}
@ -176,57 +169,44 @@ UnknownFieldSet* UnknownFieldSet::AddGroup(int number) {
field.number_ = number;
field.SetType(UnknownField::TYPE_GROUP);
field.data_.group_ = new UnknownFieldSet;
if (fields_ == NULL) fields_ = new std::vector<UnknownField>();
fields_->push_back(field);
fields_.push_back(field);
return field.data_.group_;
}
void UnknownFieldSet::AddField(const UnknownField& field) {
if (fields_ == NULL) fields_ = new std::vector<UnknownField>();
fields_->push_back(field);
fields_->back().DeepCopy(field);
fields_.push_back(field);
fields_.back().DeepCopy(field);
}
void UnknownFieldSet::DeleteSubrange(int start, int num) {
// Delete the specified fields.
for (int i = 0; i < num; ++i) {
(*fields_)[i + start].Delete();
(fields_)[i + start].Delete();
}
// Slide down the remaining fields.
for (int i = start + num; i < fields_->size(); ++i) {
(*fields_)[i - num] = (*fields_)[i];
for (int i = start + num; i < fields_.size(); ++i) {
(fields_)[i - num] = (fields_)[i];
}
// Pop off the # of deleted fields.
for (int i = 0; i < num; ++i) {
fields_->pop_back();
}
if (fields_ && fields_->size() == 0) {
// maintain invariant: never hold fields_ if empty.
delete fields_;
fields_ = NULL;
fields_.pop_back();
}
}
void UnknownFieldSet::DeleteByNumber(int number) {
if (fields_ == NULL) return;
int left = 0; // The number of fields left after deletion.
for (int i = 0; i < fields_->size(); ++i) {
UnknownField* field = &(*fields_)[i];
for (int i = 0; i < fields_.size(); ++i) {
UnknownField* field = &(fields_)[i];
if (field->number() == number) {
field->Delete();
} else {
if (i != left) {
(*fields_)[left] = (*fields_)[i];
(fields_)[left] = (fields_)[i];
}
++left;
}
}
fields_->resize(left);
if (left == 0) {
// maintain invariant: never hold fields_ if empty.
delete fields_;
fields_ = NULL;
}
fields_.resize(left);
}
bool UnknownFieldSet::MergeFromCodedStream(io::CodedInputStream* input) {
@ -269,22 +249,6 @@ void UnknownField::Delete() {
}
}
// Reset all owned ptrs, a special function for performance, to avoid double
// owning the ptrs, when we merge from a temporary UnknownFieldSet objects.
void UnknownField::Reset() {
switch (type()) {
case UnknownField::TYPE_LENGTH_DELIMITED:
data_.length_delimited_.string_value_ = NULL;
break;
case UnknownField::TYPE_GROUP: {
data_.group_ = NULL;
break;
}
default:
break;
}
}
void UnknownField::DeepCopy(const UnknownField& other) {
switch (type()) {
case UnknownField::TYPE_LENGTH_DELIMITED:
@ -328,7 +292,7 @@ const char* PackedValidEnumParser(const char* begin, const char* end,
auto ptr = begin;
while (ptr < end) {
uint64 varint;
ptr = Varint::Parse64(ptr, &varint);
ptr = io::Parse64(ptr, &varint);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
int val = varint;
if (ctx->extra_parse_data().ValidateEnum<UnknownFieldSet>(val))
@ -343,7 +307,7 @@ const char* PackedValidEnumParserArg(const char* begin, const char* end,
auto ptr = begin;
while (ptr < end) {
uint64 varint;
ptr = Varint::Parse64(ptr, &varint);
ptr = io::Parse64(ptr, &varint);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
int val = varint;
if (ctx->extra_parse_data().ValidateEnumArg<UnknownFieldSet>(val))
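
The UnknownFieldSet changes above replace the heap-allocated fields_ pointer with an inline std::vector and let MergeFromAndDestroy move elements out of the donor set instead of copying them and Reset()-ing the originals. Below is a minimal, self-contained sketch of that move-append idiom; std::string stands in for UnknownField, and none of this is the library's actual code.

#include <iterator>
#include <string>
#include <utility>
#include <vector>

// Standalone sketch of the move-append idiom used by the new
// MergeFromAndDestroy above: take the donor's buffer wholesale when the
// destination is empty, otherwise move-insert element by element.
void MergeFromAndDestroy(std::vector<std::string>* dst,
                         std::vector<std::string>* donor) {
  if (dst->empty()) {
    *dst = std::move(*donor);  // steal the donor's storage, O(1)
  } else {
    dst->insert(dst->end(),
                std::make_move_iterator(donor->begin()),
                std::make_move_iterator(donor->end()));
  }
  donor->clear();  // leave the donor valid but empty, as in the hunk above
}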

View File

@ -176,17 +176,11 @@ class PROTOBUF_EXPORT UnknownFieldSet {
// For InternalMergeFrom
friend class UnknownField;
// Merges from other UnknownFieldSet. This method assumes that this object
// is newly created and has fields_ == NULL;
// is newly created and has no fields.
void InternalMergeFrom(const UnknownFieldSet& other);
void ClearFallback();
// fields_ is either NULL, or a pointer to a vector that is *non-empty*. We
// never hold the empty vector because we want the 'do we have any unknown
// fields' check to be fast, and avoid a cache miss: the UFS instance gets
// embedded in the message object, so 'fields_ != NULL' tests a member
// variable hot in the cache, without the need to go touch a vector somewhere
// else in memory.
std::vector<UnknownField>* fields_;
std::vector<UnknownField> fields_;
GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(UnknownFieldSet);
};
@ -200,13 +194,16 @@ inline void WriteLengthDelimited(uint32 num, StringPiece val,
unknown->AddLengthDelimited(num)->assign(val.data(), val.size());
}
PROTOBUF_EXPORT
const char* PackedValidEnumParser(const char* begin, const char* end,
void* object, ParseContext* ctx);
PROTOBUF_EXPORT
const char* PackedValidEnumParserArg(const char* begin, const char* end,
void* object, ParseContext* ctx);
PROTOBUF_EXPORT
const char* UnknownGroupParse(const char* begin, const char* end, void* object,
ParseContext* ctx);
PROTOBUF_EXPORT
std::pair<const char*, bool> UnknownFieldParse(uint64 tag, ParseClosure parent,
const char* begin,
const char* end,
@ -262,10 +259,6 @@ class PROTOBUF_EXPORT UnknownField {
// If this UnknownField contains a pointer, delete it.
void Delete();
// Reset all the underlying pointers to NULL. A special function to be only
// used while merging from a temporary UFS.
void Reset();
// Make a deep copy of any pointers in this UnknownField.
void DeepCopy(const UnknownField& other);
@ -291,36 +284,34 @@ class PROTOBUF_EXPORT UnknownField {
// ===================================================================
// inline implementations
inline UnknownFieldSet::UnknownFieldSet() : fields_(NULL) {}
inline UnknownFieldSet::UnknownFieldSet() {}
inline UnknownFieldSet::~UnknownFieldSet() { Clear(); }
inline void UnknownFieldSet::ClearAndFreeMemory() { Clear(); }
inline void UnknownFieldSet::Clear() {
if (fields_ != NULL) {
if (!fields_.empty()) {
ClearFallback();
}
}
inline bool UnknownFieldSet::empty() const {
// Invariant: fields_ is never empty if present.
return !fields_;
return fields_.empty();
}
inline void UnknownFieldSet::Swap(UnknownFieldSet* x) {
std::swap(fields_, x->fields_);
fields_.swap(x->fields_);
}
inline int UnknownFieldSet::field_count() const {
return fields_ ? static_cast<int>(fields_->size()) : 0;
return static_cast<int>(fields_.size());
}
inline const UnknownField& UnknownFieldSet::field(int index) const {
GOOGLE_DCHECK(fields_ != NULL);
return (*fields_)[static_cast<size_t>(index)];
return (fields_)[static_cast<size_t>(index)];
}
inline UnknownField* UnknownFieldSet::mutable_field(int index) {
return &(*fields_)[static_cast<size_t>(index)];
return &(fields_)[static_cast<size_t>(index)];
}
inline void UnknownFieldSet::AddLengthDelimited(

View File

@ -274,6 +274,7 @@ StatusOr<string> DataPiece::ToBytes() const {
StatusOr<int> DataPiece::ToEnum(const google::protobuf::Enum* enum_type,
bool use_lower_camel_for_enums,
bool case_insensitive_enum_parsing,
bool ignore_unknown_enum_values,
bool* is_unknown_enum_value) const {
if (type_ == TYPE_NULL) return google::protobuf::NULL_VALUE;
@ -295,11 +296,16 @@ StatusOr<int> DataPiece::ToEnum(const google::protobuf::Enum* enum_type,
}
// Next try a normalized name.
for (string::iterator it = enum_name.begin(); it != enum_name.end(); ++it) {
*it = *it == '-' ? '_' : ascii_toupper(*it);
bool should_normalize_enum =
case_insensitive_enum_parsing || use_lower_camel_for_enums;
if (should_normalize_enum) {
for (string::iterator it = enum_name.begin(); it != enum_name.end();
++it) {
*it = *it == '-' ? '_' : ascii_toupper(*it);
}
value = FindEnumValueByNameOrNull(enum_type, enum_name);
if (value != nullptr) return value->number();
}
value = FindEnumValueByNameOrNull(enum_type, enum_name);
if (value != nullptr) return value->number();
// If use_lower_camel_for_enums is true try with enum name without
// underscore. This will also accept camel case names as the enum_name has

View File

@ -169,7 +169,10 @@ class PROTOBUF_EXPORT DataPiece {
// If none of these succeeds, returns a conversion error status.
util::StatusOr<int> ToEnum(const google::protobuf::Enum* enum_type,
bool use_lower_camel_for_enums) const {
return ToEnum(enum_type, use_lower_camel_for_enums, false, nullptr);
return ToEnum(enum_type, use_lower_camel_for_enums,
/* ignore_unknown_enum_values */ false,
/* case_insensitive_enum_parsing */ true,
/* is_unknown_enum_value */ nullptr);
}
private:
@ -186,6 +189,7 @@ class PROTOBUF_EXPORT DataPiece {
// unknown enum values.
util::StatusOr<int> ToEnum(const google::protobuf::Enum* enum_type,
bool use_lower_camel_for_enums,
bool case_insensitive_enum_parsing,
bool ignore_unknown_enum_values,
bool* is_unknown_enum_value) const;
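
The new case_insensitive_enum_parsing flag gates the normalization pass shown in the data_piece.cc hunk above, which folds '-' to '_' and upper-cases the candidate before the enum-name lookup. The sketch below is a standalone illustration of that normalization step only, assuming a plain std::string input; the real code then consults the google::protobuf::Enum descriptor.

#include <cctype>
#include <iostream>
#include <string>

// Standalone sketch of the normalization gated by
// case_insensitive_enum_parsing / use_lower_camel_for_enums above:
// fold '-' to '_' and upper-case the name before the enum-value lookup.
std::string NormalizeEnumName(std::string name) {
  for (char& ch : name) {
    ch = (ch == '-') ? '_'
                     : static_cast<char>(std::toupper(
                           static_cast<unsigned char>(ch)));
  }
  return name;
}

int main() {
  std::cout << NormalizeEnumName("json-name") << "\n";  // prints JSON_NAME
}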

View File

@ -119,7 +119,7 @@ class PROTOBUF_EXPORT DefaultValueObjectWriter : public ObjectWriter {
DefaultValueObjectWriter* RenderBytes(StringPiece name,
StringPiece value) override;
DefaultValueObjectWriter* RenderNull(StringPiece name) override;
virtual DefaultValueObjectWriter* RenderNull(StringPiece name);
// Register the callback for scrubbing of fields. Ownership of
// field_scrub_callback pointer is also transferred to this class

View File

@ -95,20 +95,21 @@ class PROTOBUF_EXPORT JsonObjectWriter : public StructuredObjectWriter {
virtual ~JsonObjectWriter();
// ObjectWriter methods.
JsonObjectWriter* StartObject(StringPiece name) override;
JsonObjectWriter* EndObject() override;
JsonObjectWriter* StartList(StringPiece name) override;
JsonObjectWriter* EndList() override;
JsonObjectWriter* RenderBool(StringPiece name, bool value) override;
JsonObjectWriter* RenderInt32(StringPiece name, int32 value) override;
JsonObjectWriter* RenderUint32(StringPiece name, uint32 value) override;
JsonObjectWriter* RenderInt64(StringPiece name, int64 value) override;
JsonObjectWriter* RenderUint64(StringPiece name, uint64 value) override;
JsonObjectWriter* RenderDouble(StringPiece name, double value) override;
JsonObjectWriter* RenderFloat(StringPiece name, float value) override;
JsonObjectWriter* RenderString(StringPiece name, StringPiece value) override;
JsonObjectWriter* RenderBytes(StringPiece name, StringPiece value) override;
JsonObjectWriter* RenderNull(StringPiece name) override;
virtual JsonObjectWriter* StartObject(StringPiece name);
virtual JsonObjectWriter* EndObject();
virtual JsonObjectWriter* StartList(StringPiece name);
virtual JsonObjectWriter* EndList();
virtual JsonObjectWriter* RenderBool(StringPiece name, bool value);
virtual JsonObjectWriter* RenderInt32(StringPiece name, int32 value);
virtual JsonObjectWriter* RenderUint32(StringPiece name, uint32 value);
virtual JsonObjectWriter* RenderInt64(StringPiece name, int64 value);
virtual JsonObjectWriter* RenderUint64(StringPiece name, uint64 value);
virtual JsonObjectWriter* RenderDouble(StringPiece name, double value);
virtual JsonObjectWriter* RenderFloat(StringPiece name, float value);
virtual JsonObjectWriter* RenderString(StringPiece name,
StringPiece value);
virtual JsonObjectWriter* RenderBytes(StringPiece name, StringPiece value);
virtual JsonObjectWriter* RenderNull(StringPiece name);
virtual JsonObjectWriter* RenderNullAsEmpty(StringPiece name);
void set_use_websafe_base64_for_bytes(bool value) {

View File

@ -140,7 +140,7 @@ class JsonStreamParserTest : public ::testing::Test {
bool allow_empty_null = false) {
util::Status result =
RunTest(json, split, coerce_utf8, allow_empty_null);
EXPECT_EQ(util::error::INVALID_ARGUMENT, result.error_code());
EXPECT_EQ(util::error::INVALID_ARGUMENT, result.code());
StringPiece error_message(result.error_message());
EXPECT_EQ(error_prefix, error_message.substr(0, error_prefix.size()));
}

View File

@ -69,6 +69,7 @@ ProtoWriter::ProtoWriter(TypeResolver* type_resolver,
ignore_unknown_fields_(false),
ignore_unknown_enum_values_(false),
use_lower_camel_for_enums_(false),
case_insensitive_enum_parsing_(true),
element_(nullptr),
size_insert_(),
output_(output),
@ -89,6 +90,7 @@ ProtoWriter::ProtoWriter(const TypeInfo* typeinfo,
ignore_unknown_fields_(false),
ignore_unknown_enum_values_(false),
use_lower_camel_for_enums_(false),
case_insensitive_enum_parsing_(true),
element_(nullptr),
size_insert_(),
output_(output),
@ -589,9 +591,11 @@ Status ProtoWriter::WriteEnum(int field_number, const DataPiece& data,
const google::protobuf::Enum* enum_type,
CodedOutputStream* stream,
bool use_lower_camel_for_enums,
bool case_insensitive_enum_parsing,
bool ignore_unknown_values) {
bool is_unknown_enum_value = false;
StatusOr<int> e = data.ToEnum(enum_type, use_lower_camel_for_enums,
case_insensitive_enum_parsing,
ignore_unknown_values, &is_unknown_enum_value);
if (e.ok() && !is_unknown_enum_value) {
WireFormatLite::WriteEnum(field_number, e.ValueOrDie(), stream);
@ -689,10 +693,10 @@ ProtoWriter* ProtoWriter::RenderPrimitiveField(
break;
}
case google::protobuf::Field_Kind_TYPE_ENUM: {
status = WriteEnum(field.number(), data,
typeinfo_->GetEnumByTypeUrl(field.type_url()),
stream_.get(), use_lower_camel_for_enums_,
ignore_unknown_enum_values_);
status = WriteEnum(
field.number(), data, typeinfo_->GetEnumByTypeUrl(field.type_url()),
stream_.get(), use_lower_camel_for_enums_,
case_insensitive_enum_parsing_, ignore_unknown_enum_values_);
break;
}
default: // TYPE_GROUP or TYPE_MESSAGE

View File

@ -162,6 +162,10 @@ class PROTOBUF_EXPORT ProtoWriter : public StructuredObjectWriter {
use_lower_camel_for_enums_ = use_lower_camel_for_enums;
}
void set_case_insensitive_enum_parsing(bool case_insensitive_enum_parsing) {
case_insensitive_enum_parsing_ = case_insensitive_enum_parsing;
}
protected:
class PROTOBUF_EXPORT ProtoElement : public BaseElement,
public LocationTrackerInterface {
@ -315,6 +319,7 @@ class PROTOBUF_EXPORT ProtoWriter : public StructuredObjectWriter {
const google::protobuf::Enum* enum_type,
io::CodedOutputStream* stream,
bool use_lower_camel_for_enums,
bool case_insensitive_enum_parsing,
bool ignore_unknown_values);
// Variables for describing the structure of the input tree:
@ -338,6 +343,9 @@ class PROTOBUF_EXPORT ProtoWriter : public StructuredObjectWriter {
// field name.
bool use_lower_camel_for_enums_;
// If true, check if enum name in UPPER_CASE matches the field name.
bool case_insensitive_enum_parsing_;
// Variable for internal state processing:
// element_ : the current element.
// size_insert_: sizes of nested messages.

View File

@ -783,13 +783,14 @@ Status ProtoStreamObjectSource::RenderField(
bool use_type_renderer = type_renderer != nullptr;
RETURN_IF_ERROR(IncrementRecursionDepth(type->name(), field_name));
if (use_type_renderer) {
RETURN_IF_ERROR((*type_renderer)(this, *type, field_name, ow));
} else {
RETURN_IF_ERROR(IncrementRecursionDepth(type->name(), field_name));
RETURN_IF_ERROR(WriteMessage(*type, field_name, 0, true, ow));
--recursion_depth_;
}
--recursion_depth_;
if (!stream_->ConsumedEntireMessage()) {
return Status(util::error::INVALID_ARGUMENT,
"Nested protocol message not parsed in its entirety.");

Some files were not shown because too many files have changed in this diff