Sync from Piper @429333699

PROTOBUF_SYNC_PIPER

commit 1ba1d73e0d

.gitignore
@@ -215,5 +215,10 @@ _build/
.idea
*.iml

# Eclipse
**/.settings
**/.project
**/.classpath

# BenchmarkDotNet
BenchmarkDotNet.Artifacts/
BUILD
@@ -146,6 +146,7 @@ load(
"adapt_proto_library",
"cc_proto_library",
"internal_copied_filegroup",
"internal_gen_kt_protos",
"internal_gen_well_known_protos_java",
"internal_protobuf_py_tests",
"py_proto_library",
@@ -891,6 +892,23 @@ internal_gen_well_known_protos_java(
deps = [proto + "_proto" for proto in LITE_WELL_KNOWN_PROTO_MAP.keys()],
)

internal_gen_kt_protos(
name = "gen_well_known_protos_kotlin",
visibility = [
"//java:__subpackages__",
],
deps = [proto + "_proto" for proto in WELL_KNOWN_PROTO_MAP.keys()],
)

internal_gen_kt_protos(
name = "gen_well_known_protos_kotlinlite",
visibility = [
"//java:__subpackages__",
],
lite = True,
deps = [proto + "_proto" for proto in LITE_WELL_KNOWN_PROTO_MAP.keys()],
)

alias(
name = "protobuf_java",
actual = "//java/core",
@@ -1428,143 +1446,60 @@ filegroup(

# Kotlin proto rules

genrule(
name = "gen_kotlin_unittest_lite",
proto_library(
name = "kt_unittest_lite",
srcs = [
"src/google/protobuf/unittest_lite.proto",
"src/google/protobuf/unittest_import_lite.proto",
"src/google/protobuf/unittest_import_public_lite.proto",
"src/google/protobuf/map_lite_unittest.proto",
],
outs = [
"TestAllTypesLiteKt.kt",
"ForeignMessageLiteKt.kt",
"TestAllExtensionsLiteKt.kt",
"TestEmptyMessageLiteKt.kt",
"TestEmptyMessageWithExtensionsLiteKt.kt",
"TestMapLiteKt.kt",
"OptionalGroup_extension_liteKt.kt",
"RepeatedGroup_extension_liteKt.kt",
],
visibility = ["//java:__subpackages__"],
cmd = "$(location //:protoc) " +
"--kotlin_out=lite:$(@D) -Isrc/ " +
"$(locations src/google/protobuf/unittest_lite.proto) && " +
"$(location //:protoc) " +
"--kotlin_out=lite:$(@D) -Isrc/ " +
"$(locations src/google/protobuf/map_lite_unittest.proto) && " +
"cp $(@D)/com/google/protobuf/TestAllTypesLiteKt.kt " +
"$(location TestAllTypesLiteKt.kt) && " +
"cp $(@D)/com/google/protobuf/ForeignMessageLiteKt.kt " +
"$(location ForeignMessageLiteKt.kt) && " +
"cp $(@D)/com/google/protobuf/TestAllExtensionsLiteKt.kt " +
"$(location TestAllExtensionsLiteKt.kt) && " +
"cp $(@D)/com/google/protobuf/TestAllTypesLiteKt.kt " +
"$(location TestAllTypesLiteKt.kt) && " +
"cp $(@D)/com/google/protobuf/TestEmptyMessageLiteKt.kt " +
"$(location TestEmptyMessageLiteKt.kt) && " +
"cp $(@D)/com/google/protobuf/TestEmptyMessageWithExtensionsLiteKt.kt " +
"$(location TestEmptyMessageWithExtensionsLiteKt.kt) && " +
"cp $(@D)/protobuf_unittest/TestMapLiteKt.kt " +
"$(location TestMapLiteKt.kt) && " +
"cp $(@D)/com/google/protobuf/OptionalGroup_extension_liteKt.kt " +
"$(location OptionalGroup_extension_liteKt.kt) && " +
"cp $(@D)/com/google/protobuf/RepeatedGroup_extension_liteKt.kt " +
"$(location RepeatedGroup_extension_liteKt.kt)",
tools = [":protoc"],
strip_import_prefix = "src",
)

genrule(
name = "gen_kotlin_unittest",
internal_gen_kt_protos(
name = "gen_kotlin_unittest_lite",
deps = [":kt_unittest_lite"],
lite = True,
visibility = ["//java:__subpackages__"],
)

proto_library(
name = "kt_unittest",
srcs = [
"src/google/protobuf/unittest.proto",
"src/google/protobuf/unittest_import.proto",
"src/google/protobuf/unittest_import_public.proto",
"src/google/protobuf/map_proto2_unittest.proto",
],
outs = [
"TestAllTypesKt.kt",
"ForeignMessageKt.kt",
"TestAllExtensionsKt.kt",
"TestEmptyMessageKt.kt",
"TestEmptyMessageWithExtensionsKt.kt",
"TestIntIntMapKt.kt",
"TestEnumMapKt.kt",
"TestMapsKt.kt",
"OptionalGroup_extensionKt.kt",
"RepeatedGroup_extensionKt.kt",
],
visibility = ["//java:__subpackages__"],
cmd = "$(location //:protoc) " +
"--kotlin_out=shared,immutable:$(@D) -Isrc/ " +
"$(location src/google/protobuf/unittest.proto) && " +
"$(location //:protoc) " +
"--kotlin_out=shared,immutable:$(@D) -Isrc/ " +
"$(location src/google/protobuf/map_proto2_unittest.proto) && " +
"cp $(@D)/protobuf_unittest/TestAllTypesKt.kt " +
"$(location TestAllTypesKt.kt) && " +
"cp $(@D)/protobuf_unittest/ForeignMessageKt.kt " +
"$(location ForeignMessageKt.kt) && " +
"cp $(@D)/protobuf_unittest/TestAllExtensionsKt.kt " +
"$(location TestAllExtensionsKt.kt) && " +
"cp $(@D)/protobuf_unittest/TestEmptyMessageKt.kt " +
"$(location TestEmptyMessageKt.kt) && " +
"cp $(@D)/protobuf_unittest/TestEmptyMessageWithExtensionsKt.kt " +
"$(location TestEmptyMessageWithExtensionsKt.kt) && " +
"cp $(@D)/protobuf_unittest/TestIntIntMapKt.kt " +
"$(location TestIntIntMapKt.kt) && " +
"cp $(@D)/protobuf_unittest/TestEnumMapKt.kt " +
"$(location TestEnumMapKt.kt) && " +
"cp $(@D)/protobuf_unittest/TestMapsKt.kt " +
"$(location TestMapsKt.kt) && " +
"cp $(@D)/protobuf_unittest/OptionalGroup_extensionKt.kt " +
"$(location OptionalGroup_extensionKt.kt) && " +
"cp $(@D)/protobuf_unittest/RepeatedGroup_extensionKt.kt " +
"$(location RepeatedGroup_extensionKt.kt)",
tools = ["//:protoc"],
strip_import_prefix = "src",
)

genrule(
name = "gen_kotlin_proto3_unittest_lite",
srcs = [
"src/google/protobuf/unittest_proto3_lite.proto",
"src/google/protobuf/unittest_import.proto",
"src/google/protobuf/unittest_import_public.proto",
],
outs = [
"TestAllTypesProto3LiteKt.kt",
"TestEmptyMessageProto3LiteKt.kt",
],
internal_gen_kt_protos(
name = "gen_kotlin_unittest",
deps = [":kt_unittest"],
visibility = ["//java:__subpackages__"],
cmd = "$(location //:protoc) " +
"--kotlin_out=lite:$(@D) -Isrc/ " +
"$(location src/google/protobuf/unittest_proto3_lite.proto) && " +
"cp $(@D)/proto3_lite_unittest/TestAllTypesKt.kt " +
"$(location TestAllTypesProto3LiteKt.kt) && " +
"cp $(@D)/proto3_lite_unittest/TestEmptyMessageKt.kt " +
"$(location TestEmptyMessageProto3LiteKt.kt)",
tools = ["//:protoc"],
)

genrule(
name = "gen_kotlin_proto3_unittest",
proto_library(
name = "kt_proto3_unittest",
srcs = [
"src/google/protobuf/unittest_proto3.proto",
"src/google/protobuf/unittest_import.proto",
"src/google/protobuf/unittest_import_public.proto",
],
outs = [
"TestAllTypesProto3Kt.kt",
"TestEmptyMessageProto3Kt.kt",
],
visibility = ["//java:__subpackages__"],
cmd = "$(location //:protoc) " +
"--kotlin_out=shared,immutable:$(@D) -Isrc/ " +
"$(location src/google/protobuf/unittest_proto3.proto) && " +
"cp $(@D)/proto3_unittest/TestAllTypesKt.kt " +
"$(location TestAllTypesProto3Kt.kt) && " +
"cp $(@D)/proto3_unittest/TestEmptyMessageKt.kt " +
"$(location TestEmptyMessageProto3Kt.kt)",
tools = ["//:protoc"],
strip_import_prefix = "src",
)

internal_gen_kt_protos(
name = "gen_kotlin_proto3_unittest_lite",
deps = [":kt_proto3_unittest"],
lite = True,
visibility = ["//java:__subpackages__"],
)

internal_gen_kt_protos(
name = "gen_kotlin_proto3_unittest",
deps = [":kt_proto3_unittest"],
visibility = ["//java:__subpackages__"],
)
@@ -21,6 +21,7 @@ Unreleased Changes (C++/Java/Python/PHP/Objective-C/C#/Ruby/JavaScript)
* Report original exceptions when parsing JSON
* Add more info to @deprecated javadoc for set/get/has methods
* Fix initialization bug in doc comment line numbers
* Fix comments for message set wire format.

Kotlin
* Add orNull extensions for optional message fields in Kotlin.
@@ -46,6 +47,8 @@ Unreleased Changes (C++/Java/Python/PHP/Objective-C/C#/Ruby/JavaScript)
zone. If omitted or None, the function returns a timezone-naive UTC datetime
(as previously).
* Adds client_streaming and server_streaming fields to MethodDescriptor.
* Add "ensure_ascii" parameter to json_format.MessageToJson. This allows smaller
JSON serializations with UTF-8 or other non-ASCII encodings.

Compiler
* Migrate IsDefault(const std::string*) and UnsafeSetDefault(const std::string*)
@@ -63,6 +66,7 @@ Unreleased Changes (C++/Java/Python/PHP/Objective-C/C#/Ruby/JavaScript)
* Do not log/report the same descriptor symbol multiple times if it contains
more than one invalid character.
* Add UnknownFieldSet::SerializeToString and SerializeToCodedStream.
* Remove explicit default pointers and deprecated API from protocol compiler

Arenas
* Change Repeated*Field to reuse memory when using arenas.
@@ -80,6 +84,10 @@ Unreleased Changes (C++/Java/Python/PHP/Objective-C/C#/Ruby/JavaScript)
* Generate narrower code
* Fix https://github.com/protocolbuffers/protobuf/issues/9378 by removing
shadowed _cached_size_ field
* Remove GetPointer() and explicit nullptr defaults.
* add proto_h flag for speeding up large builds
* Add missing overload for reference wrapped fields.


2022-01-28 version 3.19.4 (C++/Java/Python/PHP/Objective-C/C#/Ruby/JavaScript)
Makefile.am
@@ -54,6 +54,7 @@ csharp_EXTRA_DIST= \
csharp/CHANGES.txt \
csharp/Google.Protobuf.Tools.targets \
csharp/Google.Protobuf.Tools.nuspec \
csharp/NuGet.Config \
csharp/README.md \
csharp/build_packages.bat \
csharp/build_tools.sh \
@@ -181,10 +182,14 @@ csharp_EXTRA_DIST= \
csharp/src/Google.Protobuf/Collections/ProtobufEqualityComparers.cs \
csharp/src/Google.Protobuf/Collections/ReadOnlyDictionary.cs \
csharp/src/Google.Protobuf/Collections/RepeatedField.cs \
csharp/src/Google.Protobuf/Compatibility/DynamicallyAccessedMembersAttribute.cs \
csharp/src/Google.Protobuf/Compatibility/DynamicallyAccessedMemberTypes.cs \
csharp/src/Google.Protobuf/Compatibility/MethodInfoExtensions.cs \
csharp/src/Google.Protobuf/Compatibility/PropertyInfoExtensions.cs \
csharp/src/Google.Protobuf/Compatibility/RequiresUnreferencedCodeAttribute.cs \
csharp/src/Google.Protobuf/Compatibility/StreamExtensions.cs \
csharp/src/Google.Protobuf/Compatibility/TypeExtensions.cs \
csharp/src/Google.Protobuf/Compatibility/UnconditionalSuppressMessageAttribute.cs \
csharp/src/Google.Protobuf/Extension.cs \
csharp/src/Google.Protobuf/ExtensionRegistry.cs \
csharp/src/Google.Protobuf/ExtensionSet.cs \
@@ -303,6 +308,7 @@ java_EXTRA_DIST=
java/core/src/main/java/com/google/protobuf/CodedInputStreamReader.java \
java/core/src/main/java/com/google/protobuf/CodedOutputStream.java \
java/core/src/main/java/com/google/protobuf/CodedOutputStreamWriter.java \
java/core/src/main/java/com/google/protobuf/CompileTimeConstant.java \
java/core/src/main/java/com/google/protobuf/DescriptorMessageInfoFactory.java \
java/core/src/main/java/com/google/protobuf/Descriptors.java \
java/core/src/main/java/com/google/protobuf/DiscardUnknownFieldsParser.java \
@@ -326,6 +332,7 @@ java_EXTRA_DIST=
java/core/src/main/java/com/google/protobuf/GeneratedMessageInfoFactory.java \
java/core/src/main/java/com/google/protobuf/GeneratedMessageLite.java \
java/core/src/main/java/com/google/protobuf/GeneratedMessageV3.java \
java/core/src/main/java/com/google/protobuf/InlineMe.java \
java/core/src/main/java/com/google/protobuf/IntArrayList.java \
java/core/src/main/java/com/google/protobuf/Internal.java \
java/core/src/main/java/com/google/protobuf/InvalidProtocolBufferException.java \
@@ -437,9 +444,9 @@ java_EXTRA_DIST=
java/core/src/test/java/com/google/protobuf/ExtensionRegistryFactoryTest.java \
java/core/src/test/java/com/google/protobuf/FieldPresenceTest.java \
java/core/src/test/java/com/google/protobuf/FloatArrayListTest.java \
java/core/src/test/java/com/google/protobuf/ForceFieldBuildersPreRun.java \
java/core/src/test/java/com/google/protobuf/GeneratedMessageTest.java \
java/core/src/test/java/com/google/protobuf/IntArrayListTest.java \
java/core/src/test/java/com/google/protobuf/InvalidProtocolBufferExceptionTest.java \
java/core/src/test/java/com/google/protobuf/IsValidUtf8Test.java \
java/core/src/test/java/com/google/protobuf/IsValidUtf8TestUtil.java \
java/core/src/test/java/com/google/protobuf/LazyFieldLiteTest.java \
@@ -464,15 +471,12 @@ java_EXTRA_DIST=
java/core/src/test/java/com/google/protobuf/Proto2ExtensionLookupSchemaTest.java \
java/core/src/test/java/com/google/protobuf/Proto2LiteSchemaTest.java \
java/core/src/test/java/com/google/protobuf/Proto2MessageFactory.java \
java/core/src/test/java/com/google/protobuf/Proto2MessageInfoFactory.java \
java/core/src/test/java/com/google/protobuf/Proto2MessageLiteFactory.java \
java/core/src/test/java/com/google/protobuf/Proto2SchemaTest.java \
java/core/src/test/java/com/google/protobuf/Proto2UnknownEnumValueTest.java \
java/core/src/test/java/com/google/protobuf/Proto3LiteSchemaTest.java \
java/core/src/test/java/com/google/protobuf/Proto3MessageFactory.java \
java/core/src/test/java/com/google/protobuf/Proto3MessageInfoFactory.java \
java/core/src/test/java/com/google/protobuf/Proto3MessageLiteFactory.java \
java/core/src/test/java/com/google/protobuf/Proto3MessageLiteInfoFactory.java \
java/core/src/test/java/com/google/protobuf/Proto3SchemaTest.java \
java/core/src/test/java/com/google/protobuf/ProtobufArrayListTest.java \
java/core/src/test/java/com/google/protobuf/RepeatedFieldBuilderV3Test.java \
@@ -493,9 +497,9 @@ java_EXTRA_DIST=
java/core/src/test/java/com/google/protobuf/TypeRegistryTest.java \
java/core/src/test/java/com/google/protobuf/UnknownEnumValueTest.java \
java/core/src/test/java/com/google/protobuf/UnknownFieldSetTest.java \
java/core/src/test/java/com/google/protobuf/UnknownFieldSetPerformanceTest.java \
java/core/src/test/java/com/google/protobuf/UnmodifiableLazyStringListTest.java \
java/core/src/test/java/com/google/protobuf/Utf8Test.java \
java/core/src/test/java/com/google/protobuf/Utf8Utils.java \
java/core/src/test/java/com/google/protobuf/WellKnownTypesTest.java \
java/core/src/test/java/com/google/protobuf/WireFormatLiteTest.java \
java/core/src/test/java/com/google/protobuf/WireFormatTest.java \
@@ -504,6 +508,7 @@ java_EXTRA_DIST=
java/core/src/test/proto/com/google/protobuf/any_test.proto \
java/core/src/test/proto/com/google/protobuf/cached_field_size_test.proto \
java/core/src/test/proto/com/google/protobuf/deprecated_file.proto \
java/core/src/test/proto/com/google/protobuf/dynamic_message_test.proto \
java/core/src/test/proto/com/google/protobuf/field_presence_test.proto \
java/core/src/test/proto/com/google/protobuf/lazy_fields_lite.proto \
java/core/src/test/proto/com/google/protobuf/lite_equals_and_hash.proto \
@@ -512,7 +517,6 @@ java_EXTRA_DIST=
java/core/src/test/proto/com/google/protobuf/map_initialization_order_test.proto \
java/core/src/test/proto/com/google/protobuf/map_lite_test.proto \
java/core/src/test/proto/com/google/protobuf/map_test.proto \
java/core/src/test/proto/com/google/protobuf/message_lite_extension_util_test.proto\
java/core/src/test/proto/com/google/protobuf/multiple_files_test.proto \
java/core/src/test/proto/com/google/protobuf/nested_builders_test.proto \
java/core/src/test/proto/com/google/protobuf/nested_extension.proto \
@@ -535,19 +539,22 @@ java_EXTRA_DIST=
java/core/src/test/proto/com/google/protobuf/wrappers_test.proto \
java/internal/BUILD \
java/internal/testing.bzl \
java/kotlin/BUILD \
java/kotlin/generate-sources-build.xml \
java/kotlin/generate-test-sources-build.xml \
java/kotlin/pom.xml \
java/kotlin/pom_template.xml \
java/kotlin/src/main/kotlin/com/google/protobuf/Anies.kt \
java/kotlin/src/main/kotlin/com/google/protobuf/ByteStrings.kt \
java/kotlin/src/main/kotlin/com/google/protobuf/DslList.kt \
java/kotlin/src/main/kotlin/com/google/protobuf/DslMap.kt \
java/kotlin/src/main/kotlin/com/google/protobuf/DslProxy.kt \
java/kotlin/src/main/kotlin/com/google/protobuf/ExtendableMessageExtensions.kt \
java/kotlin/src/main/kotlin/com/google/protobuf/ExtendableMessageLiteExtensions.kt\
java/kotlin/src/main/kotlin/com/google/protobuf/ExtensionList.kt \
java/kotlin/src/main/kotlin/com/google/protobuf/OnlyForUseByGeneratedProtoCode.kt\
java/kotlin/src/main/kotlin/com/google/protobuf/ProtoDslMarker.kt \
java/kotlin/src/main/kotlin/com/google/protobuf/UnmodifiableCollections.kt \
java/kotlin/src/test/kotlin/com/google/protobuf/AniesTest.kt \
java/kotlin/src/test/kotlin/com/google/protobuf/ByteStringsTest.kt \
java/kotlin/src/test/kotlin/com/google/protobuf/DslListTest.kt \
java/kotlin/src/test/kotlin/com/google/protobuf/DslMapTest.kt \
@@ -559,13 +566,20 @@ java_EXTRA_DIST=
java/kotlin/src/test/proto/com/google/protobuf/evil_names_proto3.proto \
java/kotlin/src/test/proto/com/google/protobuf/example_extensible_message.proto \
java/kotlin/src/test/proto/com/google/protobuf/multiple_files_proto3.proto \
java/kotlin-lite/BUILD \
java/kotlin-lite/generate-sources-build.xml \
java/kotlin-lite/generate-test-sources-build.xml \
java/kotlin-lite/lite.awk \
java/kotlin-lite/pom.xml \
java/kotlin-lite/pom_template.xml \
java/kotlin-lite/process-lite-sources-build.xml \
java/kotlin-lite/src/main/kotlin/com/google/protobuf/ExtendableMessageLiteExtensions.kt\
java/kotlin-lite/src/test/kotlin/com/google/protobuf/ExtendableMessageLiteExtensionsTest.kt\
java/kotlin-lite/src/test/kotlin/com/google/protobuf/Proto3LiteTest.kt \
java/kotlin-lite/src/test/kotlin/com/google/protobuf/Proto2LiteTest.kt \
java/kotlin-lite/src/test/proto/com/google/protobuf/evil_names_proto2.proto \
java/kotlin-lite/src/test/proto/com/google/protobuf/evil_names_proto3.proto \
java/kotlin-lite/src/test/proto/com/google/protobuf/multiple_files_proto3.proto \
java/lite.md \
java/lite/BUILD \
java/lite/generate-sources-build.xml \
@@ -575,7 +589,6 @@ java_EXTRA_DIST=
java/lite/pom_template.xml \
java/lite/process-lite-sources-build.xml \
java/lite/src/test/java/com/google/protobuf/LiteTest.java \
java/lite/src/test/java/com/google/protobuf/Proto2MessageLiteInfoFactory.java \
java/BUILD \
java/pom.xml \
java/util/BUILD \
@@ -588,10 +601,12 @@ java_EXTRA_DIST=
java/util/src/main/java/com/google/protobuf/util/Structs.java \
java/util/src/main/java/com/google/protobuf/util/Timestamps.java \
java/util/src/main/java/com/google/protobuf/util/Values.java \
java/util/src/test/java/com/google/protobuf/util/DurationsTest.java \
java/util/src/test/java/com/google/protobuf/util/FieldMaskTreeTest.java \
java/util/src/test/java/com/google/protobuf/util/FieldMaskUtilTest.java \
java/util/src/test/java/com/google/protobuf/util/JsonFormatTest.java \
java/util/src/test/java/com/google/protobuf/util/StructsTest.java \
java/util/src/test/java/com/google/protobuf/util/TimestampsTest.java \
java/util/src/test/java/com/google/protobuf/util/ValuesTest.java \
java/util/src/test/proto/com/google/protobuf/util/json_test.proto

@@ -1372,63 +1387,68 @@ js_EXTRA_DIST= \

all_EXTRA_DIST=$(csharp_EXTRA_DIST) $(java_EXTRA_DIST) $(objectivec_EXTRA_DIST) $(php_EXTRA_DIST) $(python_EXTRA_DIST) $(ruby_EXTRA_DIST) $(js_EXTRA_DIST)

EXTRA_DIST = $(@DIST_LANG@_EXTRA_DIST) \
autogen.sh \
generate_descriptor_proto.sh \
README.md \
LICENSE \
CONTRIBUTORS.txt \
CHANGES.txt \
update_file_lists.sh \
BUILD \
WORKSPACE \
cmake/CMakeLists.txt \
cmake/README.md \
cmake/conformance.cmake \
cmake/examples.cmake \
cmake/extract_includes.bat.in \
cmake/install.cmake \
cmake/libprotobuf.cmake \
cmake/libprotobuf-lite.cmake \
cmake/libprotoc.cmake \
cmake/protobuf-config-version.cmake.in \
cmake/protobuf-config.cmake.in \
cmake/protobuf-lite.pc.cmake \
cmake/protobuf-module.cmake.in \
cmake/protobuf-options.cmake \
cmake/protobuf.pc.cmake \
cmake/protoc.cmake \
cmake/tests.cmake \
cmake/version.rc.in \
compiler_config_setting.bzl \
build_files_updated_unittest.sh \
cc_proto_blacklist_test.bzl \
editors/README.txt \
editors/proto.vim \
editors/protobuf-mode.el \
examples/AddPerson.java \
examples/BUILD \
examples/CMakeLists.txt \
examples/ListPeople.java \
examples/Makefile \
examples/README.md \
examples/WORKSPACE \
examples/add_person.cc \
examples/add_person.dart \
examples/add_person.go \
examples/add_person.py \
examples/add_person_test.go \
examples/addressbook.proto \
examples/list_people.cc \
examples/list_people.dart \
examples/list_people.go \
examples/list_people.py \
examples/list_people_test.go \
examples/pubspec.yaml \
protobuf.bzl \
protobuf_deps.bzl \
third_party/zlib.BUILD \
util/python/BUILD \
EXTRA_DIST = $(@DIST_LANG@_EXTRA_DIST) \
autogen.sh \
generate_descriptor_proto.sh \
README.md \
LICENSE \
CONTRIBUTORS.txt \
CHANGES.txt \
update_file_lists.sh \
BUILD \
WORKSPACE \
cmake/CMakeLists.txt \
cmake/README.md \
cmake/conformance.cmake \
cmake/examples.cmake \
cmake/extract_includes.bat.in \
cmake/install.cmake \
cmake/libprotobuf.cmake \
cmake/libprotobuf-lite.cmake \
cmake/libprotoc.cmake \
cmake/protobuf-config-version.cmake.in \
cmake/protobuf-config.cmake.in \
cmake/protobuf-lite.pc.cmake \
cmake/protobuf-module.cmake.in \
cmake/protobuf-options.cmake \
cmake/protobuf.pc.cmake \
cmake/protoc.cmake \
cmake/tests.cmake \
cmake/version.rc.in \
compiler_config_setting.bzl \
build_files_updated_unittest.sh \
cc_proto_blacklist_test.bzl \
editors/README.txt \
editors/proto.vim \
editors/protobuf-mode.el \
examples/AddPerson.java \
examples/BUILD \
examples/CMakeLists.txt \
examples/ListPeople.java \
examples/Makefile \
examples/README.md \
examples/WORKSPACE \
examples/add_person.cc \
examples/add_person.dart \
examples/add_person.py \
examples/addressbook.proto \
examples/go/cmd/add_person/add_person.go \
examples/go/cmd/add_person/add_person_test.go \
examples/go/cmd/list_people/list_people.go \
examples/go/cmd/list_people/list_people_test.go \
examples/go/go.sum \
examples/go/go.mod \
examples/list_people.cc \
examples/list_people.dart \
examples/list_people.py \
examples/pubspec.yaml \
maven_install.json \
protobuf.bzl \
protobuf_deps.bzl \
protobuf_release.bzl \
protobuf_version.bzl \
third_party/zlib.BUILD \
util/python/BUILD \
internal.bzl
@@ -1,9 +1,9 @@
Pod::Spec.new do |s|
s.name = 'Protobuf-C++'
s.version = '3.18.1'
s.version = '3.19.4'
s.summary = 'Protocol Buffers v3 runtime library for C++.'
s.homepage = 'https://github.com/google/protobuf'
s.license = '3-Clause BSD License'
s.license = 'BSD-3-Clause'
s.authors = { 'The Protocol Buffers contributors' => 'protobuf@googlegroups.com' }
s.cocoapods_version = '>= 1.0'
@@ -165,7 +165,7 @@ python_add_init: protoc_middleman protoc_middleman2
done \
done

python_cpp_pkg_flags = `pkg-config --cflags --libs python`
python_cpp_pkg_flags = `pkg-config --cflags --libs python3`

lib_LTLIBRARIES = libbenchmark_messages.la
libbenchmark_messages_la_SOURCES = python/python_benchmark_messages.cc
@@ -186,7 +186,7 @@ python-pure-python-benchmark: python_add_init
@echo export DYLD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-pure-python-benchmark
@echo export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=\'python\' >> python-pure-python-benchmark
@echo cp $(srcdir)/python/py_benchmark.py tmp >> python-pure-python-benchmark
@echo python tmp/py_benchmark.py '$$@' >> python-pure-python-benchmark
@echo python3 tmp/py_benchmark.py '$$@' >> python-pure-python-benchmark
@chmod +x python-pure-python-benchmark

python-cpp-reflection-benchmark: python_add_init
@@ -196,7 +196,7 @@ python-cpp-reflection-benchmark: python_add_init
@echo export DYLD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-cpp-reflection-benchmark
@echo export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=\'cpp\' >> python-cpp-reflection-benchmark
@echo cp $(srcdir)/python/py_benchmark.py tmp >> python-cpp-reflection-benchmark
@echo python tmp/py_benchmark.py '$$@' >> python-cpp-reflection-benchmark
@echo python3 tmp/py_benchmark.py '$$@' >> python-cpp-reflection-benchmark
@chmod +x python-cpp-reflection-benchmark

python-cpp-generated-code-benchmark: python_add_init libbenchmark_messages.la
@@ -206,7 +206,7 @@ python-cpp-generated-code-benchmark: python_add_init libbenchmark_messages.la
@echo export DYLD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-cpp-generated-code-benchmark
@echo export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=\'cpp\' >> python-cpp-generated-code-benchmark
@echo cp $(srcdir)/python/py_benchmark.py tmp >> python-cpp-generated-code-benchmark
@echo python tmp/py_benchmark.py --cpp_generated '$$@' >> python-cpp-generated-code-benchmark
@echo python3 tmp/py_benchmark.py --cpp_generated '$$@' >> python-cpp-generated-code-benchmark
@chmod +x python-cpp-generated-code-benchmark

python-pure-python: python-pure-python-benchmark
@@ -87,7 +87,17 @@ To run all the benchmark dataset:

### Java:

First build the Java binary in the usual way with Maven:

```
$ cd java
$ mvn install
```

Assuming that completes successfully,

```
$ cd ../benchmarks
$ make java
```
@@ -1,38 +1,27 @@

package com.google.protobuf;

import com.google.caliper.BeforeExperiment;
import com.google.caliper.AfterExperiment;
import com.google.caliper.Benchmark;
import com.google.caliper.Param;
import com.google.caliper.api.VmOptions;
import com.google.protobuf.ByteString;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.ExtensionRegistry;
import com.google.protobuf.Message;
import com.google.protobuf.benchmarks.Benchmarks.BenchmarkDataset;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.List;

// Caliper sets CICompilerCount to 1 to make sure compilation doesn't run in parallel with itself,
// which keeps TieredCompilation from working. We just disable TieredCompilation by default. In the
// master branch of caliper this has been disabled by default:
// https://github.com/google/caliper/blob/master/caliper-runner/src/main/java/com/google/caliper/runner/target/Jvm.java#L38:14
// but that change hasn't made it into the most recent release yet.
@VmOptions("-XX:-TieredCompilation")
/**
* Basic benchmarks for Java protobuf parsing.
*/
public class ProtoCaliperBenchmark {
public enum BenchmarkMessageType {
GOOGLE_MESSAGE1_PROTO3 {
@Override ExtensionRegistry getExtensionRegistry() { return ExtensionRegistry.newInstance(); }
@Override
ExtensionRegistry getExtensionRegistry() {
return ExtensionRegistry.newInstance();
}
@Override
Message getDefaultInstance() {
return com.google.protobuf.benchmarks.BenchmarkMessage1Proto3.GoogleMessage1
@@ -40,7 +29,9 @@ public class ProtoCaliperBenchmark {
}
},
GOOGLE_MESSAGE1_PROTO2 {
@Override ExtensionRegistry getExtensionRegistry() { return ExtensionRegistry.newInstance(); }
@Override ExtensionRegistry getExtensionRegistry() {
return ExtensionRegistry.newInstance();
}
@Override
Message getDefaultInstance() {
return com.google.protobuf.benchmarks.BenchmarkMessage1Proto2.GoogleMessage1
@@ -48,7 +39,10 @@ public class ProtoCaliperBenchmark {
}
},
GOOGLE_MESSAGE2 {
@Override ExtensionRegistry getExtensionRegistry() { return ExtensionRegistry.newInstance(); }
@Override
ExtensionRegistry getExtensionRegistry() {
return ExtensionRegistry.newInstance();
}
@Override
Message getDefaultInstance() {
return com.google.protobuf.benchmarks.BenchmarkMessage2.GoogleMessage2.getDefaultInstance();
@@ -7,13 +7,18 @@
#include "datasets/google_message3/benchmark_message3.pb.h"
#include "datasets/google_message4/benchmark_message4.pb.h"

static PyMethodDef python_benchmark_methods[] = {
{NULL, NULL, 0, NULL} /* Sentinel */
};
static struct PyModuleDef _module = {PyModuleDef_HEAD_INIT,
"libbenchmark_messages",
"Benchmark messages Python module",
-1,
NULL,
NULL,
NULL,
NULL,
NULL};


PyMODINIT_FUNC
initlibbenchmark_messages() {
extern "C" {
PyMODINIT_FUNC PyInit_libbenchmark_messages() {
benchmarks::BenchmarkDataset().descriptor();
benchmarks::proto3::GoogleMessage1().descriptor();
benchmarks::proto2::GoogleMessage1().descriptor();
@@ -21,9 +26,6 @@ initlibbenchmark_messages() {
benchmarks::google_message3::GoogleMessage3().descriptor();
benchmarks::google_message4::GoogleMessage4().descriptor();

PyObject *m;

m = Py_InitModule("libbenchmark_messages", python_benchmark_methods);
if (m == NULL)
return;
return PyModule_Create(&_module);
}
}
@@ -61,7 +61,7 @@ def __parse_cpp_result(filename):
return
if filename[0] != '/':
filename = os.path.dirname(os.path.abspath(__file__)) + '/' + filename
with open(filename, "rb") as f:
with open(filename, encoding="utf-8") as f:
results = json.loads(f.read())
for benchmark in results["benchmarks"]:
data_filename = "".join(
@@ -96,7 +96,7 @@ def __parse_synthetic_result(filename):
return
if filename[0] != "/":
filename = os.path.dirname(os.path.abspath(__file__)) + "/" + filename
with open(filename, "rb") as f:
with open(filename, encoding="utf-8") as f:
results = json.loads(f.read())
for benchmark in results["benchmarks"]:
__results.append({
@@ -126,7 +126,7 @@ def __parse_python_result(filename):
return
if filename[0] != '/':
filename = os.path.dirname(os.path.abspath(__file__)) + '/' + filename
with open(filename, "rb") as f:
with open(filename, encoding="utf-8") as f:
results_list = json.loads(f.read())
for results in results_list:
for result in results:
@@ -176,7 +176,7 @@ def __parse_java_result(filename):
return
if filename[0] != '/':
filename = os.path.dirname(os.path.abspath(__file__)) + '/' + filename
with open(filename, "rb") as f:
with open(filename, encoding="utf-8") as f:
results = json.loads(f.read())
for result in results:
total_weight = 0
@@ -212,7 +212,7 @@ def __parse_go_result(filename):
return
if filename[0] != '/':
filename = os.path.dirname(os.path.abspath(__file__)) + '/' + filename
with open(filename, "rb") as f:
with open(filename, encoding="utf-8") as f:
for line in f:
result_list = re.split(r"[\ \t]+", line)
if result_list[0][:9] != "Benchmark":
@@ -252,7 +252,7 @@ def __parse_custom_result(filename, language):
return
if filename[0] != '/':
filename = os.path.dirname(os.path.abspath(__file__)) + '/' + filename
with open(filename, "rb") as f:
with open(filename, encoding="utf-8") as f:
results = json.loads(f.read())
for result in results:
_, avg_size = __get_data_size(result["filename"])
@@ -41,6 +41,7 @@ if (CMAKE_CXX_COMPILER_ID MATCHES Intel)
endif()

# Options
option(protobuf_INSTALL "Install protobuf binaries and files" ON)
if(WITH_PROTOC)
set(protobuf_PROTOC_EXE ${WITH_PROTOC} CACHE FILEPATH "Protocol Buffer Compiler executable" FORCE)
endif()
@@ -182,7 +183,11 @@ else (protobuf_BUILD_SHARED_LIBS)
# making programmatic control difficult. Prefer the functionality in newer
# CMake versions when available.
if(CMAKE_VERSION VERSION_GREATER 3.15 OR CMAKE_VERSION VERSION_EQUAL 3.15)
set(CMAKE_MSVC_RUNTIME_LIBRARY MultiThreaded$<$<CONFIG:Debug>:Debug>)
if (protobuf_MSVC_STATIC_RUNTIME)
set(CMAKE_MSVC_RUNTIME_LIBRARY MultiThreaded$<$<CONFIG:Debug>:Debug>)
else()
set(CMAKE_MSVC_RUNTIME_LIBRARY MultiThreaded$<$<CONFIG:Debug>:Debug>DLL)
endif()
else()
# In case we are building static libraries, link also the runtime library statically
# so that MSVCR*.DLL is not required at runtime.
@@ -210,9 +215,7 @@ if (MSVC)
add_definitions(/utf-8)
# MSVC warning suppressions
add_definitions(
/wd4018 # 'expression' : signed/unsigned mismatch
/wd4065 # switch statement contains 'default' but no 'case' labels
/wd4146 # unary minus operator applied to unsigned type, result still unsigned
/wd4244 # 'conversion' conversion from 'type1' to 'type2', possible loss of data
/wd4251 # 'identifier' : class 'type' needs to have dll-interface to be used by clients of class 'type2'
/wd4267 # 'var' : conversion from 'size_t' to 'type', possible loss of data
@@ -294,6 +297,7 @@ if (protobuf_BUILD_TESTS OR protobuf_BUILD_CONFORMANCE OR protobuf_BUILD_EXAMPLE
endif ()

if (protobuf_BUILD_TESTS)
enable_testing()
include(tests.cmake)
endif (protobuf_BUILD_TESTS)

@@ -301,7 +305,9 @@ if (protobuf_BUILD_CONFORMANCE)
include(conformance.cmake)
endif (protobuf_BUILD_CONFORMANCE)

include(install.cmake)
if (protobuf_INSTALL)
include(install.cmake)
endif (protobuf_INSTALL)

if (protobuf_BUILD_EXAMPLES)
include(examples.cmake)
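The `protobuf_INSTALL` option introduced above mainly matters when protobuf is vendored into a larger CMake build. A minimal sketch of a hypothetical consumer project that turns it off; the directory layout and target names are assumptions, not part of this commit:

```cmake
# Hypothetical superproject that embeds protobuf as a subdirectory.
cmake_minimum_required(VERSION 3.15)
project(consumer_app CXX)

# Skip protobuf's install rules and tests in the vendored build.
set(protobuf_INSTALL OFF CACHE BOOL "" FORCE)
set(protobuf_BUILD_TESTS OFF CACHE BOOL "" FORCE)
add_subdirectory(third_party/protobuf/cmake)

add_executable(consumer_app main.cc)
target_link_libraries(consumer_app protobuf::libprotobuf)
```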
cmake/README.md
@@ -36,6 +36,10 @@ If *git* command is not available from *Command Prompt*, add it to system *PATH*

C:\Path\to>set PATH=%PATH%;C:\Program Files\Git\cmd

Optionally, you will want to download [ninja](https://ninja-build.org/) and add it to your *PATH* variable.

C:\Path\to>set PATH=%PATH%;C:\tools\ninja

Good. Now you are ready to continue.

Getting Sources
@@ -52,29 +56,25 @@ download `protobuf-all-[VERSION].tar.gz`.

Or you can use git to clone from protobuf git repository.

C:\Path\to> git clone -b [release_tag] https://github.com/protocolbuffers/protobuf.git
C:\Path\to> mkdir src & cd src
C:\Path\to\src> git clone -b [release_tag] https://github.com/protocolbuffers/protobuf.git

Where *[release_tag]* is a git tag like *v3.0.0-beta-1* or a branch name like *master*
if you want to get the latest code.

Go to the project folder:

C:\Path\to>cd protobuf
C:\Path\to\protobuf>
C:\Path\to\src> cd protobuf
C:\Path\to\src\protobuf>

Remember to update any submodules if you are using git clone (you can skip this
step if you are using a release .tar.gz or .zip package):

```console
C:\Path\to> git submodule update --init --recursive
C:\Path\to\src\protobuf> git submodule update --init --recursive
```

Now go to *cmake* folder in protobuf sources:

C:\Path\to\protobuf>cd cmake
C:\Path\to\protobuf\cmake>

Good. Now you are ready to *CMake* configuration.
Good. Now you are ready for *CMake* configuration.

CMake Configuration
===================
@@ -82,71 +82,119 @@ CMake Configuration
*CMake* supports a lot of different
[generators](http://www.cmake.org/cmake/help/latest/manual/cmake-generators.7.html)
for various native build systems.
We are only interested in
[Makefile](http://www.cmake.org/cmake/help/latest/manual/cmake-generators.7.html#makefile-generators)
and
[Visual Studio](http://www.cmake.org/cmake/help/latest/manual/cmake-generators.7.html#visual-studio-generators)
generators.

We will use shadow building to separate the temporary files from the protobuf source code.
Of most interest to Windows programmers are the following:

* [Makefile](http://www.cmake.org/cmake/help/latest/manual/cmake-generators.7.html#makefile-generators).
This generates NMake Makefiles for Visual Studio. These work, but they are rather slow.

* [Visual Studio](http://www.cmake.org/cmake/help/latest/manual/cmake-generators.7.html#visual-studio-generators)
This generates a Visual Studio solution for the project.

* [Ninja](https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html#ninja-generator)
This uses the external tool [Ninja](https://ninja-build.org/) to build. It is the fastest solution available.

Note that as of Visual Studio 2015, Visual Studio includes
[support for opening directly CMake-based projects](https://docs.microsoft.com/en-us/cpp/build/cmake-projects-in-visual-studio).

It is considered good practice not to build CMake projects in the source tree but in a separate folder.

Create a temporary *build* folder and change your working directory to it:

C:\Path\to\protobuf\cmake>mkdir build & cd build
C:\Path\to\protobuf\cmake\build>
mkdir C:\Path\to\build\protobuf
cd C:\Path\to\build\protobuf
C:\Path\to\build\protobuf>

The *Makefile* generator can build the project in only one configuration, so you need to build
The *Makefile* and *Ninja* generators can build the project in only one configuration, so you need to build
a separate folder for each configuration.

To start using a *Release* configuration:
To start using a *Release* configuration via the *NMake* generator:

C:\Path\to\protobuf\cmake\build>mkdir release & cd release
C:\Path\to\protobuf\cmake\build\release>cmake -G "NMake Makefiles" ^
C:\Path\to\build\protobuf>mkdir release & cd release
C:\Path\to\build\protobuf\release>cmake -G "NMake Makefiles" ^
-DCMAKE_BUILD_TYPE=Release ^
-DCMAKE_INSTALL_PREFIX=../../../../install ^
../..
-DCMAKE_INSTALL_PREFIX=C:\Path\to\install ^
C:\Path\to\src\protobuf

It will generate *nmake* *Makefile* in current directory.
It will generate an *NMake* *Makefile* in the current directory.

To use *Debug* configuration:
To use *Debug* configuration using *Ninja*:

C:\Path\to\protobuf\cmake\build>mkdir debug & cd debug
C:\Path\to\protobuf\cmake\build\debug>cmake -G "NMake Makefiles" ^
C:\Path\to\build\protobuf>mkdir debug & cd debug
C:\Path\to\build\protobuf\debug>cmake -G "Ninja" ^
-DCMAKE_BUILD_TYPE=Debug ^
-DCMAKE_INSTALL_PREFIX=../../../../install ^
../..
-DCMAKE_INSTALL_PREFIX=C:\Path\to\install ^
C:\Path\to\src\protobuf

It will generate *nmake* *Makefile* in current directory.
It will generate *Ninja* build scripts in current directory.

To create *Visual Studio* solution file:
The *Visual Studio* generator is multi-configuration: it will generate a single *.sln* file that can be used for both *Debug* and *Release*:

C:\Path\to\protobuf\cmake\build>mkdir solution & cd solution
C:\Path\to\protobuf\cmake\build\solution>cmake -G "Visual Studio 16 2019" ^
-DCMAKE_INSTALL_PREFIX=../../../../install ^
../..
C:\Path\to\build\protobuf>mkdir solution & cd solution
C:\Path\to\build\protobuf\solution>cmake -G "Visual Studio 16 2019" ^
-DCMAKE_INSTALL_PREFIX=C:\Path\to\install ^
C:\Path\to\src\protobuf

It will generate *Visual Studio* solution file *protobuf.sln* in current directory.

If the *gmock* directory does not exist, and you do not want to build protobuf unit tests,
you need to add *cmake* command argument `-Dprotobuf_BUILD_TESTS=OFF` to disable testing.
Unit Tests
----------

To make a *Visual Studio* file for Visual Studio 16 2019, create the *Visual Studio*
solution file above and edit the CMakeCache file.
Unit tests are being built along with the rest of protobuf. The unit tests require Google Mock (now a part of Google Test).

C:Path\to\protobuf\cmake\build\solution\CMakeCache
A copy of [Google Test](https://github.com/google/googletest) is included as a Git submodule in the `third-party/googletest` folder.
(You do need to initialize the Git submodules as explained above.)

Then create the *Visual Studio* solution file again
Alternately, if you want to use protobuf in a larger set-up, you may want to use the standard CMake approach where
you build and install a shared copy of Google Test.

After you've built and installed your Google Test copy, you need to add the following definition to your *cmake* command line
during the configuration step: `-Dprotobuf_USE_EXTERNAL_GTEST=ON`.
This will cause the standard CMake `find_package(GTest REQUIRED)` to be used.

[find_package](https://cmake.org/cmake/help/latest/command/find_package.html) will search in a default location,
which on Windows is *C:\Program Files*. This is most likely not what you want. You will want instead to search for
Google Test in your project's root directory (i.e. the same directory you've passed to `CMAKE_INSTALL_PREFIX` when
building Google Test). For this, you need to set the `CMAKE_PREFIX_PATH` CMake variable. (There are other ways in CMake,
see the [manual](https://cmake.org/cmake/help/latest/command/find_package.html) for details.)

For example:

C:\Path\to\build\protobuf>mkdir solution & cd solution
C:\Path\to\build\protobuf\solution>cmake -G "Visual Studio 16 2019" ^
-DCMAKE_INSTALL_PREFIX=C:\Path\to\install ^
-DCMAKE_PREFIX_PATH=C:\Path\to\my_big_project ^
-Dprotobuf_USE_EXTERNAL_GTEST=ON ^
C:\Path\to\src\protobuf

In most cases, `CMAKE_PREFIX_PATH` and `CMAKE_INSTALL_PREFIX` will point to the same directory.

To disable testing completely, you need to add the following argument to your *cmake* command line: `-Dprotobuf_BUILD_TESTS=OFF`.

For example:

C:\Path\to\build\protobuf\solution>cmake -G "Visual Studio 16 2019" ^
-DCMAKE_INSTALL_PREFIX=C:\Path\to\install ^
-Dprotobuf_BUILD_TESTS=OFF ^
C:\Path\to\src\protobuf

Compiling
=========

To compile protobuf:
The standard way to compile a *CMake* project is `cmake --build <directory>`.

C:\Path\to\protobuf\cmake\build\release>nmake

Note that if your generator supports multiple configurations, you will probably want to specify which one to build:

cmake --build C:\Path\to\build\protobuf\solution --config Release

You can also directly run the build tool you've configured:

C:\Path\to\build\protobuf\release>nmake

or

C:\Path\to\protobuf\cmake\build\debug>nmake
C:\Path\to\build\protobuf\debug>ninja

And wait for the compilation to finish.

@@ -164,11 +212,15 @@ Testing
To run unit-tests, first you must compile protobuf as described above.
Then run:

C:\Path\to\protobuf\cmake\build\release>nmake check
C:\Path\to\protobuf\cmake\build\release>ctest --progress --output-on-failure

You can also build the `check` target (not idiomatic CMake usage, though):

C:\Path\to\protobuf\cmake\build\release>cmake --build . --target check

or

C:\Path\to\protobuf\cmake\build\debug>nmake check
C:\Path\to\build\protobuf\release>ninja check

You can also build project *check* from Visual Studio solution.
Yes, it may sound strange, but it works.
@@ -183,9 +235,9 @@ You should see output similar to:
[==========] 1546 tests from 165 test cases ran. (2529 ms total)
[ PASSED ] 1546 tests.

To run specific tests:
To run specific tests, you need to pass some command line arguments to the test program itself:

C:\Path\to\protobuf>cmake\build\release\tests.exe --gtest_filter=AnyTest*
C:\Path\to\build\protobuf\release>tests.exe --gtest_filter=AnyTest*
Running main() from gmock_main.cc
Note: Google Test filter = AnyTest*
[==========] Running 3 tests from 1 test case.
@@ -210,13 +262,17 @@ If all tests are passed, safely continue.
Installing
==========

To install protobuf to the specified *install* folder:
To install protobuf to the *install* folder you've specified in the configuration step, you need to build the `install` target:

C:\Path\to\protobuf\cmake\build\release>nmake install
cmake --build C:\Path\to\build\protobuf\solution --config Release --target install

Or if you prefer:

C:\Path\to\build\protobuf\release>nmake install

or

C:\Path\to\protobuf\cmake\build\debug>nmake install
C:\Path\to\build\protobuf\debug>ninja install

You can also build project *INSTALL* from Visual Studio solution.
It sounds not so strange and it works.
@@ -280,16 +336,16 @@ You can also compile it from source by yourself.

Getting sources:

C:\Path\to>git clone -b v1.2.8 https://github.com/madler/zlib.git
C:\Path\to>cd zlib
C:\Path\to\src>git clone -b v1.2.8 https://github.com/madler/zlib.git
C:\Path\to\src>cd zlib

Compiling and Installing:

C:\Path\to\zlib>mkdir build & cd build
C:\Path\to\zlib\build>mkdir release & cd release
C:\Path\to\zlib\build\release>cmake -G "NMake Makefiles" -DCMAKE_BUILD_TYPE=Release ^
-DCMAKE_INSTALL_PREFIX=../../../install ../..
C:\Path\to\zlib\build\release>nmake & nmake install
C:\Path\to\src\zlib>mkdir C:\Path\to\build\zlib & cd C:\Path\to\build\zlib
C:\Path\to\build\zlib>mkdir release & cd release
C:\Path\to\build\zlib\release>cmake -G "Ninja" -DCMAKE_BUILD_TYPE=Release ^
-DCMAKE_INSTALL_PREFIX=C:\Path\to\install C:\Path\to\src\zlib
C:\Path\to\src\zlib\build\release>cmake --build . --target install

You can make *debug* version or use *Visual Studio* generator also as before for the
protobuf project.
@@ -308,8 +364,8 @@ the headers or the .lib file in the right directory.

If you already have ZLIB library and headers at some other location on your system then alternatively you can define following configuration flags to locate them:

-DZLIB_INCLUDE_DIR=<path to dir containing zlib headers>
-DZLIB_LIB=<path to dir containing zlib>
-DZLIB_INCLUDE_DIR=<path to dir containing zlib headers>
-DZLIB_LIB=<path to dir containing zlib>

Build and test protobuf as usual.

@@ -320,8 +376,6 @@ The following warnings have been disabled while building the protobuf libraries
and compiler. You may have to disable some of them in your own project as
well, or live with them.

* C4018 - 'expression' : signed/unsigned mismatch
* C4146 - unary minus operator applied to unsigned type, result still unsigned
* C4244 - Conversion from 'type1' to 'type2', possible loss of data.
* C4251 - 'identifier' : class 'type' needs to have dll-interface to be used by
clients of class 'type2'
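As background for the install and testing steps described above, a downstream project would normally consume the installed protobuf through `find_package`. A minimal sketch, assuming the install prefix used in the README and hypothetical source file names:

```cmake
# Hypothetical consumer CMakeLists.txt; configure it with
#   cmake -DCMAKE_PREFIX_PATH=C:/Path/to/install <source dir>
cmake_minimum_required(VERSION 3.15)
project(addressbook_demo CXX)

find_package(protobuf CONFIG REQUIRED)  # locates protobuf-config.cmake in the install folder

add_executable(add_person add_person.cc addressbook.pb.cc)
target_link_libraries(add_person protobuf::libprotobuf)
```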
@@ -59,11 +59,8 @@ copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_enum_reflec
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_enum_util.h" include\google\protobuf\generated_enum_util.h
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_message_bases.h" include\google\protobuf\generated_message_bases.h
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_message_reflection.h" include\google\protobuf\generated_message_reflection.h
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_message_table_driven.h" include\google\protobuf\generated_message_table_driven.h
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_message_table_driven_lite.h" include\google\protobuf\generated_message_table_driven_lite.h
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_message_tctable_decl.h" include\google\protobuf\generated_message_tctable_decl.h
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_message_tctable_impl.h" include\google\protobuf\generated_message_tctable_impl.h
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_message_tctable_impl.inc" include\google\protobuf\generated_message_tctable_impl.inc
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_message_util.h" include\google\protobuf\generated_message_util.h
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\has_bits.h" include\google\protobuf\has_bits.h
copy "${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\implicit_weak_message.h" include\google\protobuf\implicit_weak_message.h
@@ -101,7 +101,7 @@ if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(libprotobuf-lite log)
endif()
target_include_directories(libprotobuf-lite PUBLIC ${protobuf_source_dir}/src)
if(MSVC AND protobuf_BUILD_SHARED_LIBS)
if(protobuf_BUILD_SHARED_LIBS)
target_compile_definitions(libprotobuf-lite
PUBLIC PROTOBUF_USE_DLLS
PRIVATE LIBPROTOBUF_EXPORTS)
@@ -118,13 +118,15 @@ if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(libprotobuf log)
endif()
target_include_directories(libprotobuf PUBLIC ${protobuf_source_dir}/src)
if(MSVC AND protobuf_BUILD_SHARED_LIBS)
if(protobuf_BUILD_SHARED_LIBS)
target_compile_definitions(libprotobuf
PUBLIC PROTOBUF_USE_DLLS
PRIVATE LIBPROTOBUF_EXPORTS)
endif()
set_target_properties(libprotobuf PROPERTIES
VERSION ${protobuf_VERSION}
# Use only the first SO version component for compatibility with Makefile emitted SONAME.
SOVERSION 30
OUTPUT_NAME ${LIB_PREFIX}protobuf
DEBUG_POSTFIX "${protobuf_DEBUG_POSTFIX}")
add_library(protobuf::libprotobuf ALIAS libprotobuf)
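The hunks above change the guard from `if(MSVC AND protobuf_BUILD_SHARED_LIBS)` to `if(protobuf_BUILD_SHARED_LIBS)`, so `PROTOBUF_USE_DLLS` is now a PUBLIC compile definition on every platform when shared libraries are built. A minimal sketch of what that means for a consumer; target and file names are assumptions:

```cmake
# Hypothetical consumer linking the shared library target defined above.
add_executable(demo demo.cc)
target_link_libraries(demo protobuf::libprotobuf)
# PROTOBUF_USE_DLLS is PUBLIC on libprotobuf, so demo.cc picks it up automatically;
# no manual add_definitions(-DPROTOBUF_USE_DLLS) is needed.
```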
@@ -11,7 +11,7 @@ function(protobuf_generate)
include(CMakeParseArguments)

set(_options APPEND_PATH)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR PLUGIN)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR PLUGIN PLUGIN_OPTIONS)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
@@ -39,9 +39,18 @@ function(protobuf_generate)
endif()

if(protobuf_generate_EXPORT_MACRO AND protobuf_generate_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_EXPORT_MACRO}:")
set(_dll_export_decl "dllexport_decl=${protobuf_generate_EXPORT_MACRO}")
endif()


foreach(_option ${_dll_export_decl} ${protobuf_generate_PLUGIN_OPTIONS})
# append comma - not using CMake lists and string replacement as users
# might have semicolons in options
if(_plugin_options)
set( _plugin_options "${_plugin_options},")
endif()
set(_plugin_options "${_plugin_options}${_option}")
endforeach()

if(protobuf_generate_PLUGIN)
set(_plugin "--plugin=${protobuf_generate_PLUGIN}")
endif()
@@ -127,12 +136,20 @@ function(protobuf_generate)
endforeach()
list(APPEND _generated_srcs_all ${_generated_srcs})

set(_comment "Running ${protobuf_generate_LANGUAGE} protocol buffer compiler on ${_proto}")
if(protobuf_generate_PROTOC_OPTIONS)
set(_comment "${_comment}, protoc-options: ${protobuf_generate_PROTOC_OPTIONS}")
endif()
if(_plugin_options)
set(_comment "${_comment}, plugin-options: ${_plugin_options}")
endif()

add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
ARGS ${protobuf_generate_PROTOC_OPTIONS} --${protobuf_generate_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_PROTOC_OUT_DIR} ${_plugin} ${_protobuf_include_path} ${_abs_file}
COMMAND protobuf::protoc
ARGS ${protobuf_generate_PROTOC_OPTIONS} --${protobuf_generate_LANGUAGE}_out ${_plugin_options}:${protobuf_generate_PROTOC_OUT_DIR} ${_plugin} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc
COMMENT "Running ${protobuf_generate_LANGUAGE} protocol buffer compiler on ${_proto}. Custom options: ${protobuf_generate_PROTOC_OPTIONS}"
COMMENT ${_comment}
VERBATIM )
endforeach()
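The new `PLUGIN_OPTIONS` value collected above is prepended to the output directory as `<options>:<out_dir>` on the protoc command line. A hedged usage sketch; the target name and the `lite` generator option are illustrative assumptions rather than part of this change:

```cmake
# Hypothetical call site for protobuf_generate with the new PLUGIN_OPTIONS argument.
find_package(protobuf CONFIG REQUIRED)

add_library(my_protos OBJECT addressbook.proto)
target_link_libraries(my_protos PUBLIC protobuf::libprotobuf)

protobuf_generate(
  TARGET my_protos
  LANGUAGE cpp
  PLUGIN_OPTIONS "lite"            # emitted as --cpp_out lite:<out dir>
  PROTOC_OUT_DIR "${CMAKE_CURRENT_BINARY_DIR}")
```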
@@ -94,7 +94,7 @@ function(_protobuf_find_libraries name filename)
elseif(${name}_LIBRARY)
# Honor cache entry used by CMake 3.5 and lower.
set(${name}_LIBRARIES "${${name}_LIBRARY}" PARENT_SCOPE)
else()
elseif(TARGET protobuf::lib${filename})
get_target_property(${name}_LIBRARY_RELEASE protobuf::lib${filename}
LOCATION_RELEASE)
get_target_property(${name}_LIBRARY_RELWITHDEBINFO protobuf::lib${filename}
@@ -134,23 +134,25 @@ get_target_property(Protobuf_INCLUDE_DIRS protobuf::libprotobuf
INTERFACE_INCLUDE_DIRECTORIES)

# Set the protoc Executable
get_target_property(Protobuf_PROTOC_EXECUTABLE protobuf::protoc
IMPORTED_LOCATION_RELEASE)
if(NOT EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
if(NOT Protobuf_PROTOC_EXECUTABLE AND TARGET protobuf::protoc)
get_target_property(Protobuf_PROTOC_EXECUTABLE protobuf::protoc
IMPORTED_LOCATION_RELWITHDEBINFO)
endif()
if(NOT EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
get_target_property(Protobuf_PROTOC_EXECUTABLE protobuf::protoc
IMPORTED_LOCATION_MINSIZEREL)
endif()
if(NOT EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
get_target_property(Protobuf_PROTOC_EXECUTABLE protobuf::protoc
IMPORTED_LOCATION_DEBUG)
endif()
if(NOT EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
get_target_property(Protobuf_PROTOC_EXECUTABLE protobuf::protoc
IMPORTED_LOCATION_NOCONFIG)
IMPORTED_LOCATION_RELEASE)
if(NOT EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
get_target_property(Protobuf_PROTOC_EXECUTABLE protobuf::protoc
IMPORTED_LOCATION_RELWITHDEBINFO)
endif()
if(NOT EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
get_target_property(Protobuf_PROTOC_EXECUTABLE protobuf::protoc
IMPORTED_LOCATION_MINSIZEREL)
endif()
if(NOT EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
get_target_property(Protobuf_PROTOC_EXECUTABLE protobuf::protoc
IMPORTED_LOCATION_DEBUG)
endif()
if(NOT EXISTS "${Protobuf_PROTOC_EXECUTABLE}")
get_target_property(Protobuf_PROTOC_EXECUTABLE protobuf::protoc
IMPORTED_LOCATION_NOCONFIG)
endif()
endif()

# Version info variable
@ -1,32 +1,41 @@
|
||||
if (NOT EXISTS "${PROJECT_SOURCE_DIR}/../third_party/googletest/CMakeLists.txt")
|
||||
message(FATAL_ERROR
|
||||
"Cannot find third_party/googletest directory that's needed to "
|
||||
"build tests. If you use git, make sure you have cloned submodules:\n"
|
||||
" git submodule update --init --recursive\n"
|
||||
"If instead you want to skip tests, run cmake with:\n"
|
||||
" cmake -Dprotobuf_BUILD_TESTS=OFF\n")
|
||||
endif()
|
||||
option(protobuf_USE_EXTERNAL_GTEST "Use external Google Test (i.e. not the one in third_party/googletest)" OFF)
|
||||
|
||||
option(protobuf_ABSOLUTE_TEST_PLUGIN_PATH
|
||||
"Using absolute test_plugin path in tests" ON)
|
||||
mark_as_advanced(protobuf_ABSOLUTE_TEST_PLUGIN_PATH)
|
||||
|
||||
set(googlemock_source_dir "${protobuf_source_dir}/third_party/googletest/googlemock")
|
||||
set(googletest_source_dir "${protobuf_source_dir}/third_party/googletest/googletest")
|
||||
include_directories(
|
||||
${googlemock_source_dir}
|
||||
${googletest_source_dir}
|
||||
${googletest_source_dir}/include
|
||||
${googlemock_source_dir}/include
|
||||
)
|
||||
if (protobuf_USE_EXTERNAL_GTEST)
|
||||
find_package(GTest REQUIRED)
|
||||
else()
|
||||
if (NOT EXISTS "${PROJECT_SOURCE_DIR}/../third_party/googletest/CMakeLists.txt")
|
||||
message(FATAL_ERROR
|
||||
"Cannot find third_party/googletest directory that's needed to "
|
||||
"build tests. If you use git, make sure you have cloned submodules:\n"
|
||||
" git submodule update --init --recursive\n"
|
||||
"If instead you want to skip tests, run cmake with:\n"
|
||||
" cmake -Dprotobuf_BUILD_TESTS=OFF\n")
|
||||
endif()
|
||||
|
||||
add_library(gmock STATIC
|
||||
"${googlemock_source_dir}/src/gmock-all.cc"
|
||||
"${googletest_source_dir}/src/gtest-all.cc"
|
||||
)
|
||||
target_link_libraries(gmock ${CMAKE_THREAD_LIBS_INIT})
|
||||
add_library(gmock_main STATIC "${googlemock_source_dir}/src/gmock_main.cc")
|
||||
target_link_libraries(gmock_main gmock)
|
||||
set(googlemock_source_dir "${protobuf_source_dir}/third_party/googletest/googlemock")
|
||||
set(googletest_source_dir "${protobuf_source_dir}/third_party/googletest/googletest")
|
||||
include_directories(
|
||||
${googlemock_source_dir}
|
||||
${googletest_source_dir}
|
||||
${googletest_source_dir}/include
|
||||
${googlemock_source_dir}/include
|
||||
)
|
||||
|
||||
add_library(gmock STATIC
|
||||
"${googlemock_source_dir}/src/gmock-all.cc"
|
||||
"${googletest_source_dir}/src/gtest-all.cc"
|
||||
)
|
||||
target_link_libraries(gmock ${CMAKE_THREAD_LIBS_INIT})
|
||||
add_library(gmock_main STATIC "${googlemock_source_dir}/src/gmock_main.cc")
|
||||
target_link_libraries(gmock_main gmock)
|
||||
|
||||
add_library(GTest::gmock ALIAS gmock)
|
||||
add_library(GTest::gmock_main ALIAS gmock_main)
|
||||
endif()
|
||||
|
||||
set(lite_test_protos
|
||||
google/protobuf/map_lite_unittest.proto
|
||||
@ -224,7 +233,12 @@ if(MINGW)
|
||||
endif()
|
||||
|
||||
add_executable(tests ${tests_files} ${common_test_files} ${tests_proto_files} ${lite_test_proto_files})
|
||||
target_link_libraries(tests libprotoc libprotobuf gmock_main)
|
||||
if (MSVC)
|
||||
target_compile_options(tests PRIVATE
|
||||
/wd4146 # unary minus operator applied to unsigned type, result still unsigned
|
||||
)
|
||||
endif()
|
||||
target_link_libraries(tests libprotoc libprotobuf GTest::gmock_main)
|
||||
|
||||
set(test_plugin_files
|
||||
${protobuf_source_dir}/src/google/protobuf/compiler/mock_code_generator.cc
|
||||
@ -234,21 +248,25 @@ set(test_plugin_files
|
||||
)
|
||||
|
||||
add_executable(test_plugin ${test_plugin_files})
|
||||
target_link_libraries(test_plugin libprotoc libprotobuf gmock)
|
||||
target_link_libraries(test_plugin libprotoc libprotobuf GTest::gmock)
|
||||
|
||||
set(lite_test_files
|
||||
${protobuf_source_dir}/src/google/protobuf/lite_unittest.cc
|
||||
)
|
||||
add_executable(lite-test ${lite_test_files} ${common_lite_test_files} ${lite_test_proto_files})
|
||||
target_link_libraries(lite-test libprotobuf-lite gmock_main)
|
||||
target_link_libraries(lite-test libprotobuf-lite GTest::gmock_main)
|
||||
|
||||
set(lite_arena_test_files
|
||||
${protobuf_source_dir}/src/google/protobuf/lite_arena_unittest.cc
|
||||
)
|
||||
add_executable(lite-arena-test ${lite_arena_test_files} ${common_lite_test_files} ${lite_test_proto_files})
|
||||
target_link_libraries(lite-arena-test libprotobuf-lite gmock_main)
|
||||
target_link_libraries(lite-arena-test libprotobuf-lite GTest::gmock_main)
|
||||
|
||||
add_custom_target(check
|
||||
COMMAND tests
|
||||
DEPENDS tests test_plugin
|
||||
WORKING_DIRECTORY ${protobuf_source_dir})
|
||||
|
||||
add_test(NAME check
|
||||
COMMAND tests
|
||||
WORKING_DIRECTORY "${protobuf_source_dir}")
|
||||
|
@ -17,7 +17,7 @@ AC_PREREQ(2.59)
# In the SVN trunk, the version should always be the next anticipated release
# version with the "-pre" suffix. (We used to use "-SNAPSHOT" but this pushed
# the size of one file name in the dist tarfile over the 99-char limit.)
AC_INIT([Protocol Buffers],[3.18.1],[protobuf@googlegroups.com],[protobuf])
AC_INIT([Protocol Buffers],[3.19.4],[protobuf@googlegroups.com],[protobuf])

AM_MAINTAINER_MODE([enable])

@ -316,7 +316,7 @@ conformance-java-lite: javac_middleman_lite
conformance-csharp: $(other_language_protoc_outputs)
@echo "Writing shortcut script conformance-csharp..."
@echo '#! /bin/sh' > conformance-csharp
@echo 'dotnet ../csharp/src/Google.Protobuf.Conformance/bin/Release/netcoreapp2.1/Google.Protobuf.Conformance.dll "$$@"' >> conformance-csharp
@echo 'dotnet ../csharp/src/Google.Protobuf.Conformance/bin/Release/netcoreapp3.1/Google.Protobuf.Conformance.dll "$$@"' >> conformance-csharp
@chmod +x conformance-csharp

conformance-php:

@ -41,11 +41,17 @@ function doTest(request) {
var response = new conformance.ConformanceResponse();

try {
if (request.getRequestedOutputFormat() === conformance.WireFormat.JSON) {
if (request.getRequestedOutputFormat() == conformance.WireFormat.JSON) {
response.setSkipped("JSON not supported.");
return response;
}

if (request.getRequestedOutputFormat() ==
conformance.WireFormat.TEXT_FORMAT) {
response.setSkipped('Text format is not supported as output format.');
return response;
}

switch (request.getPayloadCase()) {
case conformance.ConformanceRequest.PayloadCase.PROTOBUF_PAYLOAD: {
if (request.getMessageType() == "protobuf_test_messages.proto3.TestAllTypesProto3") {
@ -67,7 +73,7 @@ function doTest(request) {
} else {
throw "Protobuf request doesn\'t have specific payload type";
}
}
} break;

case conformance.ConformanceRequest.PayloadCase.JSON_PAYLOAD:
response.setSkipped("JSON not supported.");

@ -39,8 +39,8 @@ import sys
import os
from google.protobuf import json_format
from google.protobuf import message
from google3.third_party.protobuf import test_messages_proto3_pb2
from google3.third_party.protobuf import test_messages_proto2_pb2
from google.protobuf import test_messages_proto3_pb2
from google.protobuf import test_messages_proto2_pb2
from google.protobuf import text_format
import conformance_pb2

@ -1,9 +1,18 @@
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.UnpackedOutput.ProtobufOutput
|
||||
@ -22,23 +31,42 @@ Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.UnpackedOu
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.ENUM[4].ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.ENUM[5].ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.FIXED64[2].ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.INT32[7].ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.INT64[2].ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.SFIXED64[2].ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.SINT64[2].ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.UINT32[8].ProtobufOutput
|
||||
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.UINT64[2].ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.UnpackedOutput.ProtobufOutput
|
||||
@ -57,48 +85,78 @@ Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.UnpackedOu
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.DefaultOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.PackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.ENUM[4].ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.ENUM[5].ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.FIXED64[2].ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.INT32[7].ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.INT64[2].ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.SFIXED64[2].ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.SINT64[2].ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.UINT32[8].ProtobufOutput
|
||||
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.UINT64[2].ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.RepeatedScalarSelectsLast.ENUM.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.RepeatedScalarSelectsLast.FIXED64.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.RepeatedScalarSelectsLast.UINT64.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataScalar.ENUM[4].ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataScalar.ENUM[5].ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataScalar.FIXED64[2].ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataScalar.INT32[7].ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataScalar.INT64[2].ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataScalar.SFIXED64[2].ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataScalar.SINT64[2].ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataScalar.UINT32[8].ProtobufOutput
|
||||
Required.Proto2.ProtobufInput.ValidDataScalar.UINT64[2].ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.RepeatedScalarSelectsLast.ENUM.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.RepeatedScalarSelectsLast.FIXED64.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.RepeatedScalarSelectsLast.UINT64.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataScalar.ENUM[4].ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataScalar.ENUM[5].ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataScalar.FIXED64[2].ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataScalar.INT32[7].ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataScalar.INT64[2].ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataScalar.SFIXED64[2].ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataScalar.SINT64[2].ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataScalar.UINT32[8].ProtobufOutput
|
||||
Required.Proto3.ProtobufInput.ValidDataScalar.UINT64[2].ProtobufOutput
|
||||
|
@ -233,7 +233,14 @@ namespace Google.Protobuf
|
||||
writer.Write(PropertySeparator);
|
||||
}
|
||||
|
||||
WriteString(writer, accessor.Descriptor.JsonName);
|
||||
if (settings.PreserveProtoFieldNames)
|
||||
{
|
||||
WriteString(writer, accessor.Descriptor.Name);
|
||||
}
|
||||
else
|
||||
{
|
||||
WriteString(writer, accessor.Descriptor.JsonName);
|
||||
}
|
||||
writer.Write(NameValueSeparator);
|
||||
WriteValue(writer, value);
|
||||
|
||||
@ -816,6 +823,11 @@ namespace Google.Protobuf
|
||||
/// </summary>
|
||||
public bool FormatEnumsAsIntegers { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether to use the original proto field names as defined in the .proto file. Defaults to false.
|
||||
/// </summary>
|
||||
public bool PreserveProtoFieldNames { get; }
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new <see cref="Settings"/> object with the specified formatting of default values
|
||||
@ -832,7 +844,7 @@ namespace Google.Protobuf
|
||||
/// </summary>
|
||||
/// <param name="formatDefaultValues"><c>true</c> if default values (0, empty strings etc) should be formatted; <c>false</c> otherwise.</param>
|
||||
/// <param name="typeRegistry">The <see cref="TypeRegistry"/> to use when formatting <see cref="Any"/> messages.</param>
|
||||
public Settings(bool formatDefaultValues, TypeRegistry typeRegistry) : this(formatDefaultValues, typeRegistry, false)
|
||||
public Settings(bool formatDefaultValues, TypeRegistry typeRegistry) : this(formatDefaultValues, typeRegistry, false, false)
|
||||
{
|
||||
}
|
||||
|
||||
@ -842,32 +854,41 @@ namespace Google.Protobuf
|
||||
/// <param name="formatDefaultValues"><c>true</c> if default values (0, empty strings etc) should be formatted; <c>false</c> otherwise.</param>
|
||||
/// <param name="typeRegistry">The <see cref="TypeRegistry"/> to use when formatting <see cref="Any"/> messages. TypeRegistry.Empty will be used if it is null.</param>
|
||||
/// <param name="formatEnumsAsIntegers"><c>true</c> to format the enums as integers; <c>false</c> to format enums as enum names.</param>
|
||||
/// <param name="preserveProtoFieldNames"><c>true</c> to preserve proto field names; <c>false</c> to convert them to lowerCamelCase.</param>
|
||||
private Settings(bool formatDefaultValues,
|
||||
TypeRegistry typeRegistry,
|
||||
bool formatEnumsAsIntegers)
|
||||
bool formatEnumsAsIntegers,
|
||||
bool preserveProtoFieldNames)
|
||||
{
|
||||
FormatDefaultValues = formatDefaultValues;
|
||||
TypeRegistry = typeRegistry ?? TypeRegistry.Empty;
|
||||
FormatEnumsAsIntegers = formatEnumsAsIntegers;
|
||||
PreserveProtoFieldNames = preserveProtoFieldNames;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new <see cref="Settings"/> object with the specified formatting of default values and the current settings.
|
||||
/// </summary>
|
||||
/// <param name="formatDefaultValues"><c>true</c> if default values (0, empty strings etc) should be formatted; <c>false</c> otherwise.</param>
|
||||
public Settings WithFormatDefaultValues(bool formatDefaultValues) => new Settings(formatDefaultValues, TypeRegistry, FormatEnumsAsIntegers);
|
||||
public Settings WithFormatDefaultValues(bool formatDefaultValues) => new Settings(formatDefaultValues, TypeRegistry, FormatEnumsAsIntegers, PreserveProtoFieldNames);
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new <see cref="Settings"/> object with the specified type registry and the current settings.
|
||||
/// </summary>
|
||||
/// <param name="typeRegistry">The <see cref="TypeRegistry"/> to use when formatting <see cref="Any"/> messages.</param>
|
||||
public Settings WithTypeRegistry(TypeRegistry typeRegistry) => new Settings(FormatDefaultValues, typeRegistry, FormatEnumsAsIntegers);
|
||||
public Settings WithTypeRegistry(TypeRegistry typeRegistry) => new Settings(FormatDefaultValues, typeRegistry, FormatEnumsAsIntegers, PreserveProtoFieldNames);
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new <see cref="Settings"/> object with the specified enums formatting option and the current settings.
|
||||
/// </summary>
|
||||
/// <param name="formatEnumsAsIntegers"><c>true</c> to format the enums as integers; <c>false</c> to format enums as enum names.</param>
|
||||
public Settings WithFormatEnumsAsIntegers(bool formatEnumsAsIntegers) => new Settings(FormatDefaultValues, TypeRegistry, formatEnumsAsIntegers);
|
||||
public Settings WithFormatEnumsAsIntegers(bool formatEnumsAsIntegers) => new Settings(FormatDefaultValues, TypeRegistry, formatEnumsAsIntegers, PreserveProtoFieldNames);
|
||||
|
||||
/// <summary>
|
||||
/// Creates a new <see cref="Settings"/> object with the specified field name formatting option and the current settings.
|
||||
/// </summary>
|
||||
/// <param name="preserveProtoFieldNames"><c>true</c> to preserve proto field names; <c>false</c> to convert them to lowerCamelCase.</param>
|
||||
public Settings WithPreserveProtoFieldNames(bool preserveProtoFieldNames) => new Settings(FormatDefaultValues, TypeRegistry, FormatEnumsAsIntegers, preserveProtoFieldNames);
|
||||
}
|
||||
|
||||
// Effectively a cache of mapping from enum values to the original name as specified in the proto file,
|
||||
|
@ -10164,8 +10164,8 @@ namespace Google.Protobuf.Reflection {
|
||||
/// location.
|
||||
///
|
||||
/// Each element is a field number or an index. They form a path from
|
||||
/// the root FileDescriptorProto to the place where the definition. For
|
||||
/// example, this path:
|
||||
/// the root FileDescriptorProto to the place where the definition occurs.
|
||||
/// For example, this path:
|
||||
/// [ 4, 3, 2, 7, 1 ]
|
||||
/// refers to:
|
||||
/// file.message_type(3) // 4, 3
|
||||
|
@ -65,7 +65,7 @@ namespace Google.Protobuf.WellKnownTypes {
|
||||
/// foo = any.unpack(Foo.class);
|
||||
/// }
|
||||
///
|
||||
/// Example 3: Pack and unpack a message in Python.
|
||||
/// Example 3: Pack and unpack a message in Python.
|
||||
///
|
||||
/// foo = Foo(...)
|
||||
/// any = Any()
|
||||
@ -75,7 +75,7 @@ namespace Google.Protobuf.WellKnownTypes {
|
||||
/// any.Unpack(foo)
|
||||
/// ...
|
||||
///
|
||||
/// Example 4: Pack and unpack a message in Go
|
||||
/// Example 4: Pack and unpack a message in Go
|
||||
///
|
||||
/// foo := &pb.Foo{...}
|
||||
/// any, err := anypb.New(foo)
|
||||
@ -95,7 +95,7 @@ namespace Google.Protobuf.WellKnownTypes {
|
||||
/// name "y.z".
|
||||
///
|
||||
/// JSON
|
||||
/// ====
|
||||
///
|
||||
/// The JSON representation of an `Any` value uses the regular
|
||||
/// representation of the deserialized, embedded message, with an
|
||||
/// additional field `@type` which contains the type URL. Example:
|
||||
|
@ -300,3 +300,7 @@ with info about your project (name and website) so we can add an entry for you.
1. Protoc-gen-go-svc
* Website: https://github.com/dane/protoc-gen-go-svc
* Extension: 1140

1. Embedded Proto
* Website: https://EmbeddedProto.com
* Extension: 1141

@ -201,3 +201,4 @@ There are miscellaneous other things you may find useful as a Protocol Buffers d
* [intellij-protolint: A protobuf linter for JetBrains IDEs](https://github.com/yoheimuta/intellij-protolint)
* [vim-protolint: A protobuf linter for Vim](https://github.com/yoheimuta/vim-protolint)
* [super-linter: Protocol Buffer lint as GitHub Action](https://github.com/github/super-linter)
* [protoc-gen-fieldmask - A plugin to generate static type fieldmask paths](https://github.com/idodod/protoc-gen-fieldmask)

@ -305,16 +305,14 @@ public abstract class ByteString implements Iterable<Byte>, Serializable {
ByteIterator latterBytes = latter.iterator();

while (formerBytes.hasNext() && latterBytes.hasNext()) {
// Note: This code was copied from com.google.common.primitives.UnsignedBytes#compare,
// as Guava libraries cannot be used in the {@code com.google.protobuf} package.
int result =
Integer.compare(toInt(formerBytes.nextByte()), toInt(latterBytes.nextByte()));
Integer.valueOf(toInt(formerBytes.nextByte()))
.compareTo(toInt(latterBytes.nextByte()));
if (result != 0) {
return result;
}
}

return Integer.compare(former.size(), latter.size());
return Integer.valueOf(former.size()).compareTo(Integer.valueOf(latter.size()));
}
};

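The replacement above swaps Integer.compare for Integer.valueOf(...).compareTo(...); the two order int values identically, so only the boxing changes. A minimal standalone sketch of the lexicographic, unsigned comparison that the copied-from-Guava note describes (the class and helper names are hypothetical, not part of the library):

import java.util.Comparator;

// Hypothetical sketch: lexicographic comparison of byte arrays, treating bytes as unsigned,
// using the same valueOf(...).compareTo(...) idiom as the change above.
public final class UnsignedLexicographicalCompareSketch {
  private static int toInt(byte b) {
    return b & 0xFF; // interpret the byte as an unsigned value in 0..255
  }

  static final Comparator<byte[]> COMPARATOR =
      (former, latter) -> {
        int limit = Math.min(former.length, latter.length);
        for (int i = 0; i < limit; i++) {
          int result = Integer.valueOf(toInt(former[i])).compareTo(toInt(latter[i]));
          if (result != 0) {
            return result;
          }
        }
        // With an equal prefix, the shorter array sorts first.
        return Integer.valueOf(former.length).compareTo(Integer.valueOf(latter.length));
      };

  public static void main(String[] args) {
    byte[] a = {1, (byte) 0xFF}; // 0xFF compares as 255 here, not as -1
    byte[] b = {1, 0x7F};
    System.out.println(COMPARATOR.compare(a, b) > 0); // true
  }
}
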
@ -61,8 +61,32 @@ import java.util.concurrent.ConcurrentHashMap;
final class DescriptorMessageInfoFactory implements MessageInfoFactory {
private static final String GET_DEFAULT_INSTANCE_METHOD_NAME = "getDefaultInstance";
private static final DescriptorMessageInfoFactory instance = new DescriptorMessageInfoFactory();

/**
* Names that should be avoided (in UpperCamelCase format). Using them causes the compiler to
* generate accessors whose names collide with methods defined in base classes.
*
* <p>Keep this list in sync with kForbiddenWordList in
* src/google/protobuf/compiler/java/java_helpers.cc
*/
private static final Set<String> specialFieldNames =
new HashSet<>(Arrays.asList("cached_size", "serialized_size", "class"));
new HashSet<>(
Arrays.asList(
// java.lang.Object:
"Class",
// com.google.protobuf.MessageLiteOrBuilder:
"DefaultInstanceForType",
// com.google.protobuf.MessageLite:
"ParserForType",
"SerializedSize",
// com.google.protobuf.MessageOrBuilder:
"AllFields",
"DescriptorForType",
"InitializationErrorString",
// TODO(b/219045204): re-enable
// "UnknownFields",
// obsolete. kept for backwards compatibility of generated code
"CachedSize"));

// Disallow construction - it's a singleton.
private DescriptorMessageInfoFactory() {}
@ -125,6 +149,8 @@ final class DescriptorMessageInfoFactory implements MessageInfoFactory {
*
* <p>This class is thread-safe.
*/
// <p>The code is adapted from the C++ implementation:
// https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/compiler/java/java_helpers.h
static class IsInitializedCheckAnalyzer {

private final Map<Descriptor, Boolean> resultCache =
@ -593,21 +619,104 @@ final class DescriptorMessageInfoFactory implements MessageInfoFactory {
String name = (fd.getType() == FieldDescriptor.Type.GROUP)
? fd.getMessageType().getName()
: fd.getName();
String suffix = specialFieldNames.contains(name) ? "__" : "_";
return snakeCaseToCamelCase(name) + suffix;

// convert to UpperCamelCase for comparison to the specialFieldNames
// (which are in UpperCamelCase)
String upperCamelCaseName = snakeCaseToUpperCamelCase(name);

// Append underscores to match the behavior of the protoc java compiler
final String suffix;
if (specialFieldNames.contains(upperCamelCaseName)) {
// For proto field names that match the specialFieldNames,
// the protoc java compiler appends "__" to the java field name
// to prevent the field's accessor method names from clashing with other methods.
// For example:
// proto field name = "class"
// java field name = "class__"
// accessor method name = "getClass_()" (so that it does not clash with
// Object.getClass())
suffix = "__";
} else {
// For other proto field names,
// the protoc java compiler appends "_" to the java field name
// to prevent field names from clashing with java keywords.
// For example:
// proto field name = "int"
// java field name = "int_" (so that it does not clash with int keyword)
// accessor method name = "getInt()"
suffix = "_";
}
return snakeCaseToLowerCamelCase(name) + suffix;
}

private static String getCachedSizeFieldName(FieldDescriptor fd) {
return snakeCaseToCamelCase(fd.getName()) + "MemoizedSerializedSize";
return snakeCaseToLowerCamelCase(fd.getName()) + "MemoizedSerializedSize";
}

/**
* This method must match exactly with the corresponding function in protocol compiler. See:
* https://github.com/google/protobuf/blob/v3.0.0/src/google/protobuf/compiler/java/java_helpers.cc#L153
* Converts a snake case string into lower camel case.
*
* <p>Some examples:
*
* <pre>
* snakeCaseToLowerCamelCase("foo_bar") => "fooBar"
* snakeCaseToLowerCamelCase("foo") => "foo"
* </pre>
*
* @param snakeCase the string in snake case to convert
* @return the string converted to camel case, with a lowercase first character
*/
private static String snakeCaseToCamelCase(String snakeCase) {
private static String snakeCaseToLowerCamelCase(String snakeCase) {
return snakeCaseToCamelCase(snakeCase, false);
}

/**
* Converts a snake case string into upper camel case.
*
* <p>Some examples:
*
* <pre>
* snakeCaseToUpperCamelCase("foo_bar") => "FooBar"
* snakeCaseToUpperCamelCase("foo") => "Foo"
* </pre>
*
* @param snakeCase the string in snake case to convert
* @return the string converted to camel case, with an uppercase first character
*/
private static String snakeCaseToUpperCamelCase(String snakeCase) {
return snakeCaseToCamelCase(snakeCase, true);
}

/**
* Converts a snake case string into camel case.
*
* <p>For better readability, prefer calling either {@link #snakeCaseToLowerCamelCase(String)} or
* {@link #snakeCaseToUpperCamelCase(String)}.
*
* <p>Some examples:
*
* <pre>
* snakeCaseToCamelCase("foo_bar", false) => "fooBar"
* snakeCaseToCamelCase("foo_bar", true) => "FooBar"
* snakeCaseToCamelCase("foo", false) => "foo"
* snakeCaseToCamelCase("foo", true) => "Foo"
* snakeCaseToCamelCase("Foo", false) => "foo"
* snakeCaseToCamelCase("fooBar", false) => "fooBar"
* </pre>
*
* <p>This implementation of this method must exactly match the corresponding function in the
* protocol compiler. Specifically, the {@code UnderscoresToCamelCase} function in {@code
* src/google/protobuf/compiler/java/java_helpers.cc}.
*
* @param snakeCase the string in snake case to convert
* @param capFirst true if the first letter of the returned string should be uppercase. false if
* the first letter of the returned string should be lowercase.
* @return the string converted to camel case, with an uppercase or lowercase first character
* depending on if {@code capFirst} is true or false, respectively
*/
private static String snakeCaseToCamelCase(String snakeCase, boolean capFirst) {
StringBuilder sb = new StringBuilder(snakeCase.length() + 1);
boolean capNext = false;
boolean capNext = capFirst;
for (int ctr = 0; ctr < snakeCase.length(); ctr++) {
char next = snakeCase.charAt(ctr);
if (next == '_') {
@ -653,7 +762,7 @@ final class DescriptorMessageInfoFactory implements MessageInfoFactory {

/** Constructs the name of the get method for the given field in the proto. */
private static String getterForField(String snakeCase) {
String camelCase = snakeCaseToCamelCase(snakeCase);
String camelCase = snakeCaseToLowerCamelCase(snakeCase);
StringBuilder builder = new StringBuilder("get");
// Capitalize the first character in the field name.
builder.append(Character.toUpperCase(camelCase.charAt(0)));
@ -679,7 +788,7 @@ final class DescriptorMessageInfoFactory implements MessageInfoFactory {
}

private static OneofInfo newInfo(Class<?> messageType, OneofDescriptor desc) {
String camelCase = snakeCaseToCamelCase(desc.getName());
String camelCase = snakeCaseToLowerCamelCase(desc.getName());
String valueFieldName = camelCase + "_";
String caseFieldName = camelCase + "Case_";

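Taken together, the renamed helpers and the suffix logic above define how a proto field name becomes a Java field name. A self-contained sketch of that scheme, matching the documented examples (the class name and the trimmed-down forbidden-word set are assumptions for illustration, not the factory's real private API):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Hypothetical helper that mirrors the documented conversion and suffix rules.
final class FieldNamingSketch {
  // Subset of the forbidden accessor names, in UpperCamelCase.
  private static final Set<String> SPECIAL =
      new HashSet<>(Arrays.asList("Class", "SerializedSize", "CachedSize"));

  // snake_case -> camelCase / CamelCase, as in the javadoc examples above.
  static String snakeToCamel(String snake, boolean capFirst) {
    StringBuilder sb = new StringBuilder(snake.length());
    boolean capNext = capFirst;
    for (int i = 0; i < snake.length(); i++) {
      char c = snake.charAt(i);
      if (c == '_') {
        capNext = true;                        // drop the underscore, capitalize what follows
      } else if (capNext) {
        sb.append(Character.toUpperCase(c));
        capNext = false;
      } else if (i == 0 && !capFirst) {
        sb.append(Character.toLowerCase(c));   // "Foo" -> "foo"
      } else {
        sb.append(c);
      }
    }
    return sb.toString();
  }

  // Suffix rule described in the comments: "__" for forbidden words, "_" otherwise.
  static String javaFieldName(String protoName) {
    String suffix = SPECIAL.contains(snakeToCamel(protoName, true)) ? "__" : "_";
    return snakeToCamel(protoName, false) + suffix;
  }

  public static void main(String[] args) {
    System.out.println(snakeToCamel("foo_bar", false)); // fooBar
    System.out.println(snakeToCamel("foo_bar", true));  // FooBar
    System.out.println(javaFieldName("class"));         // class__
    System.out.println(javaFieldName("int"));           // int_
  }
}
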
@ -1941,7 +1941,7 @@ public final class Descriptors {
new Comparator<EnumValueDescriptor>() {
@Override
public int compare(EnumValueDescriptor o1, EnumValueDescriptor o2) {
return Integer.compare(o1.getNumber(), o2.getNumber());
return Integer.valueOf(o1.getNumber()).compareTo(o2.getNumber());
}
};

@ -30,8 +30,6 @@

package com.google.protobuf;

import static java.nio.charset.StandardCharsets.UTF_8;

import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.EnumDescriptor;
import com.google.protobuf.Descriptors.EnumValueDescriptor;
@ -506,11 +504,11 @@ public final class TextFormat {
}
switch (fieldType) {
case BOOLEAN:
return Boolean.compare((boolean) getKey(), (boolean) b.getKey());
return Boolean.valueOf((boolean) getKey()).compareTo((boolean) b.getKey());
case LONG:
return Long.compare((long) getKey(), (long) b.getKey());
return Long.valueOf((long) getKey()).compareTo((long) b.getKey());
case INT:
return Integer.compare((int) getKey(), (int) b.getKey());
return Integer.valueOf((int) getKey()).compareTo((int) b.getKey());
case STRING:
String aString = (String) getKey();
String bString = (String) b.getKey();
@ -2408,11 +2406,12 @@ public final class TextFormat {
| digitValue(input.byteAt(i + 1)) << 8
| digitValue(input.byteAt(i + 2)) << 4
| digitValue(input.byteAt(i + 3)));
if (Character.isSurrogate(ch)) {

if (ch >= Character.MIN_SURROGATE && ch <= Character.MAX_SURROGATE) {
throw new InvalidEscapeSequenceException(
"Invalid escape sequence: '\\u' refers to a surrogate");
}
byte[] chUtf8 = Character.toString(ch).getBytes(UTF_8);
byte[] chUtf8 = Character.toString(ch).getBytes(Internal.UTF_8);
System.arraycopy(chUtf8, 0, result, pos, chUtf8.length);
pos += chUtf8.length;
i += 3;
@ -2456,7 +2455,7 @@ public final class TextFormat {
}
int[] codepoints = new int[1];
codepoints[0] = codepoint;
byte[] chUtf8 = new String(codepoints, 0, 1).getBytes(UTF_8);
byte[] chUtf8 = new String(codepoints, 0, 1).getBytes(Internal.UTF_8);
System.arraycopy(chUtf8, 0, result, pos, chUtf8.length);
pos += chUtf8.length;
i += 7;

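The escape-sequence hunk above replaces Character.isSurrogate(ch) with an explicit range test against Character.MIN_SURROGATE and Character.MAX_SURROGATE. For char values the two checks accept exactly the same inputs; a small hedged sketch (hypothetical class name) that verifies the equivalence:

// Hypothetical demo: the explicit MIN_SURROGATE..MAX_SURROGATE range test used in the
// rewritten TextFormat code accepts exactly the same chars as Character.isSurrogate.
public final class SurrogateCheckSketch {
  public static void main(String[] args) {
    for (int c = Character.MIN_VALUE; c <= Character.MAX_VALUE; c++) {
      char ch = (char) c;
      boolean byRange = ch >= Character.MIN_SURROGATE && ch <= Character.MAX_SURROGATE;
      if (byRange != Character.isSurrogate(ch)) {
        throw new AssertionError("mismatch at " + c);
      }
    }
    System.out.println("range check matches Character.isSurrogate for all chars");
  }
}
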
@ -32,6 +32,7 @@
|
||||
|
||||
// This file tests that various identifiers work as field and type names even
|
||||
// though the same identifiers are used internally by the java code generator.
|
||||
// LINT: LEGACY_NAMES
|
||||
|
||||
syntax = "proto2";
|
||||
|
||||
@ -41,10 +42,67 @@ option java_generic_services = true; // auto-added
|
||||
option java_package = "com.google.protobuf";
|
||||
option java_outer_classname = "TestBadIdentifiersProto";
|
||||
|
||||
message TestMessage {
|
||||
optional string cached_size = 1;
|
||||
optional string serialized_size = 2;
|
||||
optional string class = 3;
|
||||
// Message with field names using underscores that conflict with accessors in
|
||||
// the base message class in java. See kForbiddenWordList in
|
||||
// src/google/protobuf/compiler/java/java_helpers.cc
|
||||
message ForbiddenWordsUnderscoreMessage {
|
||||
// java.lang.Object
|
||||
optional bool class = 1;
|
||||
// com.google.protobuf.MessageLiteOrBuilder
|
||||
optional bool default_instance_for_type = 2;
|
||||
// com.google.protobuf.MessageLite
|
||||
optional bool parser_for_type = 3;
|
||||
optional bool serialized_size = 4;
|
||||
// com.google.protobuf.MessageOrBuilder
|
||||
optional bool all_fields = 5;
|
||||
optional bool descriptor_for_type = 6;
|
||||
optional bool initialization_error_string = 7;
|
||||
// TODO(b/219045204): re-enable
|
||||
// optional bool unknown_fields = 8;
|
||||
// obsolete. kept for backwards compatibility of generated code
|
||||
optional bool cached_size = 9;
|
||||
}
|
||||
|
||||
// Message with field names using leading underscores that conflict with
|
||||
// accessors in the base message class in java. See kForbiddenWordList in
|
||||
// src/google/protobuf/compiler/java/java_helpers.cc
|
||||
message ForbiddenWordsLeadingUnderscoreMessage {
|
||||
// java.lang.Object
|
||||
optional bool _class = 1;
|
||||
// com.google.protobuf.MessageLiteOrBuilder
|
||||
optional bool _default_instance_for_type = 2;
|
||||
// com.google.protobuf.MessageLite
|
||||
optional bool _parser_for_type = 3;
|
||||
optional bool _serialized_size = 4;
|
||||
// com.google.protobuf.MessageOrBuilder
|
||||
optional bool _all_fields = 5;
|
||||
optional bool _descriptor_for_type = 6;
|
||||
optional bool _initialization_error_string = 7;
|
||||
// TODO(b/219045204): re-enable
|
||||
// optional bool _unknown_fields = 8;
|
||||
// obsolete. kept for backwards compatibility of generated code
|
||||
optional bool _cached_size = 9;
|
||||
}
|
||||
|
||||
// Message with field names in camel case that conflict with accessors in the
|
||||
// base message class in java. See kForbiddenWordList in
|
||||
// src/google/protobuf/compiler/java/java_helpers.cc
|
||||
message ForbiddenWordsCamelMessage {
|
||||
// java.lang.Object
|
||||
optional bool class = 1;
|
||||
// com.google.protobuf.MessageLiteOrBuilder
|
||||
optional bool defaultInstanceForType = 2;
|
||||
// com.google.protobuf.MessageLite
|
||||
optional bool serializedSize = 3;
|
||||
optional bool parserForType = 4;
|
||||
// com.google.protobuf.MessageOrBuilder:
|
||||
optional bool initializationErrorString = 5;
|
||||
optional bool descriptorForType = 6;
|
||||
optional bool allFields = 7;
|
||||
// TODO(b/219045204): re-enable
|
||||
// optional bool unknownFields = 8;
|
||||
// obsolete. kept for backwards compatibility of generated code
|
||||
optional bool cachedSize = 9;
|
||||
}
|
||||
|
||||
message Descriptor {
|
||||
@ -82,7 +140,7 @@ message Deprecated {
|
||||
|
||||
optional int32 field1 = 1 [deprecated = true];
|
||||
optional TestEnum field2 = 2 [deprecated = true];
|
||||
optional TestMessage field3 = 3 [deprecated = true];
|
||||
optional ForbiddenWordsUnderscoreMessage field3 = 3 [deprecated = true];
|
||||
}
|
||||
|
||||
message Override {
|
||||
@ -115,7 +173,8 @@ message Double {
|
||||
}
|
||||
|
||||
service TestConflictingMethodNames {
|
||||
rpc Override(TestMessage) returns (TestMessage);
|
||||
rpc Override(ForbiddenWordsUnderscoreMessage)
|
||||
returns (ForbiddenWordsUnderscoreMessage);
|
||||
}
|
||||
|
||||
message TestConflictingFieldNames {
|
||||
@ -123,24 +182,24 @@ message TestConflictingFieldNames {
|
||||
UNKNOWN = 0;
|
||||
FOO = 1;
|
||||
}
|
||||
message TestMessage {}
|
||||
message ForbiddenWordsUnderscoreMessage {}
|
||||
repeated int32 int32_field = 1;
|
||||
repeated TestEnum enum_field = 2;
|
||||
repeated string string_field = 3;
|
||||
repeated bytes bytes_field = 4;
|
||||
repeated TestMessage message_field = 5;
|
||||
repeated ForbiddenWordsUnderscoreMessage message_field = 5;
|
||||
|
||||
optional int32 int32_field_count = 11;
|
||||
optional TestEnum enum_field_count = 12;
|
||||
optional string string_field_count = 13;
|
||||
optional bytes bytes_field_count = 14;
|
||||
optional TestMessage message_field_count = 15;
|
||||
optional ForbiddenWordsUnderscoreMessage message_field_count = 15;
|
||||
|
||||
repeated int32 Int32Field = 21; // NO_PROTO3
|
||||
repeated TestEnum EnumField = 22; // NO_PROTO3
|
||||
repeated string StringField = 23; // NO_PROTO3
|
||||
repeated bytes BytesField = 24; // NO_PROTO3
|
||||
repeated TestMessage MessageField = 25; // NO_PROTO3
|
||||
repeated int32 Int32Field = 21; // NO_PROTO3
|
||||
repeated TestEnum EnumField = 22; // NO_PROTO3
|
||||
repeated string StringField = 23; // NO_PROTO3
|
||||
repeated bytes BytesField = 24; // NO_PROTO3
|
||||
repeated ForbiddenWordsUnderscoreMessage MessageField = 25; // NO_PROTO3
|
||||
|
||||
// This field conflicts with "int32_field" as they both generate
|
||||
// the method getInt32FieldList().
|
||||
|
@ -1,5 +1,6 @@
|
||||
load("@io_bazel_rules_kotlin//kotlin:jvm.bzl", "kt_jvm_library")
|
||||
load("@rules_java//java:defs.bzl", "java_lite_proto_library")
|
||||
load("//:protobuf.bzl", "internal_gen_kt_protos")
|
||||
|
||||
java_lite_proto_library(
|
||||
name = "example_extensible_message_java_proto_lite",
|
||||
@ -47,74 +48,31 @@ java_lite_proto_library(
|
||||
deps = ["//java/kotlin:evil_names_proto2"],
|
||||
)
|
||||
|
||||
internal_gen_kt_protos(
|
||||
name = "gen_evil_names_proto2_lite",
|
||||
deps = ["//java/kotlin:evil_names_proto2"],
|
||||
lite = True,
|
||||
)
|
||||
|
||||
java_lite_proto_library(
|
||||
name = "evil_names_proto3_java_proto_lite",
|
||||
deps = ["//java/kotlin:evil_names_proto3"],
|
||||
)
|
||||
|
||||
internal_gen_kt_protos(
|
||||
name = "gen_evil_names_proto3_lite",
|
||||
deps = ["//java/kotlin:evil_names_proto3"],
|
||||
lite = True,
|
||||
)
|
||||
|
||||
java_lite_proto_library(
|
||||
name = "multiple_files_proto3_java_proto_lite",
|
||||
deps = ["//java/kotlin:multiple_files_proto3"],
|
||||
)
|
||||
|
||||
genrule(
|
||||
internal_gen_kt_protos(
|
||||
name = "gen_kotlin_proto3_java_multiple_files_lite",
|
||||
srcs = ["src/test/proto/com/google/protobuf/multiple_files_proto3.proto"],
|
||||
outs = [
|
||||
"MultipleFilesMessageALiteKt.kt",
|
||||
"MultipleFilesMessageBLiteKt.kt",
|
||||
"MultipleFilesProto3LiteKt.kt",
|
||||
],
|
||||
cmd = "$(location //:protoc) " +
|
||||
"--kotlin_out=lite:$(@D) " +
|
||||
"$(location src/test/proto/com/google/protobuf/multiple_files_proto3.proto) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/MultipleFilesMessageAKt.kt " +
|
||||
"$(location MultipleFilesMessageALiteKt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/MultipleFilesMessageBKt.kt " +
|
||||
"$(location MultipleFilesMessageBLiteKt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/MultipleFilesProto3Kt.kt " +
|
||||
"$(location MultipleFilesProto3LiteKt.kt)",
|
||||
tools = ["//:protoc"],
|
||||
)
|
||||
|
||||
genrule(
|
||||
name = "gen_evil_names_proto2_lite",
|
||||
srcs = ["src/test/proto/com/google/protobuf/evil_names_proto2.proto"],
|
||||
outs = [
|
||||
"EvilNamesProto2LiteKt.kt",
|
||||
"HardKeywordsAllTypesProto2LiteKt.kt",
|
||||
"InterfaceKt.kt",
|
||||
],
|
||||
cmd = "$(location //:protoc) " +
|
||||
"--kotlin_out=lite:$(@D) " +
|
||||
"$(location src/test/proto/com/google/protobuf/evil_names_proto2.proto) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/EvilNamesProto2Kt.kt " +
|
||||
"$(location EvilNamesProto2LiteKt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/HardKeywordsAllTypesProto2Kt.kt " +
|
||||
"$(location HardKeywordsAllTypesProto2LiteKt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/InterfaceKt.kt " +
|
||||
"$(location InterfaceKt.kt)",
|
||||
tools = ["//:protoc"],
|
||||
)
|
||||
|
||||
genrule(
|
||||
name = "gen_evil_names_proto3_lite",
|
||||
srcs = ["src/test/proto/com/google/protobuf/evil_names_proto3.proto"],
|
||||
outs = [
|
||||
"ClassKt.kt",
|
||||
"EvilNamesProto3Kt.kt",
|
||||
"HardKeywordsAllTypesProto3Kt.kt",
|
||||
],
|
||||
cmd = "$(location //:protoc) " +
|
||||
"--kotlin_out=lite:$(@D) " +
|
||||
"$(location src/test/proto/com/google/protobuf/evil_names_proto3.proto) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/ClassKt.kt " +
|
||||
"$(location ClassKt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/EvilNamesProto3Kt.kt " +
|
||||
"$(location EvilNamesProto3Kt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/HardKeywordsAllTypesProto3Kt.kt " +
|
||||
"$(location HardKeywordsAllTypesProto3Kt.kt)",
|
||||
tools = ["//:protoc"],
|
||||
deps = ["//java/kotlin:multiple_files_proto3"],
|
||||
)
|
||||
|
||||
kt_jvm_library(
|
||||
@ -168,7 +126,7 @@ java_test(
|
||||
|
||||
kt_jvm_library(
|
||||
name = "proto3_test_lite_library",
|
||||
srcs = ["src/test/kotlin/com/google/protobuf/Proto3LiteTest.kt"],
|
||||
srcs = ["//java/kotlin:src/test/kotlin/com/google/protobuf/Proto3Test.kt"],
|
||||
deps = [
|
||||
":kotlin_proto3_unittest_lite",
|
||||
"//java/core:test_util_lite",
|
||||
@ -180,5 +138,5 @@ kt_jvm_library(
|
||||
java_test(
|
||||
name = "proto3_test_lite",
|
||||
runtime_deps = [":proto3_test_lite_library"],
|
||||
test_class = "com.google.protobuf.kotlin.Proto3LiteTest",
|
||||
test_class = "com.google.protobuf.kotlin.Proto3Test",
|
||||
)
|
||||
|
@ -59,6 +59,7 @@
|
||||
<groupId>org.jetbrains.kotlin</groupId>
|
||||
<artifactId>kotlin-test</artifactId>
|
||||
<version>${kotlin.version}</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
|
@ -2,6 +2,11 @@ load("@io_bazel_rules_kotlin//kotlin:jvm.bzl", "kt_jvm_library")
|
||||
load("@rules_java//java:defs.bzl", "java_proto_library")
|
||||
load("@rules_proto//proto:defs.bzl", "proto_library")
|
||||
load("//:protobuf_version.bzl", "PROTOBUF_VERSION")
|
||||
load("//:protobuf.bzl", "internal_gen_kt_protos")
|
||||
|
||||
exports_files([
|
||||
"src/test/kotlin/com/google/protobuf/Proto3Test.kt",
|
||||
])
|
||||
|
||||
# Kotlin generated protos depend on this and only this.
|
||||
kt_jvm_library(
|
||||
@ -35,7 +40,10 @@ kt_jvm_library(
|
||||
|
||||
kt_jvm_library(
|
||||
name = "full_extensions",
|
||||
srcs = ["src/main/kotlin/com/google/protobuf/ExtendableMessageExtensions.kt"],
|
||||
srcs = [
|
||||
"src/main/kotlin/com/google/protobuf/Anies.kt",
|
||||
"src/main/kotlin/com/google/protobuf/ExtendableMessageExtensions.kt",
|
||||
],
|
||||
deps = ["//java/core"],
|
||||
)
|
||||
|
||||
@ -106,11 +114,15 @@ java_test(
|
||||
|
||||
kt_jvm_library(
|
||||
name = "test_extensions_library",
|
||||
srcs = ["src/test/kotlin/com/google/protobuf/ExtendableMessageExtensionsTest.kt"],
|
||||
srcs = [
|
||||
"src/test/kotlin/com/google/protobuf/AniesTest.kt",
|
||||
"src/test/kotlin/com/google/protobuf/ExtendableMessageExtensionsTest.kt",
|
||||
],
|
||||
deps = [
|
||||
":example_extensible_message_java_proto",
|
||||
":full_extensions",
|
||||
"//java/lite",
|
||||
"//java/core:core",
|
||||
":kotlin_unittest",
|
||||
":only_for_use_in_proto_generated_code_its_generator_and_tests",
|
||||
":shared_runtime",
|
||||
"@com_github_jetbrains_kotlin//:kotlin-test",
|
||||
@ -131,6 +143,16 @@ proto_library(
|
||||
visibility = ["//:__subpackages__"],
|
||||
)
|
||||
|
||||
java_proto_library(
|
||||
name = "evil_names_proto2_java_proto",
|
||||
deps = [":evil_names_proto2"],
|
||||
)
|
||||
|
||||
internal_gen_kt_protos(
|
||||
name = "gen_evil_names_proto2",
|
||||
deps = [":evil_names_proto2"],
|
||||
)
|
||||
|
||||
proto_library(
|
||||
name = "evil_names_proto3",
|
||||
srcs = ["src/test/proto/com/google/protobuf/evil_names_proto3.proto"],
|
||||
@ -138,12 +160,12 @@ proto_library(
|
||||
)
|
||||
|
||||
java_proto_library(
|
||||
name = "evil_names_proto2_java_proto",
|
||||
deps = [":evil_names_proto2"],
|
||||
name = "evil_names_proto3_java_proto",
|
||||
deps = [":evil_names_proto3"],
|
||||
)
|
||||
|
||||
java_proto_library(
|
||||
name = "evil_names_proto3_java_proto",
|
||||
internal_gen_kt_protos(
|
||||
name = "gen_evil_names_proto3",
|
||||
deps = [":evil_names_proto3"],
|
||||
)
|
||||
|
||||
@ -153,68 +175,14 @@ proto_library(
|
||||
visibility = ["//:__subpackages__"],
|
||||
)
|
||||
|
||||
java_proto_library( name = "multiple_files_proto3_java_proto",
|
||||
java_proto_library(
|
||||
name = "multiple_files_proto3_java_proto",
|
||||
deps = [":multiple_files_proto3"],
|
||||
)
|
||||
|
||||
genrule(
|
||||
internal_gen_kt_protos(
|
||||
name = "gen_kotlin_proto3_java_multiple_files",
|
||||
srcs = ["src/test/proto/com/google/protobuf/multiple_files_proto3.proto"],
|
||||
outs = [
|
||||
"MultipleFilesMessageAKt.kt",
|
||||
"MultipleFilesMessageBKt.kt",
|
||||
"MultipleFilesProto3Kt.kt",
|
||||
],
|
||||
cmd = "$(location //:protoc) " +
|
||||
"--kotlin_out=shared,immutable:$(@D) " +
|
||||
"$(location src/test/proto/com/google/protobuf/multiple_files_proto3.proto) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/MultipleFilesMessageAKt.kt " +
|
||||
"$(location MultipleFilesMessageAKt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/MultipleFilesMessageBKt.kt " +
|
||||
"$(location MultipleFilesMessageBKt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/MultipleFilesProto3Kt.kt " +
|
||||
"$(location MultipleFilesProto3Kt.kt)",
|
||||
tools = ["//:protoc"],
|
||||
)
|
||||
|
||||
genrule(
|
||||
name = "gen_evil_names_proto2",
|
||||
srcs = ["src/test/proto/com/google/protobuf/evil_names_proto2.proto"],
|
||||
outs = [
|
||||
"EvilNamesProto2Kt.kt",
|
||||
"HardKeywordsAllTypesProto2Kt.kt",
|
||||
"InterfaceKt.kt",
|
||||
],
|
||||
cmd = "$(location //:protoc) " +
|
||||
"--kotlin_out=shared,immutable:$(@D) " +
|
||||
"$(location src/test/proto/com/google/protobuf/evil_names_proto2.proto) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/EvilNamesProto2Kt.kt " +
|
||||
"$(location EvilNamesProto2Kt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/HardKeywordsAllTypesProto2Kt.kt " +
|
||||
"$(location HardKeywordsAllTypesProto2Kt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/InterfaceKt.kt " +
|
||||
"$(location InterfaceKt.kt)",
|
||||
tools = ["//:protoc"],
|
||||
)
|
||||
|
||||
genrule(
|
||||
name = "gen_evil_names_proto3",
|
||||
srcs = ["src/test/proto/com/google/protobuf/evil_names_proto3.proto"],
|
||||
outs = [
|
||||
"ClassKt.kt",
|
||||
"EvilNamesProto3Kt.kt",
|
||||
"HardKeywordsAllTypesProto3Kt.kt",
|
||||
],
|
||||
cmd = "$(location //:protoc) " +
|
||||
"--kotlin_out=shared,immutable:$(@D) " +
|
||||
"$(location src/test/proto/com/google/protobuf/evil_names_proto3.proto) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/ClassKt.kt " +
|
||||
"$(location ClassKt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/EvilNamesProto3Kt.kt " +
|
||||
"$(location EvilNamesProto3Kt.kt) && " +
|
||||
"cp $(@D)/com/google/protobuf/kotlin/generator/HardKeywordsAllTypesProto3Kt.kt " +
|
||||
"$(location HardKeywordsAllTypesProto3Kt.kt)",
|
||||
tools = ["//:protoc"],
|
||||
deps = [":multiple_files_proto3"],
|
||||
)
|
||||
|
||||
kt_jvm_library(
|
||||
@ -228,6 +196,7 @@ kt_jvm_library(
|
||||
"//java/core:core",
|
||||
":only_for_use_in_proto_generated_code_its_generator_and_tests",
|
||||
":shared_runtime",
|
||||
":well_known_protos_kotlin",
|
||||
"//:java_test_protos",
|
||||
],
|
||||
)
|
||||
@ -282,3 +251,15 @@ java_test(
|
||||
runtime_deps = [":proto3_test_library"],
|
||||
test_class = "com.google.protobuf.kotlin.Proto3Test",
|
||||
)
|
||||
|
||||
kt_jvm_library(
|
||||
name = "well_known_protos_kotlin",
|
||||
srcs = [
|
||||
"//:gen_well_known_protos_kotlin",
|
||||
],
|
||||
deps = [
|
||||
"//java/core",
|
||||
":only_for_use_in_proto_generated_code_its_generator_and_tests",
|
||||
":shared_runtime",
|
||||
],
|
||||
)
|
||||
|
@ -3,6 +3,7 @@
<mkdir dir="${generated.sources.dir}"/>
<exec executable="${protoc}">
<arg value="--kotlin_out=${generated.sources.dir}"/>
<arg value="--java_out=${generated.sources.dir}"/>
<arg value="--proto_path=${protobuf.source.dir}"/>
<arg value="${protobuf.source.dir}/google/protobuf/any.proto"/>
<arg value="${protobuf.source.dir}/google/protobuf/api.proto"/>
|
@ -33,6 +33,7 @@ package com.google.protobuf.kotlin
|
||||
import com.google.common.truth.Truth.assertThat
|
||||
import com.google.protobuf.ByteString
|
||||
import java.lang.IndexOutOfBoundsException
|
||||
import java.nio.Buffer
|
||||
import java.nio.ByteBuffer
|
||||
import kotlin.test.assertFailsWith
|
||||
import org.junit.Test
|
||||
@ -90,8 +91,8 @@ class ByteStringsTest {
@Test
fun byteBufferToByteStringRespectsPositionAndLimit() {
val buffer = ByteBuffer.wrap("abc".toByteArray(Charsets.UTF_8))
buffer.position(1)
buffer.limit(2)
(buffer as java.nio.Buffer).position(1)
(buffer as java.nio.Buffer).limit(2)
assertThat(buffer.toByteString()).isEqualTo(ByteString.copyFromUtf8("b"))
}
}
|
@ -1901,7 +1901,7 @@ public class JsonFormat {
return json.getAsString();
}

private ByteString parseBytes(JsonElement json) throws InvalidProtocolBufferException {
private ByteString parseBytes(JsonElement json) {
try {
return ByteString.copyFrom(BaseEncoding.base64().decode(json.getAsString()));
} catch (IllegalArgumentException e) {
|
@ -1117,7 +1117,7 @@ public class JsonFormatTest {
|
||||
+ " \"value\": \"12345\"\n"
|
||||
+ "}");
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(UInt64Value.newBuilder().setValue(12345).build());
|
||||
anyMessage = Any.pack(UInt64Value.of(12345));
|
||||
assertThat(printer.print(anyMessage))
|
||||
.isEqualTo(
|
||||
"{\n"
|
||||
@ -1125,7 +1125,7 @@ public class JsonFormatTest {
|
||||
+ " \"value\": \"12345\"\n"
|
||||
+ "}");
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(FloatValue.newBuilder().setValue(12345).build());
|
||||
anyMessage = Any.pack(FloatValue.of(12345));
|
||||
assertThat(printer.print(anyMessage))
|
||||
.isEqualTo(
|
||||
"{\n"
|
||||
@ -1133,7 +1133,7 @@ public class JsonFormatTest {
|
||||
+ " \"value\": 12345.0\n"
|
||||
+ "}");
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(DoubleValue.newBuilder().setValue(12345).build());
|
||||
anyMessage = Any.pack(DoubleValue.of(12345));
|
||||
assertThat(printer.print(anyMessage))
|
||||
.isEqualTo(
|
||||
"{\n"
|
||||
@ -1340,7 +1340,7 @@ public class JsonFormatTest {
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
@Test
|
||||
public void testParserRejectTrailingComma() throws Exception {
|
||||
try {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
@ -1350,20 +1350,16 @@ public class JsonFormatTest {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
// TODO(xiaofeng): GSON allows trailing comma in arrays even after I set
|
||||
// the JsonReader to non-lenient mode. If we want to enforce strict JSON
|
||||
// compliance, we might want to switch to a different JSON parser or
|
||||
// implement one by ourselves.
|
||||
// try {
|
||||
// TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
// JsonFormat.merge(
|
||||
// "{\n"
|
||||
// + " \"repeatedInt32\": [12345,]\n"
|
||||
// + "}", builder);
|
||||
// fail("Exception is expected.");
|
||||
// } catch (IOException e) {
|
||||
// // Expected.
|
||||
// }
|
||||
try {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " \"repeatedInt32\": [12345,]\n"
|
||||
+ "}", builder);
|
||||
assertWithMessage("IOException expected.").fail();
|
||||
} catch (IOException e) {
|
||||
// Expected.
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -1,6 +1,6 @@
{
"name": "google-protobuf",
"version": "3.18.1",
"version": "3.19.4",
"description": "Protocol Buffers for JavaScript",
"main": "google-protobuf.js",
"files": [
|
@ -1,18 +1,18 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# This is the top-level script we give to Kokoro as the entry point for
|
||||
# running the "pull request" project:
|
||||
#
|
||||
# This script selects a specific Dockerfile (for building a Docker image) and
|
||||
# a script to run inside that image. Then we delegate to the general
|
||||
# build_and_run_docker.sh script.
|
||||
|
||||
# Change to repo root
|
||||
cd $(dirname $0)/../../..
|
||||
|
||||
set -ex
|
||||
|
||||
# Install openJDK 11 (required by the java benchmarks)
|
||||
sudo apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 78BD65473CB3BD13
|
||||
sudo add-apt-repository ppa:openjdk-r/ppa
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y openjdk-11-jdk-headless
|
||||
|
||||
# use java 11
|
||||
sudo update-java-alternatives --set /usr/lib/jvm/java-1.11.0-openjdk-amd64
|
||||
java -version
|
||||
|
||||
./tests.sh benchmark
|
||||
export DOCKERHUB_ORGANIZATION=protobuftesting
|
||||
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/java_stretch
|
||||
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
|
||||
export OUTPUT_DIR=testoutput
|
||||
export TEST_SET="benchmark"
|
||||
./kokoro/linux/build_and_run_docker.sh
|
||||
|
@ -23,7 +23,7 @@ popd
./configure CXXFLAGS="-fPIC -O2"
make -j8
pushd python
virtualenv -p python3 env
python3 -m venv env
source env/bin/activate
python3 setup.py build --cpp_implementation
pip3 install --install-option="--cpp_implementation" .
|
@ -11,6 +11,7 @@ RUN apt-get update && apt-get install -y \
|
||||
build-essential \
|
||||
bzip2 \
|
||||
ccache \
|
||||
cmake \
|
||||
curl \
|
||||
gcc \
|
||||
git \
|
||||
@ -21,6 +22,7 @@ RUN apt-get update && apt-get install -y \
|
||||
libtool \
|
||||
make \
|
||||
parallel \
|
||||
pkg-config \
|
||||
time \
|
||||
wget \
|
||||
# Java dependencies
|
||||
@ -29,6 +31,7 @@ RUN apt-get update && apt-get install -y \
|
||||
# Required for the gtest build.
|
||||
python2 \
|
||||
# Python dependencies
|
||||
python3-dev \
|
||||
python3-setuptools \
|
||||
python3-pip \
|
||||
python3-venv \
|
||||
|
@ -1,31 +0,0 @@
|
||||
FROM python:3.5-buster
|
||||
|
||||
# Install dependencies. We start with the basic ones require to build protoc
|
||||
# and the C++ build
|
||||
RUN apt-get update && apt-get install -y \
|
||||
autoconf \
|
||||
autotools-dev \
|
||||
build-essential \
|
||||
bzip2 \
|
||||
ccache \
|
||||
curl \
|
||||
gcc \
|
||||
git \
|
||||
libc6 \
|
||||
libc6-dbg \
|
||||
libc6-dev \
|
||||
libgtest-dev \
|
||||
libtool \
|
||||
make \
|
||||
parallel \
|
||||
time \
|
||||
wget \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install Python libraries.
|
||||
RUN python -m pip install --no-cache-dir --upgrade \
|
||||
pip \
|
||||
setuptools \
|
||||
tox \
|
||||
wheel
|
@ -1,31 +0,0 @@
|
||||
FROM python:3.6-buster
|
||||
|
||||
# Install dependencies. We start with the basic ones require to build protoc
|
||||
# and the C++ build
|
||||
RUN apt-get update && apt-get install -y \
|
||||
autoconf \
|
||||
autotools-dev \
|
||||
build-essential \
|
||||
bzip2 \
|
||||
ccache \
|
||||
curl \
|
||||
gcc \
|
||||
git \
|
||||
libc6 \
|
||||
libc6-dbg \
|
||||
libc6-dev \
|
||||
libgtest-dev \
|
||||
libtool \
|
||||
make \
|
||||
parallel \
|
||||
time \
|
||||
wget \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install Python libraries.
|
||||
RUN python -m pip install --no-cache-dir --upgrade \
|
||||
pip \
|
||||
setuptools \
|
||||
tox \
|
||||
wheel
|
@ -1,18 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# This is the top-level script we give to Kokoro as the entry point for
|
||||
# running the "pull request" project:
|
||||
#
|
||||
# This script selects a specific Dockerfile (for building a Docker image) and
|
||||
# a script to run inside that image. Then we delegate to the general
|
||||
# build_and_run_docker.sh script.
|
||||
|
||||
# Change to repo root
|
||||
cd $(dirname $0)/../../..
|
||||
|
||||
export DOCKERHUB_ORGANIZATION=protobuftesting
|
||||
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/python27
|
||||
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
|
||||
export OUTPUT_DIR=testoutput
|
||||
export TEST_SET="python27"
|
||||
./kokoro/linux/build_and_run_docker.sh
|
@ -1,11 +0,0 @@
|
||||
# Config file for running tests in Kokoro
|
||||
|
||||
# Location of the build script in repository
|
||||
build_file: "protobuf/kokoro/linux/python27/build.sh"
|
||||
timeout_mins: 120
|
||||
|
||||
action {
|
||||
define_artifacts {
|
||||
regex: "**/sponge_log.xml"
|
||||
}
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
# Config file for running tests in Kokoro
|
||||
|
||||
# Location of the build script in repository
|
||||
build_file: "protobuf/kokoro/linux/python27/build.sh"
|
||||
timeout_mins: 120
|
||||
|
||||
action {
|
||||
define_artifacts {
|
||||
regex: "**/sponge_log.xml"
|
||||
}
|
||||
}
|
@ -1,18 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# This is the top-level script we give to Kokoro as the entry point for
|
||||
# running the "pull request" project:
|
||||
#
|
||||
# This script selects a specific Dockerfile (for building a Docker image) and
|
||||
# a script to run inside that image. Then we delegate to the general
|
||||
# build_and_run_docker.sh script.
|
||||
|
||||
# Change to repo root
|
||||
cd $(dirname $0)/../../..
|
||||
|
||||
export DOCKERHUB_ORGANIZATION=protobuftesting
|
||||
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/python27
|
||||
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
|
||||
export OUTPUT_DIR=testoutput
|
||||
export TEST_SET="python27_cpp"
|
||||
./kokoro/linux/build_and_run_docker.sh
|
@ -1,11 +0,0 @@
|
||||
# Config file for running tests in Kokoro
|
||||
|
||||
# Location of the build script in repository
|
||||
build_file: "protobuf/kokoro/linux/python27_cpp/build.sh"
|
||||
timeout_mins: 120
|
||||
|
||||
action {
|
||||
define_artifacts {
|
||||
regex: "**/sponge_log.xml"
|
||||
}
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
# Config file for running tests in Kokoro
|
||||
|
||||
# Location of the build script in repository
|
||||
build_file: "protobuf/kokoro/linux/python27_cpp/build.sh"
|
||||
timeout_mins: 120
|
||||
|
||||
action {
|
||||
define_artifacts {
|
||||
regex: "**/sponge_log.xml"
|
||||
}
|
||||
}
|
@ -1,18 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# This is the top-level script we give to Kokoro as the entry point for
|
||||
# running the "pull request" project:
|
||||
#
|
||||
# This script selects a specific Dockerfile (for building a Docker image) and
|
||||
# a script to run inside that image. Then we delegate to the general
|
||||
# build_and_run_docker.sh script.
|
||||
|
||||
# Change to repo root
|
||||
cd $(dirname $0)/../../..
|
||||
|
||||
export DOCKERHUB_ORGANIZATION=protobuftesting
|
||||
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/python35
|
||||
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
|
||||
export OUTPUT_DIR=testoutput
|
||||
export TEST_SET="python35"
|
||||
./kokoro/linux/build_and_run_docker.sh
|
@ -1,11 +0,0 @@
|
||||
# Config file for running tests in Kokoro
|
||||
|
||||
# Location of the build script in repository
|
||||
build_file: "protobuf/kokoro/linux/python35/build.sh"
|
||||
timeout_mins: 120
|
||||
|
||||
action {
|
||||
define_artifacts {
|
||||
regex: "**/sponge_log.xml"
|
||||
}
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
# Config file for running tests in Kokoro
|
||||
|
||||
# Location of the build script in repository
|
||||
build_file: "protobuf/kokoro/linux/python35/build.sh"
|
||||
timeout_mins: 120
|
||||
|
||||
action {
|
||||
define_artifacts {
|
||||
regex: "**/sponge_log.xml"
|
||||
}
|
||||
}
|
@ -1,18 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# This is the top-level script we give to Kokoro as the entry point for
|
||||
# running the "pull request" project:
|
||||
#
|
||||
# This script selects a specific Dockerfile (for building a Docker image) and
|
||||
# a script to run inside that image. Then we delegate to the general
|
||||
# build_and_run_docker.sh script.
|
||||
|
||||
# Change to repo root
|
||||
cd $(dirname $0)/../../..
|
||||
|
||||
export DOCKERHUB_ORGANIZATION=protobuftesting
|
||||
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/python35
|
||||
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
|
||||
export OUTPUT_DIR=testoutput
|
||||
export TEST_SET="python35_cpp"
|
||||
./kokoro/linux/build_and_run_docker.sh
|
@ -1,11 +0,0 @@
|
||||
# Config file for running tests in Kokoro
|
||||
|
||||
# Location of the build script in repository
|
||||
build_file: "protobuf/kokoro/linux/python35_cpp/build.sh"
|
||||
timeout_mins: 120
|
||||
|
||||
action {
|
||||
define_artifacts {
|
||||
regex: "**/sponge_log.xml"
|
||||
}
|
||||
}
|
@ -1,11 +0,0 @@
|
||||
# Config file for running tests in Kokoro
|
||||
|
||||
# Location of the build script in repository
|
||||
build_file: "protobuf/kokoro/linux/python35_cpp/build.sh"
|
||||
timeout_mins: 120
|
||||
|
||||
action {
|
||||
define_artifacts {
|
||||
regex: "**/sponge_log.xml"
|
||||
}
|
||||
}
|
@ -81,7 +81,7 @@ typedef GPB_ENUM(GPBAny_FieldNumber) {
|
||||
* foo = any.unpack(Foo.class);
|
||||
* }
|
||||
*
|
||||
* Example 3: Pack and unpack a message in Python.
|
||||
* Example 3: Pack and unpack a message in Python.
|
||||
*
|
||||
* foo = Foo(...)
|
||||
* any = Any()
|
||||
@ -91,7 +91,7 @@ typedef GPB_ENUM(GPBAny_FieldNumber) {
|
||||
* any.Unpack(foo)
|
||||
* ...
|
||||
*
|
||||
* Example 4: Pack and unpack a message in Go
|
||||
* Example 4: Pack and unpack a message in Go
|
||||
*
|
||||
* foo := &pb.Foo{...}
|
||||
* any, err := anypb.New(foo)
|
||||
@ -112,7 +112,7 @@ typedef GPB_ENUM(GPBAny_FieldNumber) {
|
||||
*
|
||||
*
|
||||
* JSON
|
||||
* ====
|
||||
*
|
||||
* The JSON representation of an `Any` value uses the regular
|
||||
* representation of the deserialized, embedded message, with an
|
||||
* additional field `\@type` which contains the type URL. Example:
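A short, runnable Python rendering of "Example 3" above, for readers who want to try the pack/unpack round trip; the Duration payload is an arbitrary choice for illustration and nothing here is specific to this change.

from google.protobuf import any_pb2
from google.protobuf import duration_pb2

# Pack an arbitrary message; the type URL is filled in automatically.
payload = duration_pb2.Duration(seconds=30)
any_msg = any_pb2.Any()
any_msg.Pack(payload)

# Check the stored type, then unpack back into a typed message.
assert any_msg.Is(duration_pb2.Duration.DESCRIPTOR)
restored = duration_pb2.Duration()
assert any_msg.Unpack(restored)
assert restored.seconds == 30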
|
||||
|
@ -71,22 +71,23 @@ static void stringsink_uninit(stringsink *sink) { free(sink->ptr); }
|
||||
/* def name -> classname ******************************************************/
|
||||
|
||||
const char *const kReservedNames[] = {
|
||||
"abstract", "and", "array", "as", "break",
|
||||
"callable", "case", "catch", "class", "clone",
|
||||
"const", "continue", "declare", "default", "die",
|
||||
"do", "echo", "else", "elseif", "empty",
|
||||
"enddeclare", "endfor", "endforeach", "endif", "endswitch",
|
||||
"endwhile", "eval", "exit", "extends", "final",
|
||||
"finally", "fn", "for", "foreach", "function",
|
||||
"if", "implements", "include", "include_once", "instanceof",
|
||||
"global", "goto", "insteadof", "interface", "isset",
|
||||
"list", "match", "namespace", "new", "object",
|
||||
"or", "print", "private", "protected", "public",
|
||||
"require", "require_once", "return", "static", "switch",
|
||||
"throw", "trait", "try", "unset", "use",
|
||||
"var", "while", "xor", "yield", "int",
|
||||
"float", "bool", "string", "true", "false",
|
||||
"null", "void", "iterable", NULL};
|
||||
"abstract", "and", "array", "as", "break",
|
||||
"callable", "case", "catch", "class", "clone",
|
||||
"const", "continue", "declare", "default", "die",
|
||||
"do", "echo", "else", "elseif", "empty",
|
||||
"enddeclare", "endfor", "endforeach", "endif", "endswitch",
|
||||
"endwhile", "eval", "exit", "extends", "final",
|
||||
"finally", "fn", "for", "foreach", "function",
|
||||
"if", "implements", "include", "include_once", "instanceof",
|
||||
"global", "goto", "insteadof", "interface", "isset",
|
||||
"list", "match", "namespace", "new", "object",
|
||||
"or", "parent", "print", "private", "protected",
|
||||
"public", "require", "require_once", "return", "self",
|
||||
"static", "switch", "throw", "trait", "try",
|
||||
"unset", "use", "var", "while", "xor",
|
||||
"yield", "int", "float", "bool", "string",
|
||||
"true", "false", "null", "void", "iterable",
|
||||
NULL};
|
||||
|
||||
bool is_reserved_name(const char* name) {
|
||||
int i;
|
||||
|
@ -28,7 +28,7 @@ use Google\Protobuf\Internal\GPBUtil;
|
||||
* if (any.is(Foo.class)) {
|
||||
* foo = any.unpack(Foo.class);
|
||||
* }
|
||||
* Example 3: Pack and unpack a message in Python.
|
||||
* Example 3: Pack and unpack a message in Python.
|
||||
* foo = Foo(...)
|
||||
* any = Any()
|
||||
* any.Pack(foo)
|
||||
@ -36,7 +36,7 @@ use Google\Protobuf\Internal\GPBUtil;
|
||||
* if any.Is(Foo.DESCRIPTOR):
|
||||
* any.Unpack(foo)
|
||||
* ...
|
||||
* Example 4: Pack and unpack a message in Go
|
||||
* Example 4: Pack and unpack a message in Go
|
||||
* foo := &pb.Foo{...}
|
||||
* any, err := anypb.New(foo)
|
||||
* if err != nil {
|
||||
@ -53,7 +53,6 @@ use Google\Protobuf\Internal\GPBUtil;
|
||||
* in the type URL, for example "foo.bar.com/x/y.z" will yield type
|
||||
* name "y.z".
|
||||
* JSON
|
||||
* ====
|
||||
* The JSON representation of an `Any` value uses the regular
|
||||
* representation of the deserialized, embedded message, with an
|
||||
* additional field `@type` which contains the type URL. Example:
|
||||
|
@ -159,7 +159,7 @@ class DescriptorPool
|
||||
if (is_null($subdesc)) {
|
||||
trigger_error(
|
||||
'proto not added: ' . $proto
|
||||
. " for " . $desc->getFullName(), E_ERROR);
|
||||
. " for " . $desc->getFullName(), E_USER_ERROR);
|
||||
}
|
||||
$field->setMessageType($subdesc);
|
||||
break;
|
||||
|
@ -284,12 +284,12 @@ class GPBUtil
|
||||
"function"=>0, "global"=>0, "goto"=>0, "if"=>0, "implements"=>0,
|
||||
"include"=>0, "include_once"=>0, "instanceof"=>0, "insteadof"=>0,
|
||||
"interface"=>0, "isset"=>0, "list"=>0, "match"=>0, "namespace"=>0,
|
||||
"new"=>0, "or"=>0, "print"=>0, "private"=>0, "protected"=>0,
|
||||
"public"=>0, "require"=>0, "require_once"=>0, "return"=>0,
|
||||
"static"=>0, "switch"=>0, "throw"=>0, "trait"=>0, "try"=>0,
|
||||
"unset"=>0, "use"=>0, "var"=>0, "while"=>0, "xor"=>0, "yield"=>0,
|
||||
"int"=>0, "float"=>0, "bool"=>0, "string"=>0, "true"=>0, "false"=>0,
|
||||
"null"=>0, "void"=>0, "iterable"=>0
|
||||
"new"=>0, "or"=>0, "parent"=>0, "print"=>0, "private"=>0,
|
||||
"protected"=>0,"public"=>0, "require"=>0, "require_once"=>0,
|
||||
"return"=>0, "self"=>0, "static"=>0, "switch"=>0, "throw"=>0,
|
||||
"trait"=>0, "try"=>0,"unset"=>0, "use"=>0, "var"=>0, "while"=>0,
|
||||
"xor"=>0, "yield"=>0, "int"=>0, "float"=>0, "bool"=>0, "string"=>0,
|
||||
"true"=>0, "false"=>0, "null"=>0, "void"=>0, "iterable"=>0
|
||||
);
|
||||
|
||||
if (array_key_exists(strtolower($classname), $reserved_words)) {
|
||||
|
@ -135,6 +135,7 @@ class MapField implements \ArrayAccess, \IteratorAggregate, \Countable
|
||||
* @return object The stored element at given key.
|
||||
* @throws \ErrorException Invalid type for index.
|
||||
* @throws \ErrorException Non-existing index.
|
||||
* @todo need to add return type mixed (require update php version to 8.0)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function offsetGet($key)
|
||||
@ -153,6 +154,7 @@ class MapField implements \ArrayAccess, \IteratorAggregate, \Countable
|
||||
* @throws \ErrorException Invalid type for key.
|
||||
* @throws \ErrorException Invalid type for value.
|
||||
* @throws \ErrorException Non-existing key.
|
||||
* @todo need to add return type void (require update php version to 7.1)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function offsetSet($key, $value)
|
||||
@ -212,6 +214,7 @@ class MapField implements \ArrayAccess, \IteratorAggregate, \Countable
|
||||
* @param object $key The key of the element to be removed.
|
||||
* @return void
|
||||
* @throws \ErrorException Invalid type for key.
|
||||
* @todo need to add return type void (require update php version to 7.1)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function offsetUnset($key)
|
||||
|
@ -67,6 +67,7 @@ class MapFieldIter implements \Iterator
|
||||
* Reset the status of the iterator
|
||||
*
|
||||
* @return void
|
||||
* @todo need to add return type void (require update php version to 7.1)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function rewind()
|
||||
@ -78,6 +79,7 @@ class MapFieldIter implements \Iterator
|
||||
* Return the element at the current position.
|
||||
*
|
||||
* @return object The element at the current position.
|
||||
* @todo need to add return type mixed (require update php version to 8.0)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function current()
|
||||
@ -89,6 +91,7 @@ class MapFieldIter implements \Iterator
|
||||
* Return the current key.
|
||||
*
|
||||
* @return object The current key.
|
||||
* @todo need to add return type mixed (require update php version to 8.0)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function key()
|
||||
@ -119,6 +122,7 @@ class MapFieldIter implements \Iterator
|
||||
* Move to the next position.
|
||||
*
|
||||
* @return void
|
||||
* @todo need to add return type void (require update php version to 7.1)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function next()
|
||||
|
@ -423,7 +423,7 @@ class Message
|
||||
}
|
||||
break;
|
||||
case GPBType::GROUP:
|
||||
trigger_error("Not implemented.", E_ERROR);
|
||||
trigger_error("Not implemented.", E_USER_ERROR);
|
||||
break;
|
||||
case GPBType::MESSAGE:
|
||||
if ($field->isMap()) {
|
||||
|
@ -121,6 +121,7 @@ class RepeatedField implements \ArrayAccess, \IteratorAggregate, \Countable
|
||||
* @return object The stored element at given index.
|
||||
* @throws \ErrorException Invalid type for index.
|
||||
* @throws \ErrorException Non-existing index.
|
||||
* @todo need to add return type mixed (require update php version to 8.0)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function offsetGet($offset)
|
||||
@ -139,6 +140,7 @@ class RepeatedField implements \ArrayAccess, \IteratorAggregate, \Countable
|
||||
* @throws \ErrorException Invalid type for index.
|
||||
* @throws \ErrorException Non-existing index.
|
||||
* @throws \ErrorException Incorrect type of the element.
|
||||
* @todo need to add return type void (require update php version to 7.1)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function offsetSet($offset, $value)
|
||||
@ -211,6 +213,7 @@ class RepeatedField implements \ArrayAccess, \IteratorAggregate, \Countable
|
||||
* @throws \ErrorException Invalid type for index.
|
||||
* @throws \ErrorException The element to be removed is not at the end of the
|
||||
* RepeatedField.
|
||||
* @todo need to add return type void (require update php version to 7.1)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function offsetUnset($offset)
|
||||
|
@ -70,6 +70,7 @@ class RepeatedFieldIter implements \Iterator
|
||||
* Reset the status of the iterator
|
||||
*
|
||||
* @return void
|
||||
* @todo need to add return type void (require update php version to 7.1)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function rewind()
|
||||
@ -81,6 +82,7 @@ class RepeatedFieldIter implements \Iterator
|
||||
* Return the element at the current position.
|
||||
*
|
||||
* @return object The element at the current position.
|
||||
* @todo need to add return type mixed (require update php version to 8.0)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function current()
|
||||
@ -92,6 +94,7 @@ class RepeatedFieldIter implements \Iterator
|
||||
* Return the current position.
|
||||
*
|
||||
* @return integer The current position.
|
||||
* @todo need to add return type mixed (require update php version to 8.0)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function key()
|
||||
@ -103,6 +106,7 @@ class RepeatedFieldIter implements \Iterator
|
||||
* Move to the next position.
|
||||
*
|
||||
* @return void
|
||||
* @todo need to add return type void (require update php version to 7.1)
|
||||
*/
|
||||
#[\ReturnTypeWillChange]
|
||||
public function next()
|
||||
|
@ -19,8 +19,8 @@ class Location extends \Google\Protobuf\Internal\Message
|
||||
* Identifies which part of the FileDescriptorProto was defined at this
|
||||
* location.
|
||||
* Each element is a field number or an index. They form a path from
|
||||
* the root FileDescriptorProto to the place where the definition. For
|
||||
* example, this path:
|
||||
* the root FileDescriptorProto to the place where the definition occurs.
|
||||
* For example, this path:
|
||||
* [ 4, 3, 2, 7, 1 ]
|
||||
* refers to:
|
||||
* file.message_type(3) // 4, 3
|
||||
@ -111,8 +111,8 @@ class Location extends \Google\Protobuf\Internal\Message
|
||||
* Identifies which part of the FileDescriptorProto was defined at this
|
||||
* location.
|
||||
* Each element is a field number or an index. They form a path from
|
||||
* the root FileDescriptorProto to the place where the definition. For
|
||||
* example, this path:
|
||||
* the root FileDescriptorProto to the place where the definition occurs.
|
||||
* For example, this path:
|
||||
* [ 4, 3, 2, 7, 1 ]
|
||||
* refers to:
|
||||
* file.message_type(3) // 4, 3
|
||||
@ -185,8 +185,8 @@ class Location extends \Google\Protobuf\Internal\Message
|
||||
* Identifies which part of the FileDescriptorProto was defined at this
|
||||
* location.
|
||||
* Each element is a field number or an index. They form a path from
|
||||
* the root FileDescriptorProto to the place where the definition. For
|
||||
* example, this path:
|
||||
* the root FileDescriptorProto to the place where the definition occurs.
|
||||
* For example, this path:
|
||||
* [ 4, 3, 2, 7, 1 ]
|
||||
* refers to:
|
||||
* file.message_type(3) // 4, 3
|
||||
@ -216,8 +216,8 @@ class Location extends \Google\Protobuf\Internal\Message
|
||||
* Identifies which part of the FileDescriptorProto was defined at this
|
||||
* location.
|
||||
* Each element is a field number or an index. They form a path from
|
||||
* the root FileDescriptorProto to the place where the definition. For
|
||||
* example, this path:
|
||||
* the root FileDescriptorProto to the place where the definition occurs.
|
||||
* For example, this path:
|
||||
* [ 4, 3, 2, 7, 1 ]
|
||||
* refers to:
|
||||
* file.message_type(3) // 4, 3
|
||||
|
@ -934,6 +934,7 @@ class GeneratedClassTest extends TestBase
|
||||
$m = new \Lower\PBnamespace();
|
||||
$m = new \Lower\PBnew();
|
||||
$m = new \Lower\PBor();
|
||||
$m = new \Lower\PBparent();
|
||||
$m = new \Lower\PBprint();
|
||||
$m = new \Lower\PBprivate();
|
||||
$m = new \Lower\PBprotected();
|
||||
@ -941,6 +942,7 @@ class GeneratedClassTest extends TestBase
|
||||
$m = new \Lower\PBrequire();
|
||||
$m = new \Lower\PBrequire_once();
|
||||
$m = new \Lower\PBreturn();
|
||||
$m = new \Lower\PBself();
|
||||
$m = new \Lower\PBstatic();
|
||||
$m = new \Lower\PBswitch();
|
||||
$m = new \Lower\PBthrow();
|
||||
@ -1012,6 +1014,7 @@ class GeneratedClassTest extends TestBase
|
||||
$m = new \Upper\PBNAMESPACE();
|
||||
$m = new \Upper\PBNEW();
|
||||
$m = new \Upper\PBOR();
|
||||
$m = new \Upper\PBPARENT();
|
||||
$m = new \Upper\PBPRINT();
|
||||
$m = new \Upper\PBPRIVATE();
|
||||
$m = new \Upper\PBPROTECTED();
|
||||
@ -1019,6 +1022,7 @@ class GeneratedClassTest extends TestBase
|
||||
$m = new \Upper\PBREQUIRE();
|
||||
$m = new \Upper\PBREQUIRE_ONCE();
|
||||
$m = new \Upper\PBRETURN();
|
||||
$m = new \Upper\PBSELF();
|
||||
$m = new \Upper\PBSTATIC();
|
||||
$m = new \Upper\PBSWITCH();
|
||||
$m = new \Upper\PBTHROW();
|
||||
@ -1090,6 +1094,7 @@ class GeneratedClassTest extends TestBase
|
||||
$m = new \Lower_enum\PBnamespace();
|
||||
$m = new \Lower_enum\PBnew();
|
||||
$m = new \Lower_enum\PBor();
|
||||
$m = new \Lower_enum\PBparent();
|
||||
$m = new \Lower_enum\PBprint();
|
||||
$m = new \Lower_enum\PBprivate();
|
||||
$m = new \Lower_enum\PBprotected();
|
||||
@ -1097,6 +1102,7 @@ class GeneratedClassTest extends TestBase
|
||||
$m = new \Lower_enum\PBrequire();
|
||||
$m = new \Lower_enum\PBrequire_once();
|
||||
$m = new \Lower_enum\PBreturn();
|
||||
$m = new \Lower_enum\PBself();
|
||||
$m = new \Lower_enum\PBstatic();
|
||||
$m = new \Lower_enum\PBswitch();
|
||||
$m = new \Lower_enum\PBthrow();
|
||||
@ -1168,6 +1174,7 @@ class GeneratedClassTest extends TestBase
|
||||
$m = new \Upper_enum\PBNAMESPACE();
|
||||
$m = new \Upper_enum\PBNEW();
|
||||
$m = new \Upper_enum\PBOR();
|
||||
$m = new \Upper_enum\PBPARENT();
|
||||
$m = new \Upper_enum\PBPRINT();
|
||||
$m = new \Upper_enum\PBPRIVATE();
|
||||
$m = new \Upper_enum\PBPROTECTED();
|
||||
@ -1175,6 +1182,7 @@ class GeneratedClassTest extends TestBase
|
||||
$m = new \Upper_enum\PBREQUIRE();
|
||||
$m = new \Upper_enum\PBREQUIRE_ONCE();
|
||||
$m = new \Upper_enum\PBRETURN();
|
||||
$m = new \Upper_enum\PBSELF();
|
||||
$m = new \Upper_enum\PBSTATIC();
|
||||
$m = new \Upper_enum\PBSWITCH();
|
||||
$m = new \Upper_enum\PBTHROW();
|
||||
@ -1273,6 +1281,8 @@ class GeneratedClassTest extends TestBase
|
||||
$m = \Lower_enum_value\NotAllowed::null;
|
||||
$m = \Lower_enum_value\NotAllowed::void;
|
||||
$m = \Lower_enum_value\NotAllowed::iterable;
|
||||
$m = \Lower_enum_value\NotAllowed::parent;
|
||||
$m = \Lower_enum_value\NotAllowed::self;
|
||||
|
||||
$m = \Upper_enum_value\NotAllowed::PBABSTRACT;
|
||||
$m = \Upper_enum_value\NotAllowed::PBAND;
|
||||
@ -1351,6 +1361,8 @@ class GeneratedClassTest extends TestBase
|
||||
$m = \Upper_enum_value\NotAllowed::NULL;
|
||||
$m = \Upper_enum_value\NotAllowed::VOID;
|
||||
$m = \Upper_enum_value\NotAllowed::ITERABLE;
|
||||
$m = \Upper_enum_value\NotAllowed::PARENT;
|
||||
$m = \Upper_enum_value\NotAllowed::SELF;
|
||||
|
||||
$this->assertTrue(true);
|
||||
}
|
||||
|
@ -52,6 +52,7 @@ enum match { ZERO47 = 0; }
|
||||
enum namespace { ZERO48 = 0; }
|
||||
enum new { ZERO49 = 0; }
|
||||
enum or { ZERO50 = 0; }
|
||||
enum parent { ZERO78 = 0; }
|
||||
enum print { ZERO51 = 0; }
|
||||
enum private { ZERO52 = 0; }
|
||||
enum protected { ZERO53 = 0; }
|
||||
@ -59,6 +60,7 @@ enum public { ZERO54 = 0; }
|
||||
enum require { ZERO55 = 0; }
|
||||
enum require_once { ZERO56 = 0; }
|
||||
enum return { ZERO57 = 0; }
|
||||
enum self { ZERO79 = 0; }
|
||||
enum static { ZERO58 = 0; }
|
||||
enum switch { ZERO59 = 0; }
|
||||
enum throw { ZERO60 = 0; }
|
||||
|
@ -52,6 +52,7 @@ enum MATCH { ZERO47 = 0; }
|
||||
enum NAMESPACE { ZERO48 = 0; }
|
||||
enum NEW { ZERO49 = 0; }
|
||||
enum OR { ZERO50 = 0; }
|
||||
enum PARENT { ZERO78 = 0; }
|
||||
enum PRINT { ZERO51 = 0; }
|
||||
enum PRIVATE { ZERO52 = 0; }
|
||||
enum PROTECTED { ZERO53 = 0; }
|
||||
@ -59,6 +60,7 @@ enum PUBLIC { ZERO54 = 0; }
|
||||
enum REQUIRE { ZERO55 = 0; }
|
||||
enum REQUIRE_ONCE { ZERO56 = 0; }
|
||||
enum RETURN { ZERO57 = 0; }
|
||||
enum SELF { ZERO79 = 0; }
|
||||
enum STATIC { ZERO58 = 0; }
|
||||
enum SWITCH { ZERO59 = 0; }
|
||||
enum THROW { ZERO60 = 0; }
|
||||
|
@ -53,6 +53,7 @@ enum NotAllowed {
|
||||
namespace = 47;
|
||||
new = 48;
|
||||
or = 49;
|
||||
parent = 77;
|
||||
print = 50;
|
||||
private = 51;
|
||||
protected = 52;
|
||||
@ -60,6 +61,7 @@ enum NotAllowed {
|
||||
require = 54;
|
||||
require_once = 55;
|
||||
return = 56;
|
||||
self = 78;
|
||||
static = 57;
|
||||
switch = 58;
|
||||
throw = 59;
|
||||
|
@ -53,6 +53,7 @@ enum NotAllowed {
|
||||
NAMESPACE = 47;
|
||||
NEW = 48;
|
||||
OR = 49;
|
||||
PARENT = 77;
|
||||
PRINT = 50;
|
||||
PRIVATE = 51;
|
||||
PROTECTED = 52;
|
||||
@ -60,6 +61,7 @@ enum NotAllowed {
|
||||
REQUIRE = 54;
|
||||
REQUIRE_ONCE = 55;
|
||||
RETURN = 56;
|
||||
SELF = 78;
|
||||
STATIC = 57;
|
||||
SWITCH = 58;
|
||||
THROW = 59;
|
||||
|
@ -52,6 +52,7 @@ message match {}
|
||||
message namespace {}
|
||||
message new {}
|
||||
message or {}
|
||||
message parent {}
|
||||
message print {}
|
||||
message private {}
|
||||
message protected {}
|
||||
@ -59,6 +60,7 @@ message public {}
|
||||
message require {}
|
||||
message require_once {}
|
||||
message return {}
|
||||
message self {}
|
||||
message static {}
|
||||
message switch {}
|
||||
message throw {}
|
||||
|
@ -52,6 +52,7 @@ message MATCH {}
|
||||
message NAMESPACE {}
|
||||
message NEW {}
|
||||
message OR {}
|
||||
message PARENT {}
|
||||
message PRINT {}
|
||||
message PRIVATE {}
|
||||
message PROTECTED {}
|
||||
@ -59,6 +60,7 @@ message PUBLIC {}
|
||||
message REQUIRE {}
|
||||
message REQUIRE_ONCE {}
|
||||
message RETURN {}
|
||||
message SELF {}
|
||||
message STATIC {}
|
||||
message SWITCH {}
|
||||
message THROW {}
|
||||
|
protobuf.bzl
@ -391,6 +391,70 @@ internal_gen_well_known_protos_java = rule(
|
||||
},
|
||||
)
|
||||
|
||||
def _internal_gen_kt_protos(ctx):
|
||||
args = ctx.actions.args()
|
||||
|
||||
deps = [d[ProtoInfo] for d in ctx.attr.deps]
|
||||
|
||||
srcjar = ctx.actions.declare_file("{}.srcjar".format(ctx.attr.name))
|
||||
if ctx.attr.lite:
|
||||
out = "lite:%s" % srcjar.path
|
||||
else:
|
||||
out = srcjar
|
||||
|
||||
args.add("--kotlin_out", out)
|
||||
|
||||
descriptors = depset(
|
||||
transitive = [dep.transitive_descriptor_sets for dep in deps],
|
||||
)
|
||||
args.add_joined(
|
||||
"--descriptor_set_in",
|
||||
descriptors,
|
||||
join_with = ctx.configuration.host_path_separator,
|
||||
)
|
||||
|
||||
for dep in deps:
|
||||
if "." == dep.proto_source_root:
|
||||
args.add_all([src.path for src in dep.direct_sources])
|
||||
else:
|
||||
source_root = dep.proto_source_root
|
||||
offset = len(source_root) + 1 # + '/'.
|
||||
args.add_all([src.path[offset:] for src in dep.direct_sources])
|
||||
|
||||
ctx.actions.run(
|
||||
executable = ctx.executable._protoc,
|
||||
inputs = descriptors,
|
||||
outputs = [srcjar],
|
||||
arguments = [args],
|
||||
use_default_shell_env = True,
|
||||
)
|
||||
|
||||
return [
|
||||
DefaultInfo(
|
||||
files = depset([srcjar]),
|
||||
),
|
||||
]
|
||||
|
||||
internal_gen_kt_protos = rule(
|
||||
implementation = _internal_gen_kt_protos,
|
||||
attrs = {
|
||||
"deps": attr.label_list(
|
||||
mandatory = True,
|
||||
providers = [ProtoInfo],
|
||||
),
|
||||
"lite": attr.bool(
|
||||
default = False,
|
||||
),
|
||||
"_protoc": attr.label(
|
||||
executable = True,
|
||||
cfg = "exec",
|
||||
default = "//:protoc",
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
|
||||
def internal_copied_filegroup(name, srcs, strip_prefix, dest, **kwargs):
|
||||
"""Macro to copy files to a different directory and then create a filegroup.
|
||||
|
||||
|
protobuf_release.bzl (new file)
@ -0,0 +1,48 @@
|
||||
"""
|
||||
Generates package naming variables for use with rules_pkg.
|
||||
"""
|
||||
|
||||
load("@rules_pkg//:providers.bzl", "PackageVariablesInfo")
|
||||
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
|
||||
load(":protobuf_version.bzl", "PROTOBUF_VERSION")
|
||||
|
||||
def _package_naming_impl(ctx):
|
||||
values = {}
|
||||
values["version"] = PROTOBUF_VERSION
|
||||
|
||||
# infer from the current cpp toolchain.
|
||||
toolchain = find_cpp_toolchain(ctx)
|
||||
cpu = toolchain.cpu
|
||||
system_name = toolchain.target_gnu_system_name
|
||||
|
||||
# rename cpus to match what we want artifacts to be
|
||||
if cpu == "systemz":
|
||||
cpu = "s390_64"
|
||||
elif cpu == "aarch64":
|
||||
cpu = "aarch_64"
|
||||
|
||||
# use the system name to determine the os and then create platform names
|
||||
if "apple" in system_name:
|
||||
values["platform"] = "osx-" + cpu
|
||||
elif "linux" in system_name:
|
||||
values["platform"] = "linux-" + cpu
|
||||
elif "mingw" in system_name:
|
||||
if "cpu" == "x86_64":
|
||||
values["platform"] = "win64"
|
||||
else:
|
||||
values["platform"] = "win32"
|
||||
else:
|
||||
values["platform"] = "unknown"
|
||||
|
||||
return PackageVariablesInfo(values = values)
|
||||
|
||||
|
||||
package_naming = rule(
|
||||
implementation = _package_naming_impl,
|
||||
attrs = {
|
||||
# Necessary data dependency for find_cpp_toolchain.
|
||||
"_cc_toolchain": attr.label(default = Label("@bazel_tools//tools/cpp:current_cc_toolchain")),
|
||||
},
|
||||
toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
|
||||
incompatible_use_toolchain_transition = True,
|
||||
)
|
@ -30,4 +30,4 @@
|
||||
|
||||
# Copyright 2007 Google Inc. All Rights Reserved.
|
||||
|
||||
__version__ = '3.18.1'
|
||||
__version__ = '3.19.4'
|
||||
|
@ -667,7 +667,7 @@ class EnumDescriptor(_NestedDescriptorBase):
|
||||
full_name (str): Full name of the type, including package name
|
||||
and any enclosing type(s).
|
||||
|
||||
values (list[EnumValueDescriptors]): List of the values
|
||||
values (list[EnumValueDescriptor]): List of the values
|
||||
in this enum.
|
||||
values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`,
|
||||
but indexed by the "name" field of each EnumValueDescriptor.
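A minimal sketch of the attributes this docstring describes, using a stock well-known enum rather than anything generated by this change:

from google.protobuf import struct_pb2

enum_desc = struct_pb2.NullValue.DESCRIPTOR  # an EnumDescriptor

# `values` is a list of EnumValueDescriptor; the *_by_name/_by_number maps index it.
assert [v.name for v in enum_desc.values] == ['NULL_VALUE']
assert enum_desc.values_by_name['NULL_VALUE'].number == 0
assert enum_desc.values_by_number[0].name == 'NULL_VALUE'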
|
||||
|
@ -148,10 +148,7 @@ __author__ = 'tmarek@google.com (Torsten Marek)'
|
||||
import functools
|
||||
import re
|
||||
import types
|
||||
try:
|
||||
import unittest2 as unittest
|
||||
except ImportError:
|
||||
import unittest
|
||||
import unittest
|
||||
import uuid
|
||||
|
||||
try:
|
||||
|
@ -28,6 +28,7 @@
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#define PY_SSIZE_T_CLEAN
|
||||
#include <Python.h>
|
||||
|
||||
namespace google {
|
||||
|
@ -100,7 +100,7 @@ class DescriptorDatabaseTest(unittest.TestCase):
|
||||
self.assertEqual(file_desc_proto2, db.FindFileContainingSymbol(
|
||||
'protobuf_unittest.TestAllTypes.none_field'))
|
||||
|
||||
with self.assertRaisesRegexp(KeyError, r'\'protobuf_unittest\.NoneMessage\''):
|
||||
with self.assertRaisesRegex(KeyError, r'\'protobuf_unittest\.NoneMessage\''):
|
||||
db.FindFileContainingSymbol('protobuf_unittest.NoneMessage')
|
||||
|
||||
def testConflictRegister(self):
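The rename above is mechanical: assertRaisesRegexp is a deprecated alias that newer Python releases remove, while assertRaisesRegex is the supported name. A self-contained sketch of the supported spelling, using a throwaway test rather than anything from this file:

import unittest

class RegexAssertionDemo(unittest.TestCase):

  def test_missing_key_message(self):
    # Context-manager form of the supported assertion.
    with self.assertRaisesRegex(KeyError, r'missing'):
      {}['missing']

if __name__ == '__main__':
  unittest.main()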
|
||||
|
@ -654,11 +654,11 @@ class SecondaryDescriptorFromDescriptorDB(DescriptorPoolTestBase,
|
||||
enum_value.number = 0
|
||||
self.db.Add(file_proto)
|
||||
|
||||
self.assertRaisesRegexp(KeyError, 'SubMessage',
|
||||
self.pool.FindMessageTypeByName,
|
||||
'collector.ErrorMessage')
|
||||
self.assertRaisesRegexp(KeyError, 'SubMessage',
|
||||
self.pool.FindFileByName, 'error_file')
|
||||
self.assertRaisesRegex(KeyError, 'SubMessage',
|
||||
self.pool.FindMessageTypeByName,
|
||||
'collector.ErrorMessage')
|
||||
self.assertRaisesRegex(KeyError, 'SubMessage', self.pool.FindFileByName,
|
||||
'error_file')
|
||||
with self.assertRaises(KeyError) as exc:
|
||||
self.pool.FindFileByName('none_file')
|
||||
self.assertIn(str(exc.exception), ('\'none_file\'',
|
||||
|
@ -43,6 +43,15 @@ class EnumTypeWrapper(object):
|
||||
|
||||
DESCRIPTOR = None
|
||||
|
||||
# This is a type alias, which mypy typing stubs can type as
|
||||
# a genericized parameter constrained to an int, allowing subclasses
|
||||
# to be typed with more constraint in .pyi stubs
|
||||
# Eg.
|
||||
# def MyGeneratedEnum(Message):
|
||||
# ValueType = NewType('ValueType', int)
|
||||
# def Name(self, number: MyGeneratedEnum.ValueType) -> str
|
||||
ValueType = int
|
||||
|
||||
def __init__(self, enum_type):
|
||||
"""Inits EnumTypeWrapper with an EnumDescriptor."""
|
||||
self._enum_type = enum_type
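A small sketch of what the new ValueType alias enables; the Color names below are hypothetical, but the runtime behaviour follows directly from the `ValueType = int` line added above.

from google.protobuf.internal import enum_type_wrapper

# At runtime the alias is plain int, so existing callers are unaffected.
assert enum_type_wrapper.EnumTypeWrapper.ValueType is int

# In a .pyi stub, a generated enum could narrow the alias, e.g.:
#   class Color:
#     ValueType = NewType('ValueType', int)
#     RED: 'Color.ValueType'
# letting type checkers reject plain ints where Color values are expected.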
|
||||
|
@ -39,10 +39,7 @@ further ensures that we can use Python protocol message objects as we expect.
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
try:
|
||||
import unittest2 as unittest #PY26
|
||||
except ImportError:
|
||||
import unittest
|
||||
import unittest
|
||||
|
||||
from google.protobuf.internal import test_bad_identifiers_pb2
|
||||
from google.protobuf import unittest_custom_options_pb2
|
||||
|
@ -100,10 +100,8 @@ class JsonFormatBase(unittest.TestCase):
|
||||
|
||||
def CheckError(self, text, error_message):
|
||||
message = json_format_proto3_pb2.TestMessage()
|
||||
self.assertRaisesRegexp(
|
||||
json_format.ParseError,
|
||||
error_message,
|
||||
json_format.Parse, text, message)
|
||||
self.assertRaisesRegex(json_format.ParseError, error_message,
|
||||
json_format.Parse, text, message)
|
||||
|
||||
|
||||
class JsonFormatTest(JsonFormatBase):
|
||||
@ -812,9 +810,8 @@ class JsonFormatTest(JsonFormatBase):
|
||||
json_format.Parse('{"messageValue": {}}', parsed_message)
|
||||
self.assertTrue(parsed_message.HasField('message_value'))
|
||||
# Null is not allowed to be used as an element in repeated field.
|
||||
self.assertRaisesRegexp(
|
||||
json_format.ParseError,
|
||||
r'Failed to parse repeatedInt32Value field: '
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError, r'Failed to parse repeatedInt32Value field: '
|
||||
r'null is not allowed to be used as an element in a repeated field '
|
||||
r'at TestMessage.repeatedInt32Value\[1\].', json_format.Parse,
|
||||
'{"repeatedInt32Value":[1, null]}', parsed_message)
|
||||
@ -901,7 +898,7 @@ class JsonFormatTest(JsonFormatBase):
|
||||
json_format.Parse(text, message)
|
||||
# Proto2 does not accept unknown enums.
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
self.assertRaisesRegexp(
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError,
|
||||
'Failed to parse optionalNestedEnum field: Invalid enum value 12345 '
|
||||
'for enum type protobuf_unittest.TestAllTypes.NestedEnum at '
|
||||
@ -1019,28 +1016,25 @@ class JsonFormatTest(JsonFormatBase):
|
||||
def testInvalidMap(self):
|
||||
message = json_format_proto3_pb2.TestMap()
|
||||
text = '{"int32Map": {"null": 2, "2": 3}}'
|
||||
self.assertRaisesRegexp(
|
||||
json_format.ParseError,
|
||||
'Failed to parse int32Map field: invalid literal',
|
||||
json_format.Parse, text, message)
|
||||
self.assertRaisesRegex(json_format.ParseError,
|
||||
'Failed to parse int32Map field: invalid literal',
|
||||
json_format.Parse, text, message)
|
||||
text = '{"int32Map": {1: 2, "2": 3}}'
|
||||
self.assertRaisesRegexp(
|
||||
json_format.ParseError,
|
||||
(r'Failed to load JSON: Expecting property name'
|
||||
r'( enclosed in double quotes)?: line 1'),
|
||||
json_format.Parse, text, message)
|
||||
self.assertRaisesRegex(json_format.ParseError,
|
||||
(r'Failed to load JSON: Expecting property name'
|
||||
r'( enclosed in double quotes)?: line 1'),
|
||||
json_format.Parse, text, message)
|
||||
text = '{"boolMap": {"null": 1}}'
|
||||
self.assertRaisesRegexp(
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError,
|
||||
'Failed to parse boolMap field: Expected "true" or "false", not null at '
|
||||
'TestMap.boolMap.key', json_format.Parse, text, message)
|
||||
text = r'{"stringMap": {"a": 3, "\u0061": 2}}'
|
||||
self.assertRaisesRegexp(
|
||||
json_format.ParseError,
|
||||
'Failed to load JSON: duplicate key a',
|
||||
json_format.Parse, text, message)
|
||||
self.assertRaisesRegex(json_format.ParseError,
|
||||
'Failed to load JSON: duplicate key a',
|
||||
json_format.Parse, text, message)
|
||||
text = r'{"stringMap": 0}'
|
||||
self.assertRaisesRegexp(
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError,
|
||||
'Failed to parse stringMap field: Map field string_map must be '
|
||||
'in a dict which is 0 at TestMap.stringMap.', json_format.Parse, text,
|
||||
@ -1055,27 +1049,24 @@ class JsonFormatTest(JsonFormatBase):
|
||||
' format \'%Y-%m-%dT%H:%M:%S\' at TestTimestamp.value.',
|
||||
json_format.Parse, text, message)
|
||||
text = '{"value": "1970-01-01T00:00:00.0123456789012Z"}'
|
||||
self.assertRaisesRegexp(
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError,
|
||||
'nanos 0123456789012 more than 9 fractional digits.',
|
||||
json_format.Parse, text, message)
|
||||
'nanos 0123456789012 more than 9 fractional digits.', json_format.Parse,
|
||||
text, message)
|
||||
text = '{"value": "1972-01-01T01:00:00.01+08"}'
|
||||
self.assertRaisesRegexp(
|
||||
json_format.ParseError,
|
||||
(r'Invalid timezone offset value: \+08.'),
|
||||
json_format.Parse, text, message)
|
||||
self.assertRaisesRegex(json_format.ParseError,
|
||||
(r'Invalid timezone offset value: \+08.'),
|
||||
json_format.Parse, text, message)
|
||||
# Time smaller than minimum time.
|
||||
text = '{"value": "0000-01-01T00:00:00Z"}'
|
||||
self.assertRaisesRegexp(
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError,
|
||||
'Failed to parse value field: year (0 )?is out of range.',
|
||||
json_format.Parse, text, message)
|
||||
# Time bigger than maximum time.
|
||||
message.value.seconds = 253402300800
|
||||
self.assertRaisesRegexp(
|
||||
OverflowError,
|
||||
'date value out of range',
|
||||
json_format.MessageToJson, message)
|
||||
self.assertRaisesRegex(OverflowError, 'date value out of range',
|
||||
json_format.MessageToJson, message)
|
||||
# Lower case t does not accept.
|
||||
text = '{"value": "0001-01-01t00:00:00Z"}'
|
||||
with self.assertRaises(json_format.ParseError) as e:
|
||||
@ -1097,11 +1088,10 @@ class JsonFormatTest(JsonFormatBase):
|
||||
def testInvalidListValue(self):
|
||||
message = json_format_proto3_pb2.TestListValue()
|
||||
text = '{"value": 1234}'
|
||||
self.assertRaisesRegexp(
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError,
|
||||
r'Failed to parse value field: ListValue must be in \[\] which is '
|
||||
'1234 at TestListValue.value.',
|
||||
json_format.Parse, text, message)
|
||||
'1234 at TestListValue.value.', json_format.Parse, text, message)
|
||||
|
||||
class UnknownClass(object):
|
||||
|
||||
@ -1116,16 +1106,15 @@ class JsonFormatTest(JsonFormatBase):
|
||||
def testInvalidStruct(self):
|
||||
message = json_format_proto3_pb2.TestStruct()
|
||||
text = '{"value": 1234}'
|
||||
self.assertRaisesRegexp(
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError,
|
||||
'Failed to parse value field: Struct must be in a dict which is '
|
||||
'1234 at TestStruct.value',
|
||||
json_format.Parse, text, message)
|
||||
'1234 at TestStruct.value', json_format.Parse, text, message)
|
||||
|
||||
def testTimestampInvalidStringValue(self):
|
||||
message = json_format_proto3_pb2.TestTimestamp()
|
||||
text = '{"value": {"foo": 123}}'
|
||||
self.assertRaisesRegexp(
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError,
|
||||
r"Timestamp JSON value not a string: {u?'foo': 123}", json_format.Parse,
|
||||
text, message)
|
||||
@ -1133,15 +1122,14 @@ class JsonFormatTest(JsonFormatBase):
|
||||
def testDurationInvalidStringValue(self):
|
||||
message = json_format_proto3_pb2.TestDuration()
|
||||
text = '{"value": {"foo": 123}}'
|
||||
self.assertRaisesRegexp(
|
||||
json_format.ParseError,
|
||||
r"Duration JSON value not a string: {u?'foo': 123}", json_format.Parse,
|
||||
text, message)
|
||||
self.assertRaisesRegex(json_format.ParseError,
|
||||
r"Duration JSON value not a string: {u?'foo': 123}",
|
||||
json_format.Parse, text, message)
|
||||
|
||||
def testFieldMaskInvalidStringValue(self):
|
||||
message = json_format_proto3_pb2.TestFieldMask()
|
||||
text = '{"value": {"foo": 123}}'
|
||||
self.assertRaisesRegexp(
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError,
|
||||
r"FieldMask JSON value not a string: {u?'foo': 123}", json_format.Parse,
|
||||
text, message)
|
||||
@ -1149,10 +1137,7 @@ class JsonFormatTest(JsonFormatBase):
|
||||
def testInvalidAny(self):
|
||||
message = any_pb2.Any()
|
||||
text = '{"@type": "type.googleapis.com/google.protobuf.Int32Value"}'
|
||||
self.assertRaisesRegexp(
|
||||
KeyError,
|
||||
'value',
|
||||
json_format.Parse, text, message)
|
||||
self.assertRaisesRegex(KeyError, 'value', json_format.Parse, text, message)
|
||||
text = '{"value": 1234}'
|
||||
self.assertRaisesRegex(json_format.ParseError,
|
||||
'@type is missing when parsing any message at Any',
|
||||
@ -1247,12 +1232,10 @@ class JsonFormatTest(JsonFormatBase):
|
||||
def __repr__(self):
|
||||
return 'v'
|
||||
message = json_format_proto3_pb2.TestValue()
|
||||
self.assertRaisesRegexp(
|
||||
self.assertRaisesRegex(
|
||||
json_format.ParseError,
|
||||
r"Value v has unexpected type <class '.*\.UnknownClass'>.",
|
||||
json_format.ParseDict,
|
||||
{'value': UnknownClass()},
|
||||
message)
|
||||
json_format.ParseDict, {'value': UnknownClass()}, message)
|
||||
|
||||
def testMessageToDict(self):
|
||||
message = json_format_proto3_pb2.TestMessage()
|
||||
|
@ -75,21 +75,19 @@ from google.protobuf.internal import _parameterized
|
||||
|
||||
UCS2_MAXUNICODE = 65535
|
||||
|
||||
|
||||
warnings.simplefilter('error', DeprecationWarning)
|
||||
|
||||
|
||||
@_parameterized.named_parameters(
|
||||
('_proto2', unittest_pb2),
|
||||
('_proto3', unittest_proto3_arena_pb2))
|
||||
@_parameterized.named_parameters(('_proto2', unittest_pb2),
|
||||
('_proto3', unittest_proto3_arena_pb2))
|
||||
@testing_refleaks.TestCase
|
||||
class MessageTest(unittest.TestCase):
|
||||
|
||||
def testBadUtf8String(self, message_module):
|
||||
if api_implementation.Type() != 'python':
|
||||
self.skipTest("Skipping testBadUtf8String, currently only the python "
|
||||
"api implementation raises UnicodeDecodeError when a "
|
||||
"string field contains bad utf-8.")
|
||||
self.skipTest('Skipping testBadUtf8String, currently only the python '
|
||||
'api implementation raises UnicodeDecodeError when a '
|
||||
'string field contains bad utf-8.')
|
||||
bad_utf8_data = test_util.GoldenFileData('bad_utf8_string')
|
||||
with self.assertRaises(UnicodeDecodeError) as context:
|
||||
message_module.TestAllTypes.FromString(bad_utf8_data)
|
||||
@ -100,8 +98,7 @@ class MessageTest(unittest.TestCase):
|
||||
# and doesn't preserve unknown fields, so for proto3 we use a golden
|
||||
# message that doesn't have these fields set.
|
||||
if message_module is unittest_pb2:
|
||||
golden_data = test_util.GoldenFileData(
|
||||
'golden_message_oneof_implemented')
|
||||
golden_data = test_util.GoldenFileData('golden_message_oneof_implemented')
|
||||
else:
|
||||
golden_data = test_util.GoldenFileData('golden_message_proto3')
|
||||
|
||||
@ -440,8 +437,7 @@ class MessageTest(unittest.TestCase):
|
||||
except TypeError:
|
||||
pass
|
||||
self.assertEqual(2, len(msg.repeated_nested_message))
|
||||
self.assertEqual([1, 2],
|
||||
[m.bb for m in msg.repeated_nested_message])
|
||||
self.assertEqual([1, 2], [m.bb for m in msg.repeated_nested_message])
|
||||
|
||||
def testInsertRepeatedCompositeField(self, message_module):
|
||||
msg = message_module.TestAllTypes()
|
||||
@ -463,22 +459,22 @@ class MessageTest(unittest.TestCase):
|
||||
self.assertEqual(5, len(msg.repeated_nested_message))
|
||||
self.assertEqual([-1000, 2, -1, 1, 3],
|
||||
[m.bb for m in msg.repeated_nested_message])
|
||||
self.assertEqual(str(msg),
|
||||
'repeated_nested_message {\n'
|
||||
' bb: -1000\n'
|
||||
'}\n'
|
||||
'repeated_nested_message {\n'
|
||||
' bb: 2\n'
|
||||
'}\n'
|
||||
'repeated_nested_message {\n'
|
||||
' bb: -1\n'
|
||||
'}\n'
|
||||
'repeated_nested_message {\n'
|
||||
' bb: 1\n'
|
||||
'}\n'
|
||||
'repeated_nested_message {\n'
|
||||
' bb: 3\n'
|
||||
'}\n')
|
||||
self.assertEqual(
|
||||
str(msg), 'repeated_nested_message {\n'
|
||||
' bb: -1000\n'
|
||||
'}\n'
|
||||
'repeated_nested_message {\n'
|
||||
' bb: 2\n'
|
||||
'}\n'
|
||||
'repeated_nested_message {\n'
|
||||
' bb: -1\n'
|
||||
'}\n'
|
||||
'repeated_nested_message {\n'
|
||||
' bb: 1\n'
|
||||
'}\n'
|
||||
'repeated_nested_message {\n'
|
||||
' bb: 3\n'
|
||||
'}\n')
|
||||
self.assertEqual(sub_msg.bb, 1)
|
||||
|
||||
def testMergeFromRepeatedField(self, message_module):
|
||||
@ -497,8 +493,7 @@ class MessageTest(unittest.TestCase):
|
||||
self.assertEqual(4, len(msg.repeated_int32))
|
||||
|
||||
msg.repeated_nested_message.MergeFrom(other_msg.repeated_nested_message)
|
||||
self.assertEqual([1, 2, 3, 4],
|
||||
[m.bb for m in msg.repeated_nested_message])
|
||||
self.assertEqual([1, 2, 3, 4], [m.bb for m in msg.repeated_nested_message])
|
||||
|
||||
def testAddWrongRepeatedNestedField(self, message_module):
|
||||
msg = message_module.TestAllTypes()
|
||||
@ -543,8 +538,7 @@ class MessageTest(unittest.TestCase):
|
||||
msg.repeated_nested_message.add(bb=3)
|
||||
msg.repeated_nested_message.add(bb=4)
|
||||
|
||||
self.assertEqual([1, 2, 3, 4],
|
||||
[m.bb for m in msg.repeated_nested_message])
|
||||
self.assertEqual([1, 2, 3, 4], [m.bb for m in msg.repeated_nested_message])
|
||||
self.assertEqual([4, 3, 2, 1],
|
||||
[m.bb for m in reversed(msg.repeated_nested_message)])
|
||||
self.assertEqual([4, 3, 2, 1],
|
||||
@ -627,8 +621,9 @@ class MessageTest(unittest.TestCase):
|
||||
self.assertEqual(message.repeated_nested_message[3].bb, 4)
|
||||
self.assertEqual(message.repeated_nested_message[4].bb, 5)
|
||||
self.assertEqual(message.repeated_nested_message[5].bb, 6)
|
||||
self.assertEqual(str(message.repeated_nested_message),
|
||||
'[bb: 1\n, bb: 2\n, bb: 3\n, bb: 4\n, bb: 5\n, bb: 6\n]')
|
||||
self.assertEqual(
|
||||
str(message.repeated_nested_message),
|
||||
'[bb: 1\n, bb: 2\n, bb: 3\n, bb: 4\n, bb: 5\n, bb: 6\n]')
|
||||
|
||||
def testSortingRepeatedCompositeFieldsStable(self, message_module):
|
||||
"""Check passing a custom comparator to sort a repeated composite field."""
|
||||
@ -642,18 +637,16 @@ class MessageTest(unittest.TestCase):
|
||||
message.repeated_nested_message.add().bb = 24
|
||||
message.repeated_nested_message.add().bb = 10
|
||||
message.repeated_nested_message.sort(key=lambda z: z.bb // 10)
|
||||
self.assertEqual(
|
||||
[13, 11, 10, 21, 20, 24, 33],
|
||||
[n.bb for n in message.repeated_nested_message])
|
||||
self.assertEqual([13, 11, 10, 21, 20, 24, 33],
|
||||
[n.bb for n in message.repeated_nested_message])
|
||||
|
||||
# Make sure that for the C++ implementation, the underlying fields
|
||||
# are actually reordered.
|
||||
pb = message.SerializeToString()
|
||||
message.Clear()
|
||||
message.MergeFromString(pb)
|
||||
self.assertEqual(
|
||||
[13, 11, 10, 21, 20, 24, 33],
|
||||
[n.bb for n in message.repeated_nested_message])
|
||||
self.assertEqual([13, 11, 10, 21, 20, 24, 33],
|
||||
[n.bb for n in message.repeated_nested_message])
def testRepeatedCompositeFieldSortArguments(self, message_module):
"""Check sorting a repeated composite field using list.sort() arguments."""
@ -826,7 +819,7 @@ class MessageTest(unittest.TestCase):
self.assertTrue(m.HasField('oneof_uint32'))
self.assertFalse(m.HasField('oneof_string'))

m.oneof_string = ""
m.oneof_string = ''
self.assertEqual('oneof_string', m.WhichOneof('oneof_field'))
self.assertTrue(m.HasField('oneof_string'))
self.assertFalse(m.HasField('oneof_uint32'))
@ -973,7 +966,9 @@ class MessageTest(unittest.TestCase):

def testAssignByteStringToUnicodeField(self, message_module):
"""Assigning a byte string to a string field should result
in the value being converted to a Unicode string."""

in the value being converted to a Unicode string.
"""
m = message_module.TestAllTypes()
m.optional_string = str('')
self.assertIsInstance(m.optional_string, str)
@ -1001,8 +996,7 @@ class MessageTest(unittest.TestCase):
with self.assertRaises(NameError) as _:
m.repeated_int32.extend(a for i in range(10))  # pylint: disable=undefined-variable
with self.assertRaises(NameError) as _:
m.repeated_nested_enum.extend(
a for i in range(10))  # pylint: disable=undefined-variable
m.repeated_nested_enum.extend(a for i in range(10))  # pylint: disable=undefined-variable

FALSY_VALUES = [None, False, 0, 0.0, b'', u'', bytearray(), [], {}, set()]

@ -1179,14 +1173,12 @@ class MessageTest(unittest.TestCase):
pickle.dumps(m.repeated_int32, pickle.HIGHEST_PROTOCOL)

def testSortEmptyRepeatedCompositeContainer(self, message_module):
"""Exercise a scenario that has led to segfaults in the past.
"""
"""Exercise a scenario that has led to segfaults in the past."""
m = message_module.TestAllTypes()
m.repeated_nested_message.sort()

def testHasFieldOnRepeatedField(self, message_module):
"""Using HasField on a repeated field should raise an exception.
"""
"""Using HasField on a repeated field should raise an exception."""
m = message_module.TestAllTypes()
with self.assertRaises(ValueError) as _:
m.HasField('repeated_int32')
@ -1226,6 +1218,7 @@ class MessageTest(unittest.TestCase):

def testReleasedNestedMessages(self, message_module):
"""A case that lead to a segfault when a message detached from its parent

container has itself a child container.
"""
m = message_module.NestedTestAllTypes()
@ -1271,17 +1264,17 @@ class Proto2Test(unittest.TestCase):
def testFieldPresence(self):
message = unittest_pb2.TestAllTypes()

self.assertFalse(message.HasField("optional_int32"))
self.assertFalse(message.HasField("optional_bool"))
self.assertFalse(message.HasField("optional_nested_message"))
self.assertFalse(message.HasField('optional_int32'))
self.assertFalse(message.HasField('optional_bool'))
self.assertFalse(message.HasField('optional_nested_message'))

with self.assertRaises(ValueError):
message.HasField("field_doesnt_exist")
message.HasField('field_doesnt_exist')

with self.assertRaises(ValueError):
message.HasField("repeated_int32")
message.HasField('repeated_int32')
with self.assertRaises(ValueError):
message.HasField("repeated_nested_message")
message.HasField('repeated_nested_message')

self.assertEqual(0, message.optional_int32)
self.assertEqual(False, message.optional_bool)
@ -1291,27 +1284,27 @@ class Proto2Test(unittest.TestCase):
message.optional_int32 = 0
message.optional_bool = False
message.optional_nested_message.bb = 0
self.assertTrue(message.HasField("optional_int32"))
self.assertTrue(message.HasField("optional_bool"))
self.assertTrue(message.HasField("optional_nested_message"))
self.assertTrue(message.HasField('optional_int32'))
self.assertTrue(message.HasField('optional_bool'))
self.assertTrue(message.HasField('optional_nested_message'))

# Set the fields to non-default values.
message.optional_int32 = 5
message.optional_bool = True
message.optional_nested_message.bb = 15

self.assertTrue(message.HasField(u"optional_int32"))
self.assertTrue(message.HasField("optional_bool"))
self.assertTrue(message.HasField("optional_nested_message"))
self.assertTrue(message.HasField(u'optional_int32'))
self.assertTrue(message.HasField('optional_bool'))
self.assertTrue(message.HasField('optional_nested_message'))

# Clearing the fields unsets them and resets their value to default.
message.ClearField("optional_int32")
message.ClearField(u"optional_bool")
message.ClearField("optional_nested_message")
message.ClearField('optional_int32')
message.ClearField(u'optional_bool')
message.ClearField('optional_nested_message')

self.assertFalse(message.HasField("optional_int32"))
self.assertFalse(message.HasField("optional_bool"))
self.assertFalse(message.HasField("optional_nested_message"))
self.assertFalse(message.HasField('optional_int32'))
self.assertFalse(message.HasField('optional_bool'))
self.assertFalse(message.HasField('optional_nested_message'))
self.assertEqual(0, message.optional_int32)
self.assertEqual(False, message.optional_bool)
self.assertEqual(0, message.optional_nested_message.bb)
@ -1361,16 +1354,17 @@ class Proto2Test(unittest.TestCase):
msg1 = more_extensions_pb2.TopLevelMessage()
msg2 = more_extensions_pb2.TopLevelMessage()
# Cpp extension will lazily create a sub message which is immutable.
self.assertEqual(0, msg1.submessage.Extensions[
more_extensions_pb2.optional_int_extension])
self.assertEqual(
0,
msg1.submessage.Extensions[more_extensions_pb2.optional_int_extension])
self.assertFalse(msg1.HasField('submessage'))
msg2.submessage.Extensions[
more_extensions_pb2.optional_int_extension] = 123
msg2.submessage.Extensions[more_extensions_pb2.optional_int_extension] = 123
# Make sure cmessage and extensions pointing to a mutable message
# after merge instead of the lazily created message.
msg1.MergeFrom(msg2)
self.assertEqual(123, msg1.submessage.Extensions[
more_extensions_pb2.optional_int_extension])
self.assertEqual(
123,
msg1.submessage.Extensions[more_extensions_pb2.optional_int_extension])

def testGoldenExtensions(self):
golden_data = test_util.GoldenFileData('golden_message')
@ -1404,17 +1398,19 @@ class Proto2Test(unittest.TestCase):
# This is still an incomplete proto - so serializing should fail
self.assertRaises(message.EncodeError, unpickled_message.SerializeToString)

# TODO(haberman): this isn't really a proto2-specific test except that this
# message has a required field in it. Should probably be factored out so
# that we can test the other parts with proto3.
def testParsingMerge(self):
"""Check the merge behavior when a required or optional field appears
multiple times in the input."""

multiple times in the input.
"""
messages = [
unittest_pb2.TestAllTypes(),
unittest_pb2.TestAllTypes(),
unittest_pb2.TestAllTypes() ]
unittest_pb2.TestAllTypes()
]
messages[0].optional_int32 = 1
messages[1].optional_int64 = 2
messages[2].optional_int32 = 3
@ -1447,15 +1443,16 @@ class Proto2Test(unittest.TestCase):
self.assertEqual(parsing_merge.optional_all_types, merged_message)
self.assertEqual(parsing_merge.optionalgroup.optional_group_all_types,
merged_message)
self.assertEqual(parsing_merge.Extensions[
unittest_pb2.TestParsingMerge.optional_ext],
merged_message)
self.assertEqual(
parsing_merge.Extensions[unittest_pb2.TestParsingMerge.optional_ext],
merged_message)

# Repeated fields should not be merged.
self.assertEqual(len(parsing_merge.repeated_all_types), 3)
self.assertEqual(len(parsing_merge.repeatedgroup), 3)
self.assertEqual(len(parsing_merge.Extensions[
unittest_pb2.TestParsingMerge.repeated_ext]), 3)
self.assertEqual(
len(parsing_merge.Extensions[
unittest_pb2.TestParsingMerge.repeated_ext]), 3)

def testPythonicInit(self):
message = unittest_pb2.TestAllTypes(
@ -1467,8 +1464,11 @@ class Proto2Test(unittest.TestCase):
optional_nested_message={'bb': 500},
optional_foreign_message={},
optional_nested_enum='BAZ',
repeatedgroup=[{'a': 600},
{'a': 700}],
repeatedgroup=[{
'a': 600
}, {
'a': 700
}],
repeated_nested_enum=['FOO', unittest_pb2.TestAllTypes.BAR],
default_int32=800,
oneof_string='y')
@ -1848,8 +1848,7 @@ class Proto3Test(unittest.TestCase):
self.assertEqual(True, msg2.map_bool_bool[True])
self.assertEqual(2, msg2.map_int32_enum[888])
self.assertEqual(456, msg2.map_int32_enum[123])
self.assertEqual('{-123: -456}',
str(msg2.map_int32_int32))
self.assertEqual('{-123: -456}', str(msg2.map_int32_int32))

def testMapEntryAlwaysSerialized(self):
msg = map_unittest_pb2.TestMap()
@ -1912,8 +1911,9 @@ class Proto3Test(unittest.TestCase):
self.assertEqual(2, len(msg2.map_int32_foreign_message))
msg2.map_int32_foreign_message[123].c = 1
# TODO(jieluo): Fix text format for message map.
self.assertIn(str(msg2.map_int32_foreign_message),
('{-456: , 123: c: 1\n}', '{123: c: 1\n, -456: }'))
self.assertIn(
str(msg2.map_int32_foreign_message),
('{-456: , 123: c: 1\n}', '{123: c: 1\n, -456: }'))

def testNestedMessageMapItemDelete(self):
msg = map_unittest_pb2.TestMap()
@ -2041,8 +2041,7 @@ class Proto3Test(unittest.TestCase):
# Test when cpp extension cache a map.
m1 = map_unittest_pb2.TestMap()
m2 = map_unittest_pb2.TestMap()
self.assertEqual(m1.map_int32_foreign_message,
m1.map_int32_foreign_message)
self.assertEqual(m1.map_int32_foreign_message, m1.map_int32_foreign_message)
m2.map_int32_foreign_message[123].c = 10
m1.MergeFrom(m2)
self.assertEqual(10, m2.map_int32_foreign_message[123].c)
@ -2070,7 +2069,7 @@ class Proto3Test(unittest.TestCase):

def testMergeFromBadType(self):
msg = map_unittest_pb2.TestMap()
with self.assertRaisesRegexp(
with self.assertRaisesRegex(
TypeError,
r'Parameter to MergeFrom\(\) must be instance of same class: expected '
r'.+TestMap got int\.'):
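Note: this and the following hunks swap assertRaisesRegexp for assertRaisesRegex; the former is only a deprecated alias (deprecated since Python 3.2 and dropped in recent Python releases), so the checks behave identically. A minimal standalone illustration (not part of this commit):

    import unittest

    class RegexAssertionExample(unittest.TestCase):

      def test_modern_spelling(self):
        # Same check the old assertRaisesRegexp alias performed.
        with self.assertRaisesRegex(TypeError, 'unsupported operand'):
          1 + 'x'

    if __name__ == '__main__':
      unittest.main()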
@ -2078,7 +2077,7 @@ class Proto3Test(unittest.TestCase):

def testCopyFromBadType(self):
msg = map_unittest_pb2.TestMap()
with self.assertRaisesRegexp(
with self.assertRaisesRegex(
TypeError,
r'Parameter to [A-Za-z]*From\(\) must be instance of same class: '
r'expected .+TestMap got int\.'):
@ -2167,6 +2166,34 @@ class Proto3Test(unittest.TestCase):
for key in int32_foreign_iter:
pass

def testModifyMapEntryWhileIterating(self):
msg = map_unittest_pb2.TestMap()

msg.map_string_string['abc'] = '123'
msg.map_string_string['def'] = '456'
msg.map_string_string['ghi'] = '789'

msg.map_int32_foreign_message[5].c = 5
msg.map_int32_foreign_message[6].c = 6
msg.map_int32_foreign_message[7].c = 7

string_string_keys = list(msg.map_string_string.keys())
int32_foreign_keys = list(msg.map_int32_foreign_message.keys())

keys = []
for key in msg.map_string_string:
keys.append(key)
msg.map_string_string[key] = '000'
self.assertEqual(keys, string_string_keys)
self.assertEqual(keys, list(msg.map_string_string.keys()))

keys = []
for key in msg.map_int32_foreign_message:
keys.append(key)
msg.map_int32_foreign_message[key].c = 0
self.assertEqual(keys, int32_foreign_keys)
self.assertEqual(keys, list(msg.map_int32_foreign_message.keys()))
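Note: the new test added above pins down an invariant: replacing values under existing keys while iterating a message map must not disturb the iteration order or the key set. The same property holds for a plain dict, shown here as a standalone sketch (not part of this commit):

    data = {'abc': '123', 'def': '456', 'ghi': '789'}
    snapshot = list(data.keys())

    seen = []
    for key in data:          # only values change, so iteration stays valid
      seen.append(key)
      data[key] = '000'

    assert seen == snapshot == list(data.keys())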

def testSubmessageMap(self):
msg = map_unittest_pb2.TestMap()

@ -2278,7 +2305,7 @@ class Proto3Test(unittest.TestCase):
msg1 = map_unittest_pb2.TestMap()
msg1.map_string_foreign_message['test'].c = 42
msg2 = map_unittest_pb2.TestMap(
map_string_foreign_message=msg1.map_string_foreign_message)
map_string_foreign_message=msg1.map_string_foreign_message)
self.assertEqual(42, msg2.map_string_foreign_message['test'].c)

def testMapFieldRaisesCorrectError(self):
@ -2413,24 +2440,21 @@ class Proto3Test(unittest.TestCase):
msg2.MergeFromString(serialized)
self.assertEqual(msg2.optional_string, u'😍')

msg = unittest_proto3_arena_pb2.TestAllTypes(
optional_string=u'\ud001')
msg = unittest_proto3_arena_pb2.TestAllTypes(optional_string=u'\ud001')
self.assertEqual(msg.optional_string, u'\ud001')

def testSurrogatesInPython3(self):
# Surrogates are rejected at setters in Python3.
with self.assertRaises(ValueError):
unittest_proto3_arena_pb2.TestAllTypes(
optional_string=u'\ud801\udc01')
unittest_proto3_arena_pb2.TestAllTypes(optional_string=u'\ud801\udc01')
with self.assertRaises(ValueError):
unittest_proto3_arena_pb2.TestAllTypes(
optional_string=b'\xed\xa0\x81')
unittest_proto3_arena_pb2.TestAllTypes(optional_string=b'\xed\xa0\x81')
with self.assertRaises(ValueError):
unittest_proto3_arena_pb2.TestAllTypes(
optional_string=u'\ud801')
unittest_proto3_arena_pb2.TestAllTypes(optional_string=u'\ud801')
with self.assertRaises(ValueError):
unittest_proto3_arena_pb2.TestAllTypes(
optional_string=u'\ud801\ud801')
unittest_proto3_arena_pb2.TestAllTypes(optional_string=u'\ud801\ud801')
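Note: the rejected inputs above are lone or mis-paired UTF-16 surrogates, which have no valid UTF-8 encoding; that is why proto3 string setters refuse them. A quick standalone reproduction of the underlying error (not part of this commit):

    try:
      '\ud801'.encode('utf-8')
    except UnicodeEncodeError as err:
      print(err)  # surrogates are not allowed in UTF-8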

@testing_refleaks.TestCase
@ -2441,8 +2465,9 @@ class ValidTypeNamesTest(unittest.TestCase):
tp_name = str(type(msg)).split("'")[1]
valid_names = ('Repeated%sContainer' % base_name,
'Repeated%sFieldContainer' % base_name)
self.assertTrue(any(tp_name.endswith(v) for v in valid_names),
'%r does end with any of %r' % (tp_name, valid_names))
self.assertTrue(
any(tp_name.endswith(v) for v in valid_names),
'%r does end with any of %r' % (tp_name, valid_names))

parts = tp_name.split('.')
class_name = parts[-1]
@ -2455,6 +2480,7 @@ class ValidTypeNamesTest(unittest.TestCase):
self.assertImportFromName(pb.repeated_int32, 'Scalar')
self.assertImportFromName(pb.repeated_nested_message, 'Composite')

@testing_refleaks.TestCase
class PackedFieldTest(unittest.TestCase):

@ -2574,5 +2600,6 @@ class OversizeProtosTest(unittest.TestCase):
q.ParseFromString(self.p_serialized)
self.assertEqual(self.p.field.payload, q.field.payload)

if __name__ == '__main__':
unittest.main()

@ -31,10 +31,7 @@
"""Tests for google.protobuf.proto_builder."""

import collections
try:
import unittest2 as unittest
except ImportError:
import unittest
import unittest

from google.protobuf import descriptor_pb2  # pylint: disable=g-import-not-at-top
from google.protobuf import descriptor

@ -37,6 +37,7 @@ import copy
import gc
import operator
import struct
import sys
import warnings
import unittest

@ -376,7 +377,8 @@ class ReflectionTest(unittest.TestCase):
self.assertRaises(TypeError, setattr, proto, 'optional_float', 'foo')
self.assertRaises(TypeError, setattr, proto, 'optional_double', 'foo')
# TODO(jieluo): Fix type checking difference for python and c extension
if api_implementation.Type() == 'python':
if (api_implementation.Type() == 'python' or
(sys.version_info.major, sys.version_info.minor) >= (3, 10)):
self.assertRaises(TypeError, setattr, proto, 'optional_bool', 1.1)
else:
proto.optional_bool = 1.1
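Note: the widened condition above enables the strict bool type check for the pure-Python backend and, on Python 3.10 or newer, for the C++ backend as well. A standalone sketch of evaluating the same gate (not part of this commit; api_implementation.Type() reports which backend is loaded):

    import sys

    from google.protobuf.internal import api_implementation

    strict_bool_check = (api_implementation.Type() == 'python'
                         or sys.version_info[:2] >= (3, 10))
    print(api_implementation.Type(), strict_bool_check)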
@ -427,7 +429,7 @@ class ReflectionTest(unittest.TestCase):
pb.optional_uint64 = '2'

# The exact error should propagate with a poorly written custom integer.
with self.assertRaisesRegexp(RuntimeError, 'my_error'):
with self.assertRaisesRegex(RuntimeError, 'my_error'):
pb.optional_uint64 = test_util.NonStandardInteger(5, 'my_error')

def assetIntegerBoundsChecking(self, integer_fn, message_module):

@ -36,8 +36,6 @@ import collections.abc as collections_abc
import datetime
import unittest

import dateutil.tz

from google.protobuf import any_pb2
from google.protobuf import duration_pb2
from google.protobuf import field_mask_pb2
@ -50,9 +48,17 @@ from google.protobuf.internal import test_util
from google.protobuf.internal import well_known_types
from google.protobuf import descriptor
from google.protobuf import text_format
from google3.pyglib import datelib
from google.protobuf.internal import _parameterized

try:
# New module in Python 3.9:
import zoneinfo  # pylint:disable=g-import-not-at-top
_TZ_JAPAN = zoneinfo.ZoneInfo('Japan')
_TZ_PACIFIC = zoneinfo.ZoneInfo('US/Pacific')
except ImportError:
_TZ_JAPAN = datetime.timezone(datetime.timedelta(hours=9), 'Japan')
_TZ_PACIFIC = datetime.timezone(datetime.timedelta(hours=-8), 'US/Pacific')
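Note: the try/except added above prefers zoneinfo (standard library since Python 3.9) and falls back to fixed-offset timezones where it is unavailable. The same pattern in isolation (not part of this commit):

    import datetime

    try:
      import zoneinfo  # Python 3.9+
      tz_japan = zoneinfo.ZoneInfo('Japan')
    except ImportError:
      # A fixed UTC+9 offset is close enough for a zone without DST.
      tz_japan = datetime.timezone(datetime.timedelta(hours=9), 'Japan')

    print(datetime.datetime(1970, 1, 1, 11, tzinfo=tz_japan).isoformat())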

class TimeUtilTestBase(_parameterized.TestCase):

@ -270,12 +276,12 @@ class TimeUtilTest(TimeUtilTestBase):

# Two hours after the Unix Epoch, around the world.
@_parameterized.named_parameters(
('London', [1970, 1, 1, 2], dateutil.tz.UTC),
('Tokyo', [1970, 1, 1, 11], dateutil.tz.gettz('Japan')),
('LA', [1969, 12, 31, 18], dateutil.tz.gettz('US/Pacific')),
('London', [1970, 1, 1, 2], datetime.timezone.utc),
('Tokyo', [1970, 1, 1, 11], _TZ_JAPAN),
('LA', [1969, 12, 31, 18], _TZ_PACIFIC),
)
def testTimezoneAwareDatetimeConversion(self, date_parts, tzinfo):
original_datetime = datelib.CreateDatetime(*date_parts, tzinfo=tzinfo)
original_datetime = datetime.datetime(*date_parts, tzinfo=tzinfo)  # pylint:disable=g-tzinfo-datetime

message = timestamp_pb2.Timestamp()
message.FromDatetime(original_datetime)
@ -296,7 +302,7 @@ class TimeUtilTest(TimeUtilTestBase):
aware_datetime = message.ToDatetime(tzinfo=tzinfo)
self.assertEqual(original_datetime, aware_datetime)
self.assertEqual(
datelib.CreateDatetime(1970, 1, 1, 2, tzinfo=dateutil.tz.UTC),
datetime.datetime(1970, 1, 1, 2, tzinfo=datetime.timezone.utc),
aware_datetime)
self.assertEqual(tzinfo, aware_datetime.tzinfo)
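Note: the hunk above replaces google3's datelib with plain datetime objects when exercising Timestamp.FromDatetime and ToDatetime. A minimal round trip outside the test, assuming the protobuf runtime is installed (not part of this commit):

    import datetime

    from google.protobuf import timestamp_pb2

    ts = timestamp_pb2.Timestamp()
    ts.FromDatetime(datetime.datetime(1970, 1, 1, 2, tzinfo=datetime.timezone.utc))
    assert ts.seconds == 7200  # two hours after the Unix epoch

    # Passing a tzinfo yields an aware datetime in that zone.
    print(ts.ToDatetime(tzinfo=datetime.timezone.utc).isoformat())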

@ -324,85 +330,64 @@ class TimeUtilTest(TimeUtilTestBase):

def testInvalidTimestamp(self):
message = timestamp_pb2.Timestamp()
self.assertRaisesRegexp(
ValueError,
'Failed to parse timestamp: missing valid timezone offset.',
message.FromJsonString,
'')
self.assertRaisesRegexp(
ValueError,
'Failed to parse timestamp: invalid trailing data '
'1970-01-01T00:00:01Ztrail.',
message.FromJsonString,
self.assertRaisesRegex(
ValueError, 'Failed to parse timestamp: missing valid timezone offset.',
message.FromJsonString, '')
self.assertRaisesRegex(
ValueError, 'Failed to parse timestamp: invalid trailing data '
'1970-01-01T00:00:01Ztrail.', message.FromJsonString,
'1970-01-01T00:00:01Ztrail')
self.assertRaisesRegexp(
ValueError,
'time data \'10000-01-01T00:00:00\' does not match'
' format \'%Y-%m-%dT%H:%M:%S\'',
message.FromJsonString, '10000-01-01T00:00:00.00Z')
self.assertRaisesRegexp(
ValueError,
'nanos 0123456789012 more than 9 fractional digits.',
message.FromJsonString,
'1970-01-01T00:00:00.0123456789012Z')
self.assertRaisesRegexp(
self.assertRaisesRegex(
ValueError, 'time data \'10000-01-01T00:00:00\' does not match'
' format \'%Y-%m-%dT%H:%M:%S\'', message.FromJsonString,
'10000-01-01T00:00:00.00Z')
self.assertRaisesRegex(
ValueError, 'nanos 0123456789012 more than 9 fractional digits.',
message.FromJsonString, '1970-01-01T00:00:00.0123456789012Z')
self.assertRaisesRegex(
ValueError,
(r'Invalid timezone offset value: \+08.'),
message.FromJsonString,
'1972-01-01T01:00:00.01+08',)
self.assertRaisesRegexp(
ValueError,
'year (0 )?is out of range',
message.FromJsonString,
'0000-01-01T00:00:00Z')
'1972-01-01T01:00:00.01+08',
)
self.assertRaisesRegex(ValueError, 'year (0 )?is out of range',
message.FromJsonString, '0000-01-01T00:00:00Z')
message.seconds = 253402300800
self.assertRaisesRegexp(
OverflowError,
'date value out of range',
message.ToJsonString)
self.assertRaisesRegex(OverflowError, 'date value out of range',
message.ToJsonString)
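Note: for contrast with the malformed inputs above, a well-formed RFC 3339 timestamp parses cleanly. A standalone example, assuming the protobuf runtime is installed (not part of this commit):

    from google.protobuf import timestamp_pb2

    ts = timestamp_pb2.Timestamp()
    ts.FromJsonString('1970-01-01T00:00:01.500Z')
    assert (ts.seconds, ts.nanos) == (1, 500000000)
    print(ts.ToJsonString())  # 1970-01-01T00:00:01.500Z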

def testInvalidDuration(self):
message = duration_pb2.Duration()
self.assertRaisesRegexp(
ValueError,
'Duration must end with letter "s": 1.',
message.FromJsonString, '1')
self.assertRaisesRegexp(
ValueError,
'Couldn\'t parse duration: 1...2s.',
message.FromJsonString, '1...2s')
self.assertRaisesRegex(ValueError, 'Duration must end with letter "s": 1.',
message.FromJsonString, '1')
self.assertRaisesRegex(ValueError, 'Couldn\'t parse duration: 1...2s.',
message.FromJsonString, '1...2s')
text = '-315576000001.000000000s'
self.assertRaisesRegexp(
self.assertRaisesRegex(
ValueError,
r'Duration is not valid\: Seconds -315576000001 must be in range'
r' \[-315576000000\, 315576000000\].',
message.FromJsonString, text)
r' \[-315576000000\, 315576000000\].', message.FromJsonString, text)
text = '315576000001.000000000s'
self.assertRaisesRegexp(
self.assertRaisesRegex(
ValueError,
r'Duration is not valid\: Seconds 315576000001 must be in range'
r' \[-315576000000\, 315576000000\].',
message.FromJsonString, text)
r' \[-315576000000\, 315576000000\].', message.FromJsonString, text)
message.seconds = -315576000001
message.nanos = 0
self.assertRaisesRegexp(
self.assertRaisesRegex(
ValueError,
r'Duration is not valid\: Seconds -315576000001 must be in range'
r' \[-315576000000\, 315576000000\].',
message.ToJsonString)
r' \[-315576000000\, 315576000000\].', message.ToJsonString)
message.seconds = 0
message.nanos = 999999999 + 1
self.assertRaisesRegexp(
ValueError,
r'Duration is not valid\: Nanos 1000000000 must be in range'
r' \[-999999999\, 999999999\].',
message.ToJsonString)
self.assertRaisesRegex(
ValueError, r'Duration is not valid\: Nanos 1000000000 must be in range'
r' \[-999999999\, 999999999\].', message.ToJsonString)
message.seconds = -1
message.nanos = 1
self.assertRaisesRegexp(
ValueError,
r'Duration is not valid\: Sign mismatch.',
message.ToJsonString)
self.assertRaisesRegex(ValueError,
r'Duration is not valid\: Sign mismatch.',
message.ToJsonString)
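Note: the valid counterpart to the duration errors above is a decimal number of seconds with a trailing "s". A standalone example, assuming the protobuf runtime is installed (not part of this commit):

    from google.protobuf import duration_pb2

    d = duration_pb2.Duration()
    d.FromJsonString('1.5s')
    assert (d.seconds, d.nanos) == (1, 500000000)
    print(d.ToJsonString())  # 1.500s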

class FieldMaskTest(unittest.TestCase):
@ -724,34 +709,29 @@ class FieldMaskTest(unittest.TestCase):
well_known_types._SnakeCaseToCamelCase('foo3_bar'))

# No uppercase letter is allowed.
self.assertRaisesRegexp(
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: Path name Foo must '
'not contain uppercase letters.',
well_known_types._SnakeCaseToCamelCase,
'Foo')
well_known_types._SnakeCaseToCamelCase, 'Foo')
# Any character after a "_" must be a lowercase letter.
# 1. "_" cannot be followed by another "_".
# 2. "_" cannot be followed by a digit.
# 3. "_" cannot appear as the last character.
self.assertRaisesRegexp(
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo__bar.',
well_known_types._SnakeCaseToCamelCase,
'foo__bar')
self.assertRaisesRegexp(
well_known_types._SnakeCaseToCamelCase, 'foo__bar')
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo_3bar.',
well_known_types._SnakeCaseToCamelCase,
'foo_3bar')
self.assertRaisesRegexp(
well_known_types._SnakeCaseToCamelCase, 'foo_3bar')
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: Trailing "_" in path '
'name foo_bar_.',
well_known_types._SnakeCaseToCamelCase,
'foo_bar_')
'name foo_bar_.', well_known_types._SnakeCaseToCamelCase, 'foo_bar_')

def testCamelCaseToSnakeCase(self):
self.assertEqual('foo_bar',
@ -760,11 +740,10 @@ class FieldMaskTest(unittest.TestCase):
well_known_types._CamelCaseToSnakeCase('FooBar'))
self.assertEqual('foo3_bar',
well_known_types._CamelCaseToSnakeCase('foo3Bar'))
self.assertRaisesRegexp(
self.assertRaisesRegex(
ValueError,
'Fail to parse FieldMask: Path name foo_bar must not contain "_"s.',
well_known_types._CamelCaseToSnakeCase,
'foo_bar')
well_known_types._CamelCaseToSnakeCase, 'foo_bar')
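Note: the private helpers exercised above back FieldMask's JSON form, where each snake_case path is rendered in camelCase and parsed back again. A standalone example, assuming the protobuf runtime is installed (not part of this commit):

    from google.protobuf import field_mask_pb2

    mask = field_mask_pb2.FieldMask(paths=['foo_bar', 'foo3_bar'])
    print(mask.ToJsonString())  # fooBar,foo3Bar

    mask.FromJsonString('fooBar,foo3Bar')
    assert list(mask.paths) == ['foo_bar', 'foo3_bar']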

class StructTest(unittest.TestCase):

@ -194,6 +194,9 @@ class Message(object):
"""Parse serialized protocol buffer data into this message.

Like :func:`MergeFromString()`, except we clear the object first.

Raises:
message.DecodeError if the input cannot be parsed.
"""
self.Clear()
return self.MergeFromString(serialized)
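Note: the reflowed docstring above states the contract: ParseFromString is simply Clear followed by MergeFromString, returning the number of bytes consumed. A sketch of that equivalence for any generated message instance (not part of this commit):

    def parse_from_string(msg, serialized):
      # Same effect as msg.ParseFromString(serialized): drop existing state,
      # then merge the wire bytes and return how many were read.
      msg.Clear()
      return msg.MergeFromString(serialized)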

@ -45,6 +45,7 @@
#ifndef GOOGLE_PROTOBUF_PYTHON_PROTO_API_H__
#define GOOGLE_PROTOBUF_PYTHON_PROTO_API_H__

#define PY_SSIZE_T_CLEAN
#include <Python.h>

#include <google/protobuf/descriptor_database.h>

@ -32,6 +32,7 @@

#include <google/protobuf/pyext/descriptor.h>

#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <frameobject.h>

@ -415,11 +416,15 @@ static PyGetSetDef Getters[] = {

PyTypeObject PyBaseDescriptor_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
".DescriptorBase", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
(destructor)Dealloc, // tp_dealloc
0, // tp_print
".DescriptorBase", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
(destructor)Dealloc, // tp_dealloc
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
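Note: the guards added in these hunks key off PY_VERSION_HEX because CPython 3.8 repurposed the long-unused tp_print slot as tp_vectorcall_offset, so the correct initializer depends on the interpreter version. The constant 0x03080000 packs major 3 and minor 8 into the same layout that sys.hexversion uses, which is easy to confirm from Python (not part of this commit):

    import sys

    PY_3_8_0 = 0x03080000  # (major << 24) | (minor << 16); micro/level/serial zero
    print(hex(sys.hexversion), sys.hexversion >= PY_3_8_0)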
@ -685,11 +690,15 @@ static PyMethodDef Methods[] = {

PyTypeObject PyMessageDescriptor_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
".MessageDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
0, // tp_print
".MessageDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
@ -1011,11 +1020,15 @@ static PyMethodDef Methods[] = {

PyTypeObject PyFieldDescriptor_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
".FieldDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
0, // tp_print
".FieldDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
@ -1167,11 +1180,15 @@ static PyGetSetDef Getters[] = {

PyTypeObject PyEnumDescriptor_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
".EnumDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
0, // tp_print
".EnumDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
@ -1288,11 +1305,15 @@ static PyMethodDef Methods[] = {

PyTypeObject PyEnumValueDescriptor_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
".EnumValueDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
0, // tp_print
".EnumValueDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
@ -1477,40 +1498,44 @@ PyTypeObject PyFileDescriptor_Type = {
sizeof(PyFileDescriptor), // tp_basicsize
0, // tp_itemsize
(destructor)file_descriptor::Dealloc, // tp_dealloc
0, // tp_print
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
nullptr, // tp_repr
nullptr, // tp_as_number
nullptr, // tp_as_sequence
nullptr, // tp_as_mapping
nullptr, // tp_hash
nullptr, // tp_call
nullptr, // tp_str
nullptr, // tp_getattro
nullptr, // tp_setattro
nullptr, // tp_as_buffer
Py_TPFLAGS_DEFAULT, // tp_flags
"A File Descriptor", // tp_doc
nullptr, // tp_traverse
nullptr, // tp_clear
nullptr, // tp_richcompare
0, // tp_weaklistoffset
nullptr, // tp_iter
nullptr, // tp_iternext
file_descriptor::Methods, // tp_methods
nullptr, // tp_members
file_descriptor::Getters, // tp_getset
&descriptor::PyBaseDescriptor_Type, // tp_base
nullptr, // tp_dict
nullptr, // tp_descr_get
nullptr, // tp_descr_set
0, // tp_dictoffset
nullptr, // tp_init
nullptr, // tp_alloc
nullptr, // tp_new
PyObject_GC_Del, // tp_free
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
nullptr, // tp_repr
nullptr, // tp_as_number
nullptr, // tp_as_sequence
nullptr, // tp_as_mapping
nullptr, // tp_hash
nullptr, // tp_call
nullptr, // tp_str
nullptr, // tp_getattro
nullptr, // tp_setattro
nullptr, // tp_as_buffer
Py_TPFLAGS_DEFAULT, // tp_flags
"A File Descriptor", // tp_doc
nullptr, // tp_traverse
nullptr, // tp_clear
nullptr, // tp_richcompare
0, // tp_weaklistoffset
nullptr, // tp_iter
nullptr, // tp_iternext
file_descriptor::Methods, // tp_methods
nullptr, // tp_members
file_descriptor::Getters, // tp_getset
&descriptor::PyBaseDescriptor_Type, // tp_base
nullptr, // tp_dict
nullptr, // tp_descr_get
nullptr, // tp_descr_set
0, // tp_dictoffset
nullptr, // tp_init
nullptr, // tp_alloc
nullptr, // tp_new
PyObject_GC_Del, // tp_free
};

PyObject* PyFileDescriptor_FromDescriptor(
@ -1634,11 +1659,15 @@ static PyMethodDef Methods[] = {

PyTypeObject PyOneofDescriptor_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
".OneofDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
0, // tp_print
".OneofDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
@ -1752,11 +1781,15 @@ static PyMethodDef Methods[] = {

PyTypeObject PyServiceDescriptor_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
".ServiceDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
0, // tp_print
".ServiceDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
@ -1876,11 +1909,15 @@ static PyMethodDef Methods[] = {

PyTypeObject PyMethodDescriptor_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
".MethodDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
0, // tp_print
".MethodDescriptor", // tp_name
sizeof(PyBaseDescriptor), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare

@ -33,6 +33,7 @@
#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_H__
#define GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_H__

#define PY_SSIZE_T_CLEAN
#include <Python.h>

#include <google/protobuf/descriptor.h>

@ -49,6 +49,7 @@
// because the Python API is based on C, and does not play well with C++
// inheritance.

#define PY_SSIZE_T_CLEAN
#include <Python.h>

#include <google/protobuf/descriptor.h>
@ -549,12 +550,16 @@ PyTypeObject DescriptorMapping_Type = {
sizeof(PyContainer), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
0, // tp_print
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
(reprfunc)ContainerRepr, // tp_repr
nullptr, // tp_as_number
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
(reprfunc)ContainerRepr, // tp_repr
nullptr, // tp_as_number
&MappingSequenceMethods, // tp_as_sequence
&MappingMappingMethods, // tp_as_mapping
nullptr, // tp_hash
@ -729,10 +734,14 @@ static PyMappingMethods SeqMappingMethods = {

PyTypeObject DescriptorSequence_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) "DescriptorSequence", // tp_name
sizeof(PyContainer), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
0, // tp_print
sizeof(PyContainer), // tp_basicsize
0, // tp_itemsize
nullptr, // tp_dealloc
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
@ -874,25 +883,29 @@ static PyTypeObject ContainerIterator_Type = {
sizeof(PyContainerIterator), // tp_basicsize
0, // tp_itemsize
(destructor)Iterator_Dealloc, // tp_dealloc
0, // tp_print
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
nullptr, // tp_repr
nullptr, // tp_as_number
nullptr, // tp_as_sequence
nullptr, // tp_as_mapping
nullptr, // tp_hash
nullptr, // tp_call
nullptr, // tp_str
nullptr, // tp_getattro
nullptr, // tp_setattro
nullptr, // tp_as_buffer
Py_TPFLAGS_DEFAULT, // tp_flags
nullptr, // tp_doc
nullptr, // tp_traverse
nullptr, // tp_clear
nullptr, // tp_richcompare
#if PY_VERSION_HEX < 0x03080000
nullptr, // tp_print
#else
0, // tp_vectorcall_offset
#endif
nullptr, // tp_getattr
nullptr, // tp_setattr
nullptr, // tp_compare
nullptr, // tp_repr
nullptr, // tp_as_number
nullptr, // tp_as_sequence
nullptr, // tp_as_mapping
nullptr, // tp_hash
nullptr, // tp_call
nullptr, // tp_str
nullptr, // tp_getattro
nullptr, // tp_setattro
nullptr, // tp_as_buffer
Py_TPFLAGS_DEFAULT, // tp_flags
nullptr, // tp_doc
nullptr, // tp_traverse
nullptr, // tp_clear
nullptr, // tp_richcompare
0, // tp_weaklistoffset
PyObject_SelfIter, // tp_iter
(iternextfunc)Iterator_Next, // tp_iternext

@ -34,6 +34,7 @@
// Mappings and Sequences of descriptors.
// They implement containers like fields_by_name, EnumDescriptor.values...
// See descriptor_containers.cc for more description.
#define PY_SSIZE_T_CLEAN
#include <Python.h>

namespace google {
Some files were not shown because too many files have changed in this diff.