Sync from Piper @451217119
PROTOBUF_SYNC_PIPER
This commit is contained in commit 6dd8af4ecf.
CHANGES.txt (22 lines changed)
@@ -1,3 +1,23 @@
+2022-05-19 version 21.0-rc2 (C++/Java/Python/PHP/Objective-C/C#/Ruby)
+
+Python
+* Fix windows builds
+* Throw more helpful error if generated code is out of date
+* Fixed two reference leaks
+
+Ruby
+* Support x64-mingw-ucrt platform
+
+PHP
+* Fix SEGV by not writing to shared memory for zend_class_entry
+
+C#
+* Suppress warning CS8981
+
+Other
+* Fix Maven release to release actual osx_aarch64 binary
+* Fix protoc zips to have the proto files for well known types
+
 2022-05-10 version 21.0-rc1 (C++/Java/Python/PHP/Objective-C/C#/Ruby)
 
 C++
@@ -15,6 +35,7 @@
 Java
 * Update protobuf_version.bzl to separate protoc and per-language java … (#9900)
 * 6x speedup in ArrayEncoder.writeUInt32NotTag
+* Java generated code is no longer compatible with runtimes 2.6.1 and earlier
 
 Python
 * Increment python major version to 4 in version.json for python upb (#9926)
@@ -26,6 +47,7 @@
 * Due to the breaking changes for Python, the major version number for Python
   has been incremented.
 * The binary wheel for macOS now supports Apple silicon.
+* In TextFormat, transform UnicodeDecodeError into ParseError.
 
 
 PHP
@@ -87,7 +87,7 @@ pkg_filegroup(
     srcs = [
         ":dist_files",
         "//benchmarks/cpp:dist_files",
-        # "//benchmarks/datasets:dist_files",  # not in autotools dist
+        "//benchmarks/datasets:dist_files",  # not in autotools dist
         "//benchmarks/datasets/google_message1/proto2:dist_files",
         "//benchmarks/datasets/google_message1/proto3:dist_files",
         "//benchmarks/datasets/google_message2:dist_files",
@@ -151,7 +151,7 @@ public class ProtoCaliperBenchmark {
     }
   }
 
-  @SuppressWarnings("IgnoredPureGetter")
+  @SuppressWarnings({"IgnoredPureGetter", "CheckReturnValue"})
   @Benchmark
   void serializeToByteArray(int reps) throws IOException {
     if (sampleMessageList.size() == 0) {
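The added "CheckReturnValue" suppression is needed because the benchmark calls a pure serializer and deliberately discards the result, which Error Prone would otherwise reject once return-value checking is enforced. A minimal sketch of the pattern, under the assumption that the benchmark loops over sample messages purely for timing (the `Msg` stub below is hypothetical, standing in for a generated message class):

```java
import java.io.IOException;
import java.util.List;

public class SerializeBenchmarkSketch {
  /** Hypothetical stand-in for a protoc-generated message type. */
  interface Msg {
    byte[] toByteArray();
  }

  private List<Msg> sampleMessageList;

  // Without the "CheckReturnValue" entry, Error Prone would flag this method:
  // toByteArray() is pure and its result is deliberately dropped, because only
  // the cost of serialization is being measured.
  @SuppressWarnings({"IgnoredPureGetter", "CheckReturnValue"})
  void serializeToByteArray(int reps) throws IOException {
    for (int i = 0; i < reps; i++) {
      for (Msg message : sampleMessageList) {
        message.toByteArray(); // result discarded on purpose
      }
    }
  }
}
```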
@@ -1,4 +1,4 @@
-# C++ compile/link options for Protobuf.
+"""C++ compile/link options for Protobuf libraries."""
 
 COPTS = select({
     "//build_defs:config_msvc": [
@@ -17,7 +17,7 @@ AC_PREREQ(2.59)
 # In the SVN trunk, the version should always be the next anticipated release
 # version with the "-pre" suffix. (We used to use "-SNAPSHOT" but this pushed
 # the size of one file name in the dist tarfile over the 99-char limit.)
-AC_INIT([Protocol Buffers],[3.21.0-rc-1],[protobuf@googlegroups.com],[protobuf])
+AC_INIT([Protocol Buffers],[3.21.0-rc-2],[protobuf@googlegroups.com],[protobuf])
 
 AM_MAINTAINER_MODE([enable])
 
@@ -1,6 +1,22 @@
-# PLEASE DO NOT DEPEND ON THE CONTENTS OF THIS FILE, IT IS UNSTABLE.
+"""Starlark definitions for Protobuf conformance tests.
+
+PLEASE DO NOT DEPEND ON THE CONTENTS OF THIS FILE, IT IS UNSTABLE.
+"""
 
-def conformance_test(name, testee, failure_list = None, text_format_failure_list = None):
+def conformance_test(
+        name,
+        testee,
+        failure_list = None,
+        text_format_failure_list = None):
+    """Conformance test runner.
+
+    Args:
+      name: the name for the test.
+      testee: a conformance test client binary.
+      failure_list: a text file with known failures, one per line.
+      text_format_failure_list: a text file with known failures (one per line)
+          for the text format conformance suite.
+    """
     args = ["--testee %s" % _strip_bazel(testee)]
     failure_lists = []
     if failure_list:
@@ -5,7 +5,7 @@
   <title>Google Protocol Buffers tools</title>
   <summary>Tools for Protocol Buffers - Google's data interchange format.</summary>
   <description>See project site for more info.</description>
-  <version>3.21.0-rc1</version>
+  <version>3.21.0-rc2</version>
   <authors>Google Inc.</authors>
   <owners>protobuf-packages</owners>
   <licenseUrl>https://github.com/protocolbuffers/protobuf/blob/main/LICENSE</licenseUrl>
Binary file not shown.
@@ -4,7 +4,7 @@
   <Description>C# runtime library for Protocol Buffers - Google's data interchange format.</Description>
   <Copyright>Copyright 2015, Google Inc.</Copyright>
   <AssemblyTitle>Google Protocol Buffers</AssemblyTitle>
-  <VersionPrefix>3.21.0-rc1</VersionPrefix>
+  <VersionPrefix>3.21.0-rc2</VersionPrefix>
   <!-- C# 7.2 is required for Span/BufferWriter/ReadOnlySequence -->
   <LangVersion>7.2</LangVersion>
   <Authors>Google Inc.</Authors>
@@ -7098,11 +7098,8 @@ namespace Google.Protobuf.Reflection {
     /// check its required fields, regardless of whether or not the message has
     /// been parsed.
     ///
-    /// As of 2021, lazy does no correctness checks on the byte stream during
-    /// parsing. This may lead to crashes if and when an invalid byte stream is
-    /// finally parsed upon access.
-    ///
-    /// TODO(b/211906113): Enable validation on lazy fields.
+    /// As of May 2022, lazy verifies the contents of the byte stream during
+    /// parsing. An invalid byte stream will cause the overall parsing to fail.
     /// </summary>
     [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
     [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
@@ -324,3 +324,7 @@ with info about your project (name and website) so we can add an entry for you.
 1. Protonium
    * Website: https://github.com/zyp/protonium
    * Extension: 1146
+
+1. Protoc-gen-xo
+   * Website: https://github.com/xo/ecosystem
+   * Extension: 1147
@@ -23,7 +23,7 @@ If you are using Maven, use the following:
 <dependency>
   <groupId>com.google.protobuf</groupId>
   <artifactId>protobuf-java</artifactId>
-  <version>3.21.0-rc-1</version>
+  <version>3.21.0-rc-2</version>
 </dependency>
 ```
 
@@ -37,7 +37,7 @@ protobuf-java-util package:
 <dependency>
   <groupId>com.google.protobuf</groupId>
   <artifactId>protobuf-java-util</artifactId>
-  <version>3.21.0-rc-1</version>
+  <version>3.21.0-rc-2</version>
 </dependency>
 ```
 
@@ -45,7 +45,7 @@ protobuf-java-util package:
 
 If you are using Gradle, add the following to your `build.gradle` file's dependencies:
 ```
-implementation 'com.google.protobuf:protobuf-java:3.21.0-rc-1'
+implementation 'com.google.protobuf:protobuf-java:3.21.0-rc-2'
 ```
 Again, be sure to check that the version number matches (or is newer than) the version number of protoc that you are using.
 
@@ -4,7 +4,7 @@
 
   <groupId>com.google.protobuf</groupId>
   <artifactId>protobuf-bom</artifactId>
-  <version>3.21.0-rc-1</version>
+  <version>3.21.0-rc-2</version>
   <packaging>pom</packaging>
 
   <name>Protocol Buffers [BOM]</name>
@@ -4,7 +4,7 @@
   <parent>
     <groupId>com.google.protobuf</groupId>
     <artifactId>protobuf-parent</artifactId>
-    <version>3.21.0-rc-1</version>
+    <version>3.21.0-rc-2</version>
   </parent>
 
   <artifactId>protobuf-java</artifactId>
@@ -59,6 +59,7 @@ import java.io.OutputStream;
  *
  * @author kenton@google.com Kenton Varda
  */
+@CheckReturnValue
 public interface MessageLite extends MessageLiteOrBuilder {
 
   /**
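Annotating the whole interface makes Error Prone treat the result of every `MessageLite` method as must-use by default; the `@CanIgnoreReturnValue` annotations added in the hunks below then carve out the builder methods that mutate in place. A hedged sketch of what the checker would now accept and reject:

```java
import com.google.protobuf.MessageLite;

class CheckReturnValueEffect {
  // With @CheckReturnValue on the interface, every MessageLite method's
  // result is must-use unless the method is marked @CanIgnoreReturnValue.
  static void demo(MessageLite message) {
    byte[] bytes = message.toByteArray(); // OK: the result is consumed
    System.out.println(bytes.length);

    // message.toByteArray();  // would now be an Error Prone error: the
    //                         // serialized bytes would be silently dropped
  }
}
```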
@@ -138,6 +139,7 @@ public interface MessageLite extends MessageLiteOrBuilder {
   /** Abstract interface implemented by Protocol Message builders. */
   interface Builder extends MessageLiteOrBuilder, Cloneable {
     /** Resets all fields to their default values. */
+    @CanIgnoreReturnValue
     Builder clear();
 
     /**
@@ -181,11 +183,12 @@ public interface MessageLite extends MessageLiteOrBuilder {
    * <p>Note: The caller should call {@link CodedInputStream#checkLastTagWas(int)} after calling
    * this to verify that the last tag seen was the appropriate end-group tag, or zero for EOF.
    *
-   * @throws InvalidProtocolBufferException the bytes read are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   * @throws InvalidProtocolBufferException the bytes read are not syntactically correct according
+   *     to the protobuf wire format specification. The data is corrupt, incomplete, or was never
+   *     a protobuf in the first place.
    * @throws IOException an I/O error reading from the stream
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(CodedInputStream input) throws IOException;
 
   /**
@@ -193,11 +196,12 @@ public interface MessageLite extends MessageLiteOrBuilder {
    * that you want to be able to parse must be registered in {@code extensionRegistry}. Extensions
    * not in the registry will be treated as unknown fields.
    *
-   * @throws InvalidProtocolBufferException the bytes read are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   * @throws InvalidProtocolBufferException the bytes read are not syntactically correct according
+   *     to the protobuf wire format specification. The data is corrupt, incomplete, or was never
+   *     a protobuf in the first place.
    * @throws IOException an I/O error reading from the stream
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry)
       throws IOException;
 
   /**
@@ -209,10 +213,11 @@ public interface MessageLite extends MessageLiteOrBuilder {
    * is just a small wrapper around {@link #mergeFrom(CodedInputStream)}.
    *
    * @throws InvalidProtocolBufferException the bytes in data are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   *     according to the protobuf wire format specification. The data is corrupt, incomplete, or
+   *     was never a protobuf in the first place.
    * @return this
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(ByteString data) throws InvalidProtocolBufferException;
 
   /**
@@ -220,10 +225,11 @@ public interface MessageLite extends MessageLiteOrBuilder {
    * is just a small wrapper around {@link #mergeFrom(CodedInputStream,ExtensionRegistryLite)}.
    *
    * @throws InvalidProtocolBufferException the bytes in data are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   *     according to the protobuf wire format specification. The data is corrupt, incomplete, or
+   *     was never a protobuf in the first place.
    * @return this
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(ByteString data, ExtensionRegistryLite extensionRegistry)
       throws InvalidProtocolBufferException;
 
@@ -232,10 +238,11 @@ public interface MessageLite extends MessageLiteOrBuilder {
    * is just a small wrapper around {@link #mergeFrom(CodedInputStream)}.
    *
    * @throws InvalidProtocolBufferException the bytes in data are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   *     according to the protobuf wire format specification. The data is corrupt, incomplete, or
+   *     was never a protobuf in the first place.
    * @return this
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(byte[] data) throws InvalidProtocolBufferException;
 
   /**
@@ -243,10 +250,11 @@ public interface MessageLite extends MessageLiteOrBuilder {
    * is just a small wrapper around {@link #mergeFrom(CodedInputStream)}.
    *
    * @throws InvalidProtocolBufferException the bytes in data are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   *     according to the protobuf wire format specification. The data is corrupt, incomplete, or
+   *     was never a protobuf in the first place.
    * @return this
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(byte[] data, int off, int len) throws InvalidProtocolBufferException;
 
   /**
@@ -254,10 +262,11 @@ public interface MessageLite extends MessageLiteOrBuilder {
    * is just a small wrapper around {@link #mergeFrom(CodedInputStream,ExtensionRegistryLite)}.
    *
    * @throws InvalidProtocolBufferException the bytes in data are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   *     according to the protobuf wire format specification. The data is corrupt, incomplete, or
+   *     was never a protobuf in the first place.
    * @return this
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(byte[] data, ExtensionRegistryLite extensionRegistry)
       throws InvalidProtocolBufferException;
 
@@ -266,10 +275,11 @@ public interface MessageLite extends MessageLiteOrBuilder {
    * is just a small wrapper around {@link #mergeFrom(CodedInputStream,ExtensionRegistryLite)}.
    *
    * @throws InvalidProtocolBufferException the bytes in data are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   *     according to the protobuf wire format specification. The data is corrupt, incomplete, or
+   *     was never a protobuf in the first place.
    * @return this
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(byte[] data, int off, int len, ExtensionRegistryLite extensionRegistry)
       throws InvalidProtocolBufferException;
 
@@ -283,12 +293,13 @@ public interface MessageLite extends MessageLiteOrBuilder {
    *
    * <p>Despite usually reading the entire input, this does not close the stream.
    *
-   * @throws InvalidProtocolBufferException the bytes read are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   * @throws InvalidProtocolBufferException the bytes read are not syntactically correct according
+   *     to the protobuf wire format specification. The data is corrupt, incomplete, or was never
+   *     a protobuf in the first place.
    * @throws IOException an I/O error reading from the stream
    * @return this
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(InputStream input) throws IOException;
 
   /**
@@ -298,6 +309,7 @@ public interface MessageLite extends MessageLiteOrBuilder {
    *
    * @return this
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
       throws IOException;
 
@@ -317,6 +329,7 @@ public interface MessageLite extends MessageLiteOrBuilder {
    *
    * <p>This is equivalent to the {@code Message::MergeFrom} method in C++.
    */
+  @CanIgnoreReturnValue
   Builder mergeFrom(MessageLite other);
 
   /**
@@ -326,11 +339,12 @@ public interface MessageLite extends MessageLiteOrBuilder {
    *
    * @return true if successful, or false if the stream is at EOF when the method starts. Any
    *     other error (including reaching EOF during parsing) causes an exception to be thrown.
-   * @throws InvalidProtocolBufferException the bytes read are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   * @throws InvalidProtocolBufferException the bytes read are not syntactically correct according
+   *     to the protobuf wire format specification. The data is corrupt, incomplete, or was never
+   *     a protobuf in the first place.
    * @throws IOException an I/O error reading from the stream
    */
+  @CanIgnoreReturnValue // TODO(kak): should this be @CheckReturnValue instead?
   boolean mergeDelimitedFrom(InputStream input) throws IOException;
 
   /**
@@ -338,11 +352,12 @@ public interface MessageLite extends MessageLiteOrBuilder {
    *
    * @return true if successful, or false if the stream is at EOF when the method starts. Any
    *     other error (including reaching EOF during parsing) causes an exception to be thrown.
-   * @throws InvalidProtocolBufferException the bytes read are not syntactically correct
-   *     according to the protobuf wire format specification. The data is corrupt, incomplete,
-   *     or was never a protobuf in the first place.
+   * @throws InvalidProtocolBufferException the bytes read are not syntactically correct according
+   *     to the protobuf wire format specification. The data is corrupt, incomplete, or was never
+   *     a protobuf in the first place.
    * @throws IOException an I/O error reading from the stream
    */
+  @CanIgnoreReturnValue // TODO(kak): should this be @CheckReturnValue instead?
   boolean mergeDelimitedFrom(InputStream input, ExtensionRegistryLite extensionRegistry)
       throws IOException;
 }
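Taken together, the annotations keep pure results must-use while letting the in-place merge methods be called for effect. A small sketch of the two calling patterns this series is designed around, written against the `MessageLite` API itself (the helper method names here are illustrative only):

```java
import com.google.protobuf.MessageLite;
import java.io.IOException;
import java.io.InputStream;

class MergeFromPatterns {
  // mergeFrom is @CanIgnoreReturnValue: the builder mutates itself and returns
  // `this`, so dropping the returned builder is deliberate and harmless.
  static MessageLite parse(MessageLite.Builder builder, byte[] data)
      throws IOException {
    builder.mergeFrom(data); // returned builder (the same object) ignored
    return builder.build();
  }

  // mergeDelimitedFrom returns false at clean EOF, so its result drives the
  // read loop; the TODO(kak) in this diff asks whether it should instead be
  // @CheckReturnValue to force callers to inspect it.
  static int countDelimited(MessageLite.Builder builder, InputStream in)
      throws IOException {
    int count = 0;
    while (builder.mergeDelimitedFrom(in)) {
      count++;
      builder.clear(); // clear() is also @CanIgnoreReturnValue
    }
    return count;
  }
}
```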
@@ -36,6 +36,7 @@ package com.google.protobuf;
  *
  * @author jonp@google.com (Jon Perlow)
  */
+@CheckReturnValue
 public interface MessageLiteOrBuilder {
   /**
    * Get an instance of the type with no fields set. Because no fields are set, all getters for
@@ -1529,7 +1529,7 @@ public class GeneratedMessageTest {
     assertThat(builder.getFooInt()).isEqualTo(123);
     TestOneof2 message = builder.buildPartial();
     assertThat(message.hasFooInt()).isTrue();
-    assertThat(123).isEqualTo(message.getFooInt());
+    assertThat(message.getFooInt()).isEqualTo(123);
 
     assertThat(builder.clearFooInt().hasFooInt()).isFalse();
     TestOneof2 message2 = builder.build();
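The swapped assertion fixes an actual/expected inversion: Truth's `assertThat()` takes the value under test, so putting the literal first yields misleading failure messages. A minimal sketch, assuming Google's Truth library (which this test already uses):

```java
import static com.google.common.truth.Truth.assertThat;

class AssertOrderSketch {
  static void demo(int actualFooInt) {
    // Inverted (the old code): a failure would read as though the constant
    // 123 were the value under test, hiding which getter misbehaved.
    // assertThat(123).isEqualTo(actualFooInt);

    // Fixed: the subject is the actual value, the argument the expectation.
    assertThat(actualFooInt).isEqualTo(123);
  }
}
```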
@@ -235,6 +235,7 @@ import protobuf_unittest.UnittestProto.TestRequired;
 import protobuf_unittest.UnittestProto.TestUnpackedTypes;
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.RandomAccessFile;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -3842,7 +3843,11 @@ public final class TestUtil {
 
   private static ByteString readBytesFromResource(String name) {
     try {
-      return ByteString.readFrom(TestUtil.class.getResourceAsStream(name));
+      InputStream in = TestUtil.class.getResourceAsStream(name);
+      if (in == null) {
+        throw new RuntimeException("Tests data file " + name + " is missing.");
+      }
+      return ByteString.readFrom(in);
     } catch (IOException e) {
       throw new RuntimeException(e);
     }
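The new null check exists because `Class#getResourceAsStream` signals a missing resource by returning null rather than throwing, so the old one-liner only failed later with an uninformative NullPointerException inside `ByteString.readFrom`. A sketch of the lookup pattern in isolation (the class name is hypothetical):

```java
import java.io.InputStream;

class ResourceLookupSketch {
  static InputStream openResource(String name) {
    InputStream in = ResourceLookupSketch.class.getResourceAsStream(name);
    if (in == null) {
      // Fail fast with the resource name; otherwise the caller would
      // dereference null somewhere downstream with no useful context.
      throw new RuntimeException("Tests data file " + name + " is missing.");
    }
    return in;
  }
}
```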
@@ -4,7 +4,7 @@
   <parent>
     <groupId>com.google.protobuf</groupId>
     <artifactId>protobuf-parent</artifactId>
-    <version>3.21.0-rc-1</version>
+    <version>3.21.0-rc-2</version>
   </parent>
 
   <artifactId>protobuf-kotlin-lite</artifactId>
@@ -4,7 +4,7 @@
   <parent>
     <groupId>com.google.protobuf</groupId>
     <artifactId>protobuf-parent</artifactId>
-    <version>3.21.0-rc-1</version>
+    <version>3.21.0-rc-2</version>
   </parent>
 
   <artifactId>protobuf-kotlin</artifactId>
@@ -29,7 +29,7 @@ protobuf Java Lite runtime. If you are using Maven, include the following:
 <dependency>
   <groupId>com.google.protobuf</groupId>
   <artifactId>protobuf-javalite</artifactId>
-  <version>3.21.0-rc-1</version>
+  <version>3.21.0-rc-2</version>
 </dependency>
 ```
 
@@ -4,7 +4,7 @@
   <parent>
     <groupId>com.google.protobuf</groupId>
     <artifactId>protobuf-parent</artifactId>
-    <version>3.21.0-rc-1</version>
+    <version>3.21.0-rc-2</version>
   </parent>
 
   <artifactId>protobuf-javalite</artifactId>
@@ -4,7 +4,7 @@
 
   <groupId>com.google.protobuf</groupId>
   <artifactId>protobuf-parent</artifactId>
-  <version>3.21.0-rc-1</version>
+  <version>3.21.0-rc-2</version>
   <packaging>pom</packaging>
 
   <name>Protocol Buffers [Parent]</name>
@@ -4,7 +4,7 @@
   <parent>
     <groupId>com.google.protobuf</groupId>
     <artifactId>protobuf-parent</artifactId>
-    <version>3.21.0-rc-1</version>
+    <version>3.21.0-rc-2</version>
   </parent>
 
   <artifactId>protobuf-java-util</artifactId>
kokoro/common/bazel_wrapper.sh (new executable file, 71 lines)
@@ -0,0 +1,71 @@
+#!/bin/bash
+
+# Wrapper for invoking bazel on Kokoro.
+#
+# This script adds extra flags to a bazel invocation when it is run from Kokoro.
+# When the special environment variables are not present (e.g., if you run
+# Kokoro build scripts locally), this script is equivalent to the "bazel"
+# command.
+#
+# Example of running directly:
+#   path/to/bazel_wrapper.sh build //...
+#
+# Example of `source`ing:
+#   source path/to/bazel_wrapper.sh
+#   bazel_wrapper build //...
+
+function bazel_wrapper::gen_invocation_id() {
+  # Create a new invocation ID and store in the artifacts dir.
+  local _invocation_id=$(uuidgen | tr A-Z a-z)
+
+  # Put the new invocation ID at the start of the output IDs file. Some
+  # Google-internal tools only look at the first entry, so this ensures the
+  # most recent entry is first.
+  local _ids_file=${KOKORO_ARTIFACTS_DIR}/bazel_invocation_ids
+  local _temp_ids=$(mktemp)
+  echo ${_invocation_id} > ${_temp_ids}
+  [[ -e ${_ids_file} ]] && cat ${_ids_file} >> ${_temp_ids}
+  mv -f ${_temp_ids} ${_ids_file}
+
+  echo -n ${_invocation_id}
+}
+
+# Prints flags to use on Kokoro.
+function bazel_wrapper::kokoro_flags() {
+  [[ -n ${KOKORO_BES_PROJECT_ID:-} ]] || return
+
+  local -a _flags
+  _flags+=(
+    --bes_backend=${KOKORO_BES_BACKEND_ADDRESS:-buildeventservice.googleapis.com}
+    --bes_results_url=https://source.cloud.google.com/results/invocations/
+    --invocation_id=$(bazel_wrapper::gen_invocation_id)
+    --project_id=${KOKORO_BES_PROJECT_ID}  # --bes_instance_name in Bazel 5+
+    --remote_cache=https://storage.googleapis.com/protobuf-bazel-cache
+  )
+  if [[ -n ${KOKORO_BAZEL_AUTH_CREDENTIAL:-} ]]; then
+    _flags+=( --google_credentials=${KOKORO_BAZEL_AUTH_CREDENTIAL} )
+  else
+    _flags+=( --google_default_credentials=true )
+  fi
+
+  echo "${_flags[@]}"
+}
+
+# Runs bazel with Kokoro flags, if appropriate.
+function bazel_wrapper() {
+  local -a _flags
+
+  # We might need to add flags. They need to come after any startup flags and
+  # the command, but before any terminating "--", so copy them into the _flags
+  # variable.
+  until (( ${#@} == 0 )) || [[ $1 == "--" ]]; do
+    _flags+=( "${1}" ); shift
+  done
+
+  # Set the `BAZEL` env variable to override the actual bazel binary to use:
+  ${BAZEL:=bazel} "${_flags[@]}" $(bazel_wrapper::kokoro_flags) "${@}"
+}
+
+# If this script was called directly, run bazel. Otherwise (i.e., this script
+# was `source`d), the sourcing script will call bazel_wrapper themselves.
+(( ${#BASH_SOURCE[@]} == 1 )) && bazel_wrapper "${@}"
kokoro/common/pyenv.sh (new file, 16 lines)
@@ -0,0 +1,16 @@
+# Shared logic to choose a Python version with pyenv.
+#
+# This file should be `source`d.
+
+# Requested version of Python can be overridden by env variable.
+: ${PYTHON_VERSION:=3.9.5}
+
+if pyenv --version >/dev/null ; then
+  eval "$(pyenv init -)"
+  if ! pyenv global ${PYTHON_VERSION}; then
+    echo "Python ${PYTHON_VERSION} is not available. Versions available:" >&2
+    pyenv versions >&2
+    exit 1
+  fi
+fi
+echo "Using $(python --version || python3 --version)"
@@ -5,12 +5,9 @@
 
 set -ex
 
-mkdir -p cmake/crossbuild_aarch64
-cd cmake/crossbuild_aarch64
-
 # the build commands are expected to run under dockcross docker image
 # where the CC, CXX and other toolchain variables already point to the crosscompiler
-cmake ..
+cmake .
 make -j8
 
 # check that the resulting test binary is indeed an aarch64 ELF
@@ -13,7 +13,11 @@ else
 fi
 
 # Pin the dockcross image since newer versions of the image break the build
-PINNED_DOCKCROSS_IMAGE_VERSION=dockcross/manylinux2014-aarch64:20210803-41e5c69
+# We use an older version of the dockcross image that has gcc 4.9.4 because it
+# was built before https://github.com/dockcross/dockcross/pull/449
+# Thanks to that, wheel builds with this image aren't actually compliant with
+# manylinux2014, but only with manylinux_2_24
+PINNED_DOCKCROSS_IMAGE_VERSION=dockcross/manylinux2014-aarch64:20200929-608e6ac
 
 # running dockcross image without any arguments generates a wrapper
 # scripts that can be used to run commands under the dockcross image
|
@ -4,6 +4,5 @@
|
|||||||
|
|
||||||
set -ex
|
set -ex
|
||||||
|
|
||||||
./autogen.sh
|
cmake -DCMAKE_POSITION_INDEPENDENT_CODE=ON -Dprotobuf_WITH_ZLIB=0 .
|
||||||
CXXFLAGS="-fPIC -g -O2" ./configure --host=aarch64
|
|
||||||
make -j8
|
make -j8
|
||||||
|
@@ -8,13 +8,20 @@ set -ex
 
 PYTHON="/opt/python/cp38-cp38/bin/python"
 
-./autogen.sh
-CXXFLAGS="-fPIC -g -O2" ./configure --host=aarch64
+# Initialize any submodules.
+git submodule update --init --recursive
+
+# Build protoc and libprotobuf
+cmake -DCMAKE_POSITION_INDEPENDENT_CODE=ON -Dprotobuf_WITH_ZLIB=0 .
 make -j8
 
+# Copy lib files to the expected location.
+mkdir -p src/.libs
+ln -f *.a src/.libs/
+
 # create a simple shell wrapper that runs crosscompiled protoc under qemu
 echo '#!/bin/bash' >protoc_qemu_wrapper.sh
-echo 'exec qemu-aarch64 "../src/protoc" "$@"' >>protoc_qemu_wrapper.sh
+echo 'exec qemu-aarch64 "../protoc" "$@"' >>protoc_qemu_wrapper.sh
 chmod ugo+x protoc_qemu_wrapper.sh
 
 # PROTOC variable is by build_py step that runs under ./python directory
@@ -16,7 +16,7 @@ ${PYTHON} -m pip install --user pytest auditwheel
 # we've built the python extension previously with --inplace option
 # so we can just discover all the unittests and run them directly under
 # the python/ directory.
-LD_LIBRARY_PATH=../src/.libs PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp ${PYTHON} -m pytest google/protobuf
+LD_LIBRARY_PATH=. PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp ${PYTHON} -m pytest google/protobuf
 
 # step 2: run auditwheel show to check that the wheel is manylinux2014 compatible.
 # auditwheel needs to run on wheel's target platform (or under an emulator)
@@ -1,7 +1,7 @@
 #!/bin/bash
 #
 # Build file to set up and run tests
-set -ex
+set -eu
 
 # Install Bazel 4.0.0.
 use_bazel.sh 4.0.0
@@ -10,22 +10,35 @@ bazel version
 # Change to repo root
 cd $(dirname $0)/../../..
 
-git submodule update --init --recursive
+# Get kokoro scripts from repo root by default.
+: ${SCRIPT_ROOT:=$(pwd)}
+source ${SCRIPT_ROOT}/kokoro/common/pyenv.sh
 
 # Disabled for now, re-enable if appropriate.
 # //:build_files_updated_unittest \
 
-bazel test \
-  -k --copt=-Werror --host_copt=-Werror --test_output=errors \
-  //build_defs:all \
-  //java:tests \
-  //src/... \
-  //:protobuf_python \
-  @com_google_protobuf_examples//...
+bazel_args=(
+  test
+  --keep_going
+  --copt=-Werror
+  --host_copt=-Werror
+  --test_output=errors
+  --
+  //...
+  -//objectivec/...  # only works on macOS
+  @com_google_protobuf_examples//...
+)
+
+${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh "${bazel_args[@]}"
 
 # Verify that we can build successfully from generated tar files.
-./autogen.sh && ./configure && make -j$(nproc) dist
+(
+  pyenv versions
+  pyenv shell 2.7.9  # python2 required for old googletest autotools support
+  git submodule update --init --recursive
+  ./autogen.sh && ./configure && make -j$(nproc) dist
+)
 DIST=`ls *.tar.gz`
 tar -xf $DIST
 cd ${DIST//.tar.gz}
-bazel build //:protobuf //:protobuf_java
+${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh build //:protobuf //:protobuf_java
kokoro/linux/bazel/common.cfg (new file, 9 lines)
@@ -0,0 +1,9 @@
+# Common config shared by presubmit and continuous.
+
+bazel_setting: {
+  project_id: "protobuf-build"
+  bes_backend_address: "buildeventservice.googleapis.com"
+  foundry_backend_address: "remotebuildexecution.googleapis.com"
+  upsalite_frontend_address: "https://source.cloud.google.com"
+  local_execution: true
+}
@@ -5,26 +5,7 @@
 # Note that the builds use WORKSPACE to fetch external sources, not
 # git submodules.
 
-set -eux
+set -eu
 
-BUILD_ONLY_TARGETS=(
-  //pkg:all
-  //:protoc
-  //:protobuf
-  //:protobuf_python
-)
-
-TEST_TARGETS=(
-  //build_defs:all
-  //conformance:all
-  //java:tests
-  //python:all
-  //src/...
-  @com_google_protobuf_examples//...
-)
-
-CONTAINER_NAME=gcr.io/protobuf-build/bazel/linux
-CONTAINER_VERSION=5.1.1-e41ccfa1648716433276ebe077c665796550fcbb
-
 use_bazel.sh 5.0.0 || true
 bazel version
@@ -32,58 +13,48 @@ bazel version
 # Change to repo root
 cd $(dirname $0)/../../..
 
+# Get kokoro scripts from repo root by default.
+: ${SCRIPT_ROOT:=$(pwd)}
+source ${SCRIPT_ROOT}/kokoro/common/pyenv.sh
+
+# Build distribution archive
+echo "============================================================"
+echo -e "[[ $(date) ]] Building distribution archive...\n"
+${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh build //pkg:dist_all_tar
+DIST_ARCHIVE=$(readlink $(bazel info bazel-bin)/pkg/dist_all_tar.tar.gz)
+bazel shutdown
+
+# Extract the dist archive.
+echo "============================================================"
+echo -e "[[ $(date) ]] Extracting distribution archive...\n"
+
 # Construct temp directory for running the dist build.
 # If you want to run locally and keep the build dir, create a directory
 # and pass it in the DIST_WORK_ROOT env var.
 if [[ -z ${DIST_WORK_ROOT:-} ]]; then
   : ${DIST_WORK_ROOT:=$(mktemp -d)}
   function dist_cleanup() {
-    rm -rf ${DIST_WORK_ROOT}
+    (( $BASH_SUBSHELL == 0 )) && rm -rf ${DIST_WORK_ROOT}
   }
   trap dist_cleanup EXIT
 fi
 
-# Let Bazel share the distdir.
-TMP_DISTDIR=${DIST_WORK_ROOT}/bazel-distdir
-mkdir -p ${TMP_DISTDIR}
-
-# Build distribution archive
-date
-bazel fetch --distdir=${TMP_DISTDIR} //pkg:dist_all_tar
-bazel build --distdir=${TMP_DISTDIR} //pkg:dist_all_tar
-DIST_ARCHIVE=$(readlink $(bazel info bazel-bin)/pkg/dist_all_tar.tar.gz)
-bazel shutdown
-
-# The `pkg_tar` rule emits a symlink based on the rule name. The actual
-# file is named with the current version.
-date
-echo "Resolved archive path: ${DIST_ARCHIVE}"
-
-# Extract the dist archive.
-date
 DIST_WORKSPACE=${DIST_WORK_ROOT}/protobuf
 mkdir -p ${DIST_WORKSPACE}
-tar -C ${DIST_WORKSPACE} --strip-components=1 -axf ${DIST_ARCHIVE}
+tar -C ${DIST_WORKSPACE} --strip-components=1 -axf bazel-bin/pkg/dist_all_tar.tar.gz
 
-# Perform build steps in the extracted dist sources.
+echo "============================================================"
+echo -e "[[ $(date) ]] Building extracted archive...\n"
+
 cd ${DIST_WORKSPACE}
-FAILED=false
-
-until docker pull gcr.io/protobuf-build/bazel/linux:${CONTAINER_VERSION}; do
-  sleep 10
-done
-
-date
-docker run --rm \
-  -v ${DIST_WORKSPACE}:/workspace \
-  -v ${TMP_DISTDIR}:${TMP_DISTDIR} \
-  ${CONTAINER_NAME}:${CONTAINER_VERSION} \
-  test --distdir=${TMP_DISTDIR} --test_output=errors -k \
-  "${BUILD_ONLY_TARGETS[@]}" "${TEST_TARGETS[@]}" || FAILED=true
-
-if ${FAILED}; then
-  echo FAILED
-  exit 1
-fi
-echo PASS
+
+bazel_args=(
+  test
+  --keep_going
+  --test_output=errors
+  --
+  //...
+  -//objectivec/...  # only works on macOS
+  @com_google_protobuf_examples//...
+)
+${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh "${bazel_args[@]}"
kokoro/linux/bazel_distcheck/common.cfg (new file, 9 lines)
@@ -0,0 +1,9 @@
+# Common config shared by presubmit and continuous.
+
+bazel_setting: {
+  project_id: "protobuf-build"
+  bes_backend_address: "buildeventservice.googleapis.com"
+  foundry_backend_address: "remotebuildexecution.googleapis.com"
+  upsalite_frontend_address: "https://source.cloud.google.com"
+  local_execution: true
+}
@@ -10,10 +10,10 @@
   <email>protobuf-opensource@google.com</email>
   <active>yes</active>
  </lead>
- <date>2022-05-10</date>
- <time>11:33:40</time>
+ <date>2022-05-19</date>
+ <time>13:35:18</time>
  <version>
-  <release>3.21.0RC1</release>
+  <release>3.21.0RC2</release>
   <api>3.21.0</api>
  </version>
  <stability>
@@ -1298,5 +1298,20 @@ GA release.
   <notes>
   </notes>
  </release>
+ <release>
+  <version>
+   <release>3.21.0RC2</release>
+   <api>3.21.0</api>
+  </version>
+  <stability>
+   <release>beta</release>
+   <api>beta</api>
+  </stability>
+  <date>2022-05-19</date>
+  <time>13:35:18</time>
+  <license uri="https://opensource.org/licenses/BSD-3-Clause">BSD-3-Clause</license>
+  <notes>
+  </notes>
+ </release>
 </changelog>
</package>
@@ -127,7 +127,7 @@ ZEND_BEGIN_ARG_INFO_EX(arginfo_setter, 0, 0, 1)
   ZEND_ARG_INFO(0, value)
 ZEND_END_ARG_INFO()
 
-#define PHP_PROTOBUF_VERSION "3.21.0RC1"
+#define PHP_PROTOBUF_VERSION "3.21.0RC2"
 
 // ptr -> PHP object cache. This is a weak map that caches lazily-created
 // wrapper objects around upb types:
@@ -74,10 +74,8 @@ class FieldOptions extends \Google\Protobuf\Internal\Message
      * implementation must either *always* check its required fields, or *never*
      * check its required fields, regardless of whether or not the message has
      * been parsed.
-     * As of 2021, lazy does no correctness checks on the byte stream during
-     * parsing. This may lead to crashes if and when an invalid byte stream is
-     * finally parsed upon access.
-     * TODO(b/211906113): Enable validation on lazy fields.
+     * As of May 2022, lazy verifies the contents of the byte stream during
+     * parsing. An invalid byte stream will cause the overall parsing to fail.
      *
      * Generated from protobuf field <code>optional bool lazy = 5 [default = false];</code>
      */
@@ -165,10 +163,8 @@ class FieldOptions extends \Google\Protobuf\Internal\Message
      * implementation must either *always* check its required fields, or *never*
      * check its required fields, regardless of whether or not the message has
      * been parsed.
-     * As of 2021, lazy does no correctness checks on the byte stream during
-     * parsing. This may lead to crashes if and when an invalid byte stream is
-     * finally parsed upon access.
-     * TODO(b/211906113): Enable validation on lazy fields.
+     * As of May 2022, lazy verifies the contents of the byte stream during
+     * parsing. An invalid byte stream will cause the overall parsing to fail.
      * @type bool $unverified_lazy
      *     unverified_lazy does no correctness checks on the byte stream. This should
      *     only be used where lazy with verification is prohibitive for performance
@@ -354,10 +350,8 @@ class FieldOptions extends \Google\Protobuf\Internal\Message
      * implementation must either *always* check its required fields, or *never*
      * check its required fields, regardless of whether or not the message has
      * been parsed.
-     * As of 2021, lazy does no correctness checks on the byte stream during
-     * parsing. This may lead to crashes if and when an invalid byte stream is
-     * finally parsed upon access.
-     * TODO(b/211906113): Enable validation on lazy fields.
+     * As of May 2022, lazy verifies the contents of the byte stream during
+     * parsing. An invalid byte stream will cause the overall parsing to fail.
      *
      * Generated from protobuf field <code>optional bool lazy = 5 [default = false];</code>
      * @return bool
@@ -402,10 +396,8 @@ class FieldOptions extends \Google\Protobuf\Internal\Message
      * implementation must either *always* check its required fields, or *never*
      * check its required fields, regardless of whether or not the message has
      * been parsed.
-     * As of 2021, lazy does no correctness checks on the byte stream during
-     * parsing. This may lead to crashes if and when an invalid byte stream is
-     * finally parsed upon access.
-     * TODO(b/211906113): Enable validation on lazy fields.
+     * As of May 2022, lazy verifies the contents of the byte stream during
+     * parsing. An invalid byte stream will cause the overall parsing to fail.
      *
      * Generated from protobuf field <code>optional bool lazy = 5 [default = false];</code>
      * @param bool $var
@@ -16,17 +16,20 @@ package_naming(
 
 pkg_files(
     name = "wkt_protos_files",
-    srcs = ["//src/google/protobuf:well_known_type_protos"],
+    srcs = [
+        "//:well_known_type_protos",
+        "//src/google/protobuf:descriptor_proto_srcs",
+    ],
     prefix = "include/google/protobuf",
    visibility = ["//visibility:private"],
 )
 
 pkg_files(
-    name = "descriptor_protos_files",
+    name = "compiler_plugin_protos_files",
     srcs = [
-        "//:descriptor_proto",
+        "//src/google/protobuf/compiler:compiler_plugin_protos_files",
     ],
-    prefix = "include/google/protobuf",
+    prefix = "include/google/protobuf/compiler",
     visibility = ["//visibility:private"],
 )
 
@@ -65,11 +68,10 @@ pkg_files(
 pkg_zip(
     name = "protoc_release",
     srcs = [
-        ":descriptor_protos_files",
+        ":compiler_plugin_protos_files",
         ":protoc_files",
         ":protoc_readme",
         ":wkt_protos_files",
-        "//src/google/protobuf/compiler:compiler_plugin_protos_files",
     ],
     package_file_name = "protoc-{version}-{platform}.zip",
     package_variables = ":protobuf_pkg_naming",
@@ -303,9 +305,9 @@ gen_file_lists(
     out_stem = "src_file_lists",
     src_libs = {
         # source rule: name in generated file
-        "//:protobuf": "libprotobuf",
-        "//src/google/protobuf/compiler:protoc_lib": "libprotoc",
-        "//:protobuf_lite": "libprotobuf_lite",
+        ":protobuf": "libprotobuf",
+        ":protoc": "libprotoc",
+        ":protobuf_lite": "libprotobuf_lite",
     },
 )
 
@@ -341,8 +343,8 @@ cc_dist_library(
     }),
     tags = ["manual"],
     deps = [
-        "//:protobuf_lite",
         "//src/google/protobuf:arena",
+        "//src/google/protobuf:protobuf_lite",
         "//src/google/protobuf/io",
         "//src/google/protobuf/io:io_win32",
         "//src/google/protobuf/stubs:lite",
@@ -360,8 +362,6 @@ cc_dist_library(
     }),
     tags = ["manual"],
     deps = [
-        "//:protobuf",
-        "//:protobuf_lite",
         "//src/google/protobuf:arena",
         "//src/google/protobuf/compiler:importer",
         "//src/google/protobuf/io",
@ -369,6 +369,8 @@ cc_dist_library(
|
|||||||
"//src/google/protobuf/io:io_win32",
|
"//src/google/protobuf/io:io_win32",
|
||||||
"//src/google/protobuf/io:printer",
|
"//src/google/protobuf/io:printer",
|
||||||
"//src/google/protobuf/io:tokenizer",
|
"//src/google/protobuf/io:tokenizer",
|
||||||
|
"//src/google/protobuf:protobuf",
|
||||||
|
"//src/google/protobuf:protobuf_lite",
|
||||||
"//src/google/protobuf/stubs",
|
"//src/google/protobuf/stubs",
|
||||||
"//src/google/protobuf/stubs:lite",
|
"//src/google/protobuf/stubs:lite",
|
||||||
"//src/google/protobuf/util:delimited_message_util",
|
"//src/google/protobuf/util:delimited_message_util",
|
||||||
@ -380,6 +382,22 @@ cc_dist_library(
|
|||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
cc_dist_library(
|
||||||
|
name = "protoc",
|
||||||
|
tags = ["manual"],
|
||||||
|
deps = [
|
||||||
|
"//src/google/protobuf/compiler:code_generator",
|
||||||
|
"//src/google/protobuf/compiler:command_line_interface",
|
||||||
|
"//src/google/protobuf/compiler/cpp",
|
||||||
|
"//src/google/protobuf/compiler/csharp",
|
||||||
|
"//src/google/protobuf/compiler/java",
|
||||||
|
"//src/google/protobuf/compiler/objectivec",
|
||||||
|
"//src/google/protobuf/compiler/php",
|
||||||
|
"//src/google/protobuf/compiler/python",
|
||||||
|
"//src/google/protobuf/compiler/ruby",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
# Distribution sources
|
# Distribution sources
|
||||||
################################################################################
|
################################################################################
|
||||||
|
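Note on the new ":protoc" target above: cc_dist_library flattens only its
immediate deps' compilation outputs into distribution archives. A minimal
sketch of declaring a similar target, assuming the rule is loaded from this
package; the //src/foo labels are illustrative and not part of this change:

    load("//pkg:cc_dist_library.bzl", "cc_dist_library")

    cc_dist_library(
        name = "foo",          # produces libfoo.a, libfoo.pic.a, and a DSO
        tags = ["manual"],     # built only when requested explicitly
        deps = [
            "//src/foo:core",  # only these targets' own objects are archived;
            "//src/foo:util",  # transitive deps are intentionally excluded
        ],
    )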
@@ -1,6 +1,7 @@
 # Starlark utilities for working with other build systems

 load("@rules_pkg//:providers.bzl", "PackageFilegroupInfo", "PackageFilesInfo")
+load(":cc_dist_library.bzl", "CcFileList")

 ################################################################################
 # Macro to create CMake and Automake source lists.
@@ -31,21 +32,6 @@ def gen_file_lists(name, out_stem, **kwargs):
 # Aspect that extracts srcs, hdrs, etc.
 ################################################################################

-CcFileList = provider(
-    doc = "List of files to be built into a library.",
-    fields = {
-        # As a rule of thumb, `hdrs` and `textual_hdrs` are the files that
-        # would be installed along with a prebuilt library.
-        "hdrs": "public header files, including those used by generated code",
-        "textual_hdrs": "files which are included but are not self-contained",
-
-        # The `internal_hdrs` are header files which appear in `srcs`.
-        # These are only used when compiling the library.
-        "internal_hdrs": "internal header files (only used to build .cc files)",
-        "srcs": "source files",
-    },
-)
-
 ProtoFileList = provider(
     doc = "List of proto files and generated code to be built into a library.",
     fields = {
@@ -65,56 +51,11 @@ def _flatten_target_files(targets):
             files.append(tfile)
     return files

-def _combine_cc_file_lists(file_lists):
-    hdrs = {}
-    textual_hdrs = {}
-    internal_hdrs = {}
-    srcs = {}
-    for file_list in file_lists:
-        hdrs.update({f: 1 for f in file_list.hdrs})
-        textual_hdrs.update({f: 1 for f in file_list.textual_hdrs})
-        internal_hdrs.update({f: 1 for f in file_list.internal_hdrs})
-        srcs.update({f: 1 for f in file_list.srcs})
-    return CcFileList(
-        hdrs = sorted(hdrs.keys()),
-        textual_hdrs = sorted(textual_hdrs.keys()),
-        internal_hdrs = sorted(internal_hdrs.keys()),
-        srcs = sorted(srcs.keys()),
-    )
-
 def _file_list_aspect_impl(target, ctx):
     # We're going to reach directly into the attrs on the traversed rule.
     rule_attr = ctx.rule.attr
     providers = []

-    # Extract sources from a `cc_library` (or similar):
-    if CcInfo in target:
-        # CcInfo is a proxy for what we expect this rule to look like.
-        # However, some deps may expose `CcInfo` without having `srcs`,
-        # `hdrs`, etc., so we use `getattr` to handle that gracefully.
-
-        internal_hdrs = []
-        srcs = []
-
-        # Filter `srcs` so it only contains source files. Headers will go
-        # into `internal_headers`.
-        for src in _flatten_target_files(getattr(rule_attr, "srcs", [])):
-            if src.extension.lower() in ["c", "cc", "cpp", "cxx"]:
-                srcs.append(src)
-            else:
-                internal_hdrs.append(src)
-
-        providers.append(CcFileList(
-            hdrs = _flatten_target_files(getattr(rule_attr, "hdrs", [])),
-            textual_hdrs = _flatten_target_files(getattr(
-                rule_attr,
-                "textual_hdrs",
-                [],
-            )),
-            internal_hdrs = internal_hdrs,
-            srcs = srcs,
-        ))
-
     # Extract sources from a `proto_library`:
     if ProtoInfo in target:
         proto_srcs = []
@@ -178,7 +119,7 @@ Output is CcFileList and/or ProtoFileList. Example:
 # fragment generator function.
 ################################################################################

-def _create_file_list_impl(fragment_generator):
+def _create_file_list_impl(ctx, fragment_generator):
     # `fragment_generator` is a function like:
     #     def fn(originating_rule: Label,
     #            varname: str,
@@ -191,92 +132,98 @@ def _create_file_list_impl(fragment_generator):
     # When dealing with `File` objects, the `short_path` is used to strip
     # the output prefix for generated files.

-    def _impl(ctx):
-        out = ctx.outputs.out
+    out = ctx.outputs.out

     fragments = []
     for srcrule, libname in ctx.attr.src_libs.items():
         if CcFileList in srcrule:
             cc_file_list = srcrule[CcFileList]
-                fragments.extend([
-                    fragment_generator(
-                        srcrule.label,
-                        libname + "_srcs",
-                        ctx.attr.source_prefix,
-                        [f.short_path for f in cc_file_list.srcs],
-                    ),
-                    fragment_generator(
-                        srcrule.label,
-                        libname + "_hdrs",
-                        ctx.attr.source_prefix,
-                        [f.short_path for f in (cc_file_list.hdrs +
-                                                cc_file_list.textual_hdrs)],
-                    ),
-                ])
-
-            if ProtoFileList in srcrule:
-                proto_file_list = srcrule[ProtoFileList]
-                fragments.extend([
-                    fragment_generator(
-                        srcrule.label,
-                        libname + "_proto_srcs",
-                        ctx.attr.source_prefix,
-                        [f.short_path for f in proto_file_list.proto_srcs],
-                    ),
-                    fragment_generator(
-                        srcrule.label,
-                        libname + "_srcs",
-                        ctx.attr.source_prefix,
-                        proto_file_list.srcs,
-                    ),
-                    fragment_generator(
-                        srcrule.label,
-                        libname + "_hdrs",
-                        ctx.attr.source_prefix,
-                        proto_file_list.hdrs,
-                    ),
-                ])
-
-            files = {}
-
-            if PackageFilegroupInfo in srcrule:
-                for pkg_files_info, origin in srcrule[PackageFilegroupInfo].pkg_files:
-                    # keys are the destination path:
-                    files.update(pkg_files_info.dest_src_map)
-
-            if PackageFilesInfo in srcrule:
-                # keys are the destination:
-                files.update(srcrule[PackageFilesInfo].dest_src_map)
-
-            if files == {} and DefaultInfo in srcrule and CcInfo not in srcrule:
-                # This could be an individual file or filegroup.
-                # We explicitly ignore rules with CcInfo, since their
-                # output artifacts are libraries or binaries.
-                files.update(
-                    {
-                        f.short_path: 1
-                        for f in srcrule[DefaultInfo].files.to_list()
-                    },
-                )
-
-            if files:
-                fragments.append(
-                    fragment_generator(
-                        srcrule.label,
-                        libname + "_files",
-                        ctx.attr.source_prefix,
-                        sorted(files.keys()),
-                    ),
-                )
-
-        ctx.actions.write(
-            output = out,
-            content = (ctx.attr._header % ctx.label) + "\n".join(fragments),
-        )
-
-        return [DefaultInfo(files = depset([out]))]
-
-    return _impl
+            # Turn depsets of files into sorted lists.
+            srcs = sorted(cc_file_list.srcs.to_list())
+            hdrs = sorted(
+                depset(transitive = [
+                    cc_file_list.textual_hdrs,
+                    cc_file_list.hdrs,
+                ]).to_list(),
+            )
+
+            fragments.extend([
+                fragment_generator(
+                    srcrule.label,
+                    libname + "_srcs",
+                    ctx.attr.source_prefix,
+                    [f.short_path for f in srcs],
+                ),
+                fragment_generator(
+                    srcrule.label,
+                    libname + "_hdrs",
+                    ctx.attr.source_prefix,
+                    [f.short_path for f in hdrs],
+                ),
+            ])
+
+        if ProtoFileList in srcrule:
+            proto_file_list = srcrule[ProtoFileList]
+            fragments.extend([
+                fragment_generator(
+                    srcrule.label,
+                    libname + "_proto_srcs",
+                    ctx.attr.source_prefix,
+                    [f.short_path for f in proto_file_list.proto_srcs],
+                ),
+                fragment_generator(
+                    srcrule.label,
+                    libname + "_srcs",
+                    ctx.attr.source_prefix,
+                    proto_file_list.srcs,
+                ),
+                fragment_generator(
+                    srcrule.label,
+                    libname + "_hdrs",
+                    ctx.attr.source_prefix,
+                    proto_file_list.hdrs,
+                ),
+            ])
+
+        files = {}
+
+        if PackageFilegroupInfo in srcrule:
+            for pkg_files_info, origin in srcrule[PackageFilegroupInfo].pkg_files:
+                # keys are the destination path:
+                files.update(pkg_files_info.dest_src_map)
+
+        if PackageFilesInfo in srcrule:
+            # keys are the destination:
+            files.update(srcrule[PackageFilesInfo].dest_src_map)
+
+        if files == {} and DefaultInfo in srcrule and CcFileList not in srcrule:
+            # This could be an individual file or filegroup.
+            # We explicitly ignore rules with CcInfo, since their
+            # output artifacts are libraries or binaries.
+            files.update(
+                {
+                    f.short_path: 1
+                    for f in srcrule[DefaultInfo].files.to_list()
+                },
+            )
+
+        if files:
+            fragments.append(
+                fragment_generator(
+                    srcrule.label,
+                    libname + "_files",
+                    ctx.attr.source_prefix,
+                    sorted(files.keys()),
+                ),
+            )
+
+    ctx.actions.write(
+        output = out,
+        content = (ctx.attr._header % ctx.label) + "\n".join(fragments),
+    )
+
+    return [DefaultInfo(files = depset([out]))]

 # Common rule attrs for rules that use `_create_file_list_impl`:
 # (note that `_header` is also required)
@@ -343,6 +290,9 @@ def _cmake_var_fragment(owner, varname, prefix, entries):
         entries = "\n".join(["  %s%s" % (prefix, f) for f in entries]),
     )

+def _cmake_file_list_impl(ctx):
+    _create_file_list_impl(ctx, _cmake_var_fragment)
+
 gen_cmake_file_lists = rule(
     doc = """
 Generates a CMake-syntax file with lists of files.
@@ -361,7 +311,7 @@ For proto_library, the following are generated:
   {libname}_hdrs: contains syntesized paths for generated C++ headers.

 """,
-    implementation = _create_file_list_impl(_cmake_var_fragment),
+    implementation = _cmake_file_list_impl,
     attrs = dict(
         _source_list_common_attrs,
         _header = attr.string(
@@ -416,6 +366,9 @@ def _automake_var_fragment(owner, varname, prefix, entries):
     )
     return fragment.rstrip("\\ ") + "\n"

+def _automake_file_list_impl(ctx):
+    _create_file_list_impl(ctx, _automake_var_fragment)
+
 gen_automake_file_lists = rule(
     doc = """
 Generates an Automake-syntax file with lists of files.
@@ -434,7 +387,7 @@ For proto_library, the following are generated:
   {libname}_hdrs: contains syntesized paths for generated C++ headers.

 """,
-    implementation = _create_file_list_impl(_automake_var_fragment),
+    implementation = _automake_file_list_impl,
     attrs = dict(
         _source_list_common_attrs.items(),
         _header = attr.string(
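Note on the _create_file_list_impl change above: the helper previously
manufactured and returned a per-format closure, and now takes ctx directly,
with each rule supplying a small named wrapper (_cmake_file_list_impl,
_automake_file_list_impl). A self-contained sketch of the same wrapper
pattern, with illustrative names only:

    def _upper_fragment(owner, varname, prefix, entries):
        # One possible fragment syntax: "<var> = <entries>" on one line.
        return "%s = %s" % (varname, " ".join([prefix + e for e in entries]))

    def _shared_impl(ctx, fragment_generator):
        # Shared body, parameterized on the output syntax.
        out = ctx.actions.declare_file(ctx.label.name + ".lst")
        ctx.actions.write(out, fragment_generator(ctx.label, "srcs", "", ["a.cc"]))
        return [DefaultInfo(files = depset([out]))]

    def _list_file_impl(ctx):
        return _shared_impl(ctx, _upper_fragment)

    gen_list_file = rule(implementation = _list_file_impl)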
@@ -3,12 +3,32 @@
 load("@rules_cc//cc:action_names.bzl", cc_action_names = "ACTION_NAMES")
 load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cc_toolchain")

+################################################################################
+# Archive/linking support
+################################################################################
+
+def _collect_linker_input_objects(dep_label, cc_info, objs, pic_objs):
+    """Accumulate .o and .pic.o files into `objs` and `pic_objs`."""
+    link_ctx = cc_info.linking_context
+    if link_ctx == None:
+        return
+
+    linker_inputs = link_ctx.linker_inputs.to_list()
+    for link_input in linker_inputs:
+        if link_input.owner != dep_label:
+            # This is a transitive dep: skip it.
+            continue
+
+        for lib in link_input.libraries:
+            objs.extend(lib.objects or [])
+            pic_objs.extend(lib.pic_objects or [])
+
 # Creates an action to build the `output_file` static library (archive)
 # using `object_files`.
 def _create_archive_action(
         ctx,
         feature_configuration,
-        cc_toolchain,
+        cc_toolchain_info,
         output_file,
         object_files):
     # Based on Bazel's src/main/starlark/builtins_bzl/common/cc/cc_import.bzl:
@@ -16,7 +36,7 @@ def _create_archive_action(
     # Build the command line and add args for all of the input files:
     archiver_variables = cc_common.create_link_variables(
         feature_configuration = feature_configuration,
-        cc_toolchain = cc_toolchain,
+        cc_toolchain = cc_toolchain_info,
         output_file = output_file.path,
         is_using_linker = False,
     )
@@ -48,7 +68,7 @@ def _create_archive_action(
         inputs = depset(
             direct = object_files,
             transitive = [
-                cc_toolchain.all_files,
+                cc_toolchain_info.all_files,
             ],
         ),
         use_default_shell_env = False,
@@ -56,72 +76,15 @@ def _create_archive_action(
         mnemonic = "CppArchiveDist",
     )

-# Implementation for cc_dist_library rule.
-def _cc_dist_library_impl(ctx):
-    cc_toolchain_info = find_cc_toolchain(ctx)
-    if cc_toolchain_info.ar_executable == None:
-        return []
-
-    feature_configuration = cc_common.configure_features(
-        ctx = ctx,
-        cc_toolchain = cc_toolchain_info,
-    )
-
-    # Collect the set of object files from the immediate deps.
-
-    objs = []
-    pic_objs = []
-    for dep in ctx.attr.deps:
-        if CcInfo not in dep:
-            continue
-
-        link_ctx = dep[CcInfo].linking_context
-        if link_ctx == None:
-            continue
-
-        linker_inputs = link_ctx.linker_inputs.to_list()
-        for link_input in linker_inputs:
-            if link_input.owner != dep.label:
-                # This is a transitive dep: skip it.
-                continue
-
-            for lib in link_input.libraries:
-                objs.extend(lib.objects or [])
-                pic_objs.extend(lib.pic_objects or [])
-
-    # For static libraries, build separately with and without pic.
-
-    stemname = "lib" + ctx.label.name
-    outputs = []
-
-    if len(objs) > 0:
-        archive_out = ctx.actions.declare_file(stemname + ".a")
-        _create_archive_action(
-            ctx,
-            feature_configuration,
-            cc_toolchain_info,
-            archive_out,
-            objs,
-        )
-        outputs.append(archive_out)
-
-    if len(pic_objs) > 0:
-        pic_archive_out = ctx.actions.declare_file(stemname + ".pic.a")
-        _create_archive_action(
-            ctx,
-            feature_configuration,
-            cc_toolchain_info,
-            pic_archive_out,
-            pic_objs,
-        )
-        outputs.append(pic_archive_out)
-
-    # For dynamic libraries, use the `cc_common.link` command to ensure
-    # everything gets built correctly according to toolchain definitions.
-
+def _create_dso_link_action(
+        ctx,
+        feature_configuration,
+        cc_toolchain_info,
+        object_files,
+        pic_object_files):
     compilation_outputs = cc_common.create_compilation_outputs(
-        objects = depset(objs),
-        pic_objects = depset(pic_objs),
+        objects = depset(object_files),
+        pic_objects = depset(pic_object_files),
     )
     link_output = cc_common.link(
         actions = ctx.actions,
@@ -134,6 +97,8 @@ def _cc_dist_library_impl(ctx):
     )
     library_to_link = link_output.library_to_link

+    outputs = []
+
     # Note: library_to_link.dynamic_library and interface_library are often
     # symlinks in the solib directory. For DefaultInfo, prefer reporting
     # the resolved artifact paths.
@@ -147,6 +112,207 @@ def _cc_dist_library_impl(ctx):
     elif library_to_link.interface_library != None:
         outputs.append(library_to_link.interface_library)

+    return outputs
+
+################################################################################
+# Source file/header support
+################################################################################
+
+CcFileList = provider(
+    doc = "List of files to be built into a library.",
+    fields = {
+        # As a rule of thumb, `hdrs` and `textual_hdrs` are the files that
+        # would be installed along with a prebuilt library.
+        "hdrs": "public header files, including those used by generated code",
+        "textual_hdrs": "files which are included but are not self-contained",
+
+        # The `internal_hdrs` are header files which appear in `srcs`.
+        # These are only used when compiling the library.
+        "internal_hdrs": "internal header files (only used to build .cc files)",
+        "srcs": "source files",
+    },
+)
+
+def _flatten_target_files(targets):
+    return depset(transitive = [target.files for target in targets])
+
+    files = []
+    for target in targets:
+        files.extend(target.files.to_list())
+    return files
+
+def _cc_file_list_aspect_impl(target, ctx):
+    # Extract sources from a `cc_library` (or similar):
+    if CcInfo not in target:
+        return []
+
+    # We're going to reach directly into the attrs on the traversed rule.
+    rule_attr = ctx.rule.attr
+
+    # CcInfo is a proxy for what we expect this rule to look like.
+    # However, some deps may expose `CcInfo` without having `srcs`,
+    # `hdrs`, etc., so we use `getattr` to handle that gracefully.
+
+    internal_hdrs = []
+    srcs = []
+
+    # Filter `srcs` so it only contains source files. Headers will go
+    # into `internal_headers`.
+    for src in _flatten_target_files(getattr(rule_attr, "srcs", [])).to_list():
+        if src.extension.lower() in ["c", "cc", "cpp", "cxx"]:
+            srcs.append(src)
+        else:
+            internal_hdrs.append(src)
+
+    return [CcFileList(
+        hdrs = _flatten_target_files(getattr(rule_attr, "hdrs", depset())),
+        textual_hdrs = _flatten_target_files(getattr(
+            rule_attr,
+            "textual_hdrs",
+            depset(),
+        )),
+        internal_hdrs = depset(internal_hdrs),
+        srcs = depset(srcs),
+    )]
+
+cc_file_list_aspect = aspect(
+    doc = """
+Aspect to provide the list of sources and headers from a rule.
+
+Output is CcFileList. Example:
+
+  cc_library(
+      name = "foo",
+      srcs = [
+          "foo.cc",
+          "foo_internal.h",
+      ],
+      hdrs = ["foo.h"],
+      textual_hdrs = ["foo_inl.inc"],
+  )
+  # produces:
+  # CcFileList(
+  #     hdrs = depset([File("foo.h")]),
+  #     textual_hdrs = depset([File("foo_inl.inc")]),
+  #     internal_hdrs = depset([File("foo_internal.h")]),
+  #     srcs = depset([File("foo.cc")]),
+  # )
+""",
+    implementation = _cc_file_list_aspect_impl,
+)
+
+################################################################################
+# Rule impl
+################################################################################
+
+def _collect_inputs(deps):
+    """Collects files from a list of immediate deps.
+
+    This rule collects source files and linker inputs for C++ deps. Only
+    these immediate deps are considered, not transitive deps.
+
+    The return value is a struct with object files (linker inputs),
+    partitioned by PIC and non-pic, and the rules' source and header files:
+
+        struct(
+            objects = ...,       # non-PIC object files
+            pic_objects = ...,   # PIC objects
+            cc_file_list = ...,  # a CcFileList
+        )
+
+    Args:
+      deps: Iterable of immediate deps. These will be treated as the "inputs,"
+          but not the transitive deps.
+
+    Returns:
+      A struct with linker inputs, source files, and header files.
+    """
+
+    objs = []
+    pic_objs = []
+
+    # The returned CcFileList will contain depsets of the deps' file lists.
+    # These lists hold `depset()`s from each of `deps`.
+    srcs = []
+    hdrs = []
+    internal_hdrs = []
+    textual_hdrs = []
+
+    for dep in deps:
+        if CcInfo in dep:
+            _collect_linker_input_objects(
+                dep.label,
+                dep[CcInfo],
+                objs,
+                pic_objs,
+            )
+
+        if CcFileList in dep:
+            cfl = dep[CcFileList]
+            srcs.append(cfl.srcs)
+            hdrs.append(cfl.hdrs)
+            internal_hdrs.append(cfl.internal_hdrs)
+            textual_hdrs.append(cfl.textual_hdrs)
+
+    return struct(
+        objects = objs,
+        pic_objects = pic_objs,
+        cc_file_list = CcFileList(
+            srcs = depset(transitive = srcs),
+            hdrs = depset(transitive = hdrs),
+            internal_hdrs = depset(transitive = internal_hdrs),
+            textual_hdrs = depset(transitive = textual_hdrs),
+        ),
+    )
+
+# Implementation for cc_dist_library rule.
+def _cc_dist_library_impl(ctx):
+    cc_toolchain_info = find_cc_toolchain(ctx)
+
+    feature_configuration = cc_common.configure_features(
+        ctx = ctx,
+        cc_toolchain = cc_toolchain_info,
+    )
+
+    inputs = _collect_inputs(ctx.attr.deps)
+
+    # For static libraries, build separately with and without pic.
+
+    stemname = "lib" + ctx.label.name
+    outputs = []
+
+    if len(inputs.objects) > 0:
+        archive_out = ctx.actions.declare_file(stemname + ".a")
+        _create_archive_action(
+            ctx,
+            feature_configuration,
+            cc_toolchain_info,
+            archive_out,
+            inputs.objects,
+        )
+        outputs.append(archive_out)
+
+    if len(inputs.pic_objects) > 0:
+        pic_archive_out = ctx.actions.declare_file(stemname + ".pic.a")
+        _create_archive_action(
+            ctx,
+            feature_configuration,
+            cc_toolchain_info,
+            pic_archive_out,
+            inputs.pic_objects,
+        )
+        outputs.append(pic_archive_out)
+
+    # For dynamic libraries, use the `cc_common.link` command to ensure
+    # everything gets built correctly according to toolchain definitions.
+    outputs.extend(_create_dso_link_action(
+        ctx,
+        feature_configuration,
+        cc_toolchain_info,
+        inputs.objects,
+        inputs.pic_objects,
+    ))
+
     # We could expose the libraries for use from cc rules:
     #
     # linking_context = cc_common.create_linking_context(
@@ -169,6 +335,7 @@ def _cc_dist_library_impl(ctx):

     return [
         DefaultInfo(files = depset(outputs)),
+        inputs.cc_file_list,
     ]

 cc_dist_library = rule(
@@ -214,6 +381,7 @@ Example:
                    "Only these targets' compilation outputs will be " +
                    "included (i.e., the transitive dependencies are not " +
                    "included in the output)."),
+            aspects = [cc_file_list_aspect],
        ),
        "linkopts": attr.string_list(
            doc = ("Add these flags to the C++ linker command when creating " +
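Note on the provider plumbing above: CcFileList now carries depsets rather
than lists, and _collect_inputs merges them with depset(transitive = ...)
instead of flattening per dep. A minimal standalone sketch of that merge
pattern; the provider and field names are illustrative:

    MyFiles = provider(fields = ["srcs"])

    def _merge_srcs(deps):
        # Gather each dep's depset without flattening it. Merging via
        # `transitive` is cheap; to_list() is deferred to the consumer.
        parts = [dep[MyFiles].srcs for dep in deps if MyFiles in dep]
        return depset(transitive = parts)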
@ -114,6 +114,6 @@ def protobuf_deps():
|
|||||||
_github_archive(
|
_github_archive(
|
||||||
name = "upb",
|
name = "upb",
|
||||||
repo = "https://github.com/protocolbuffers/upb",
|
repo = "https://github.com/protocolbuffers/upb",
|
||||||
commit = "c3cfd09b0184bcbdade71a3d788df02c83e897f2",
|
commit = "12efc9b096f35b62055a217f45e6b0fe5fb1a099",
|
||||||
sha256 = "4a9f79385fc0c1e3e7ba5c34220db53f956c8c42d636bafc6a563da2facf8c3f",
|
sha256 = "de0ab4ee1e2d8f01b494de39cd70b611e190b63943f1d5c448d4ecb9560dc16f",
|
||||||
)
|
)
|
||||||
|
@ -1,3 +1,3 @@
|
|||||||
PROTOC_VERSION = '21.0-rc-1'
|
PROTOC_VERSION = '21.0-rc-2'
|
||||||
PROTOBUF_JAVA_VERSION = '3.21.0-rc-1'
|
PROTOBUF_JAVA_VERSION = '3.21.0-rc-2'
|
||||||
PROTOBUF_PYTHON_VERSION = '4.21.0-rc-1'
|
PROTOBUF_PYTHON_VERSION = '4.21.0-rc-2'
|
||||||
|
@ -8,7 +8,7 @@
|
|||||||
</parent>
|
</parent>
|
||||||
<groupId>com.google.protobuf</groupId>
|
<groupId>com.google.protobuf</groupId>
|
||||||
<artifactId>protoc</artifactId>
|
<artifactId>protoc</artifactId>
|
||||||
<version>21.0-rc-1</version>
|
<version>3.21.0-rc-2</version>
|
||||||
<packaging>pom</packaging>
|
<packaging>pom</packaging>
|
||||||
<name>Protobuf Compiler</name>
|
<name>Protobuf Compiler</name>
|
||||||
<description>
|
<description>
|
||||||
@ -71,11 +71,7 @@
|
|||||||
<type>exe</type>
|
<type>exe</type>
|
||||||
</artifact>
|
</artifact>
|
||||||
<artifact>
|
<artifact>
|
||||||
<!-- Reuse a compatible osx-x86_64 version until binary
|
<file>${basedir}/target/osx/aarch_64/protoc.exe</file>
|
||||||
support for osx-aarch_64 is added. TODO: use
|
|
||||||
<file>${basedir}/target/osx/aarch_64/protoc.exe</file>
|
|
||||||
-->
|
|
||||||
<file>${basedir}/target/osx/x86_64/protoc.exe</file>
|
|
||||||
<classifier>osx-aarch_64</classifier>
|
<classifier>osx-aarch_64</classifier>
|
||||||
<type>exe</type>
|
<type>exe</type>
|
||||||
</artifact>
|
</artifact>
|
||||||
|
@ -30,4 +30,4 @@
|
|||||||
|
|
||||||
# Copyright 2007 Google Inc. All Rights Reserved.
|
# Copyright 2007 Google Inc. All Rights Reserved.
|
||||||
|
|
||||||
__version__ = '4.21.0rc1'
|
__version__ = '4.21.0rc2'
|
||||||
|
@ -209,7 +209,7 @@ class TestConformanceCmd(_build_py):
|
|||||||
# Python 2.6 dodges these extra failures.
|
# Python 2.6 dodges these extra failures.
|
||||||
os.environ['CONFORMANCE_PYTHON_EXTRA_FAILURES'] = (
|
os.environ['CONFORMANCE_PYTHON_EXTRA_FAILURES'] = (
|
||||||
'--failure_list failure_list_python-post26.txt')
|
'--failure_list failure_list_python-post26.txt')
|
||||||
cmd = 'cd ../conformance && make %s' % (TestConformanceCmd.target)
|
cmd = 'cd ../conformance && make %s' % (TestConformanceCmd.target,)
|
||||||
subprocess.check_call(cmd, shell=True)
|
subprocess.check_call(cmd, shell=True)
|
||||||
|
|
||||||
|
|
||||||
@ -227,7 +227,7 @@ def _GetFlagValues(flag_long, flag_short):
|
|||||||
flag_res = [re.compile(r'--?%s(=(.*))?' %
|
flag_res = [re.compile(r'--?%s(=(.*))?' %
|
||||||
(flag_long[:-1] if expect_value else flag_long))]
|
(flag_long[:-1] if expect_value else flag_long))]
|
||||||
if flag_short:
|
if flag_short:
|
||||||
flag_res.append(re.compile(r'-%s(.*)?' % (flag_short)))
|
flag_res.append(re.compile(r'-%s(.*)?' % (flag_short,)))
|
||||||
|
|
||||||
flag_match = None
|
flag_match = None
|
||||||
for arg in sys.argv:
|
for arg in sys.argv:
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
Gem::Specification.new do |s|
|
Gem::Specification.new do |s|
|
||||||
s.name = "google-protobuf"
|
s.name = "google-protobuf"
|
||||||
s.version = "3.21.0.rc.1"
|
s.version = "3.21.0.rc.2"
|
||||||
git_tag = "v#{s.version.to_s.sub('.rc.', '-rc')}" # Converts X.Y.Z.rc.N to vX.Y.Z-rcN, used for the git tag
|
git_tag = "v#{s.version.to_s.sub('.rc.', '-rc')}" # Converts X.Y.Z.rc.N to vX.Y.Z-rcN, used for the git tag
|
||||||
s.licenses = ["BSD-3-Clause"]
|
s.licenses = ["BSD-3-Clause"]
|
||||||
s.summary = "Protocol Buffers"
|
s.summary = "Protocol Buffers"
|
||||||
|
@ -9,7 +9,7 @@
|
|||||||
|
|
||||||
<groupId>com.google.protobuf.jruby</groupId>
|
<groupId>com.google.protobuf.jruby</groupId>
|
||||||
<artifactId>protobuf-jruby</artifactId>
|
<artifactId>protobuf-jruby</artifactId>
|
||||||
<version>3.21.0-rc-1</version>
|
<version>3.21.0-rc-2</version>
|
||||||
<name>Protocol Buffer JRuby native extension</name>
|
<name>Protocol Buffer JRuby native extension</name>
|
||||||
<description>
|
<description>
|
||||||
Protocol Buffers are a way of encoding structured data in an efficient yet
|
Protocol Buffers are a way of encoding structured data in an efficient yet
|
||||||
@ -76,7 +76,7 @@
|
|||||||
<dependency>
|
<dependency>
|
||||||
<groupId>com.google.protobuf</groupId>
|
<groupId>com.google.protobuf</groupId>
|
||||||
<artifactId>protobuf-java-util</artifactId>
|
<artifactId>protobuf-java-util</artifactId>
|
||||||
<version>3.21.0-rc-1</version>
|
<version>3.21.0-rc-2</version>
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>org.jruby</groupId>
|
<groupId>org.jruby</groupId>
|
||||||
|
@ -540,8 +540,18 @@ protoc_inputs = \
|
|||||||
|
|
||||||
EXTRA_DIST = \
|
EXTRA_DIST = \
|
||||||
$(protoc_inputs) \
|
$(protoc_inputs) \
|
||||||
|
BUILD.bazel \
|
||||||
README.md \
|
README.md \
|
||||||
|
google/protobuf/BUILD.bazel \
|
||||||
|
google/protobuf/compiler/BUILD.bazel \
|
||||||
|
google/protobuf/compiler/cpp/BUILD.bazel \
|
||||||
|
google/protobuf/compiler/csharp/BUILD.bazel \
|
||||||
|
google/protobuf/compiler/java/BUILD.bazel \
|
||||||
|
google/protobuf/compiler/objectivec/BUILD.bazel \
|
||||||
google/protobuf/compiler/package_info.h \
|
google/protobuf/compiler/package_info.h \
|
||||||
|
google/protobuf/compiler/php/BUILD.bazel \
|
||||||
|
google/protobuf/compiler/python/BUILD.bazel \
|
||||||
|
google/protobuf/compiler/ruby/BUILD.bazel \
|
||||||
google/protobuf/compiler/ruby/ruby_generated_code.proto \
|
google/protobuf/compiler/ruby/ruby_generated_code.proto \
|
||||||
google/protobuf/compiler/ruby/ruby_generated_code_pb.rb \
|
google/protobuf/compiler/ruby/ruby_generated_code_pb.rb \
|
||||||
google/protobuf/compiler/ruby/ruby_generated_code_proto2.proto \
|
google/protobuf/compiler/ruby/ruby_generated_code_proto2.proto \
|
||||||
@ -554,10 +564,12 @@ EXTRA_DIST = \
|
|||||||
google/protobuf/compiler/ruby/ruby_generated_pkg_implicit.proto \
|
google/protobuf/compiler/ruby/ruby_generated_pkg_implicit.proto \
|
||||||
google/protobuf/compiler/ruby/ruby_generated_pkg_implicit_pb.rb \
|
google/protobuf/compiler/ruby/ruby_generated_pkg_implicit_pb.rb \
|
||||||
google/protobuf/compiler/zip_output_unittest.sh \
|
google/protobuf/compiler/zip_output_unittest.sh \
|
||||||
|
google/protobuf/io/BUILD.bazel \
|
||||||
google/protobuf/io/gzip_stream.h \
|
google/protobuf/io/gzip_stream.h \
|
||||||
google/protobuf/io/gzip_stream_unittest.sh \
|
google/protobuf/io/gzip_stream_unittest.sh \
|
||||||
google/protobuf/io/package_info.h \
|
google/protobuf/io/package_info.h \
|
||||||
google/protobuf/package_info.h \
|
google/protobuf/package_info.h \
|
||||||
|
google/protobuf/stubs/BUILD.bazel \
|
||||||
google/protobuf/test_messages_proto2.proto \
|
google/protobuf/test_messages_proto2.proto \
|
||||||
google/protobuf/test_messages_proto3.proto \
|
google/protobuf/test_messages_proto3.proto \
|
||||||
google/protobuf/testdata/bad_utf8_string \
|
google/protobuf/testdata/bad_utf8_string \
|
||||||
@ -573,6 +585,9 @@ EXTRA_DIST = \
|
|||||||
google/protobuf/testdata/text_format_unittest_data_pointy_oneof.txt \
|
google/protobuf/testdata/text_format_unittest_data_pointy_oneof.txt \
|
||||||
google/protobuf/testdata/text_format_unittest_extensions_data.txt \
|
google/protobuf/testdata/text_format_unittest_extensions_data.txt \
|
||||||
google/protobuf/testdata/text_format_unittest_extensions_data_pointy.txt \
|
google/protobuf/testdata/text_format_unittest_extensions_data_pointy.txt \
|
||||||
|
google/protobuf/testing/BUILD.bazel \
|
||||||
|
google/protobuf/util/BUILD.bazel \
|
||||||
|
google/protobuf/util/internal/BUILD.bazel \
|
||||||
google/protobuf/util/package_info.h \
|
google/protobuf/util/package_info.h \
|
||||||
libprotobuf-lite.map \
|
libprotobuf-lite.map \
|
||||||
libprotobuf.map \
|
libprotobuf.map \
|
||||||
|
@ -160,6 +160,7 @@ cc_library(
|
|||||||
linkopts = LINK_OPTS,
|
linkopts = LINK_OPTS,
|
||||||
visibility = [
|
visibility = [
|
||||||
"//:__pkg__",
|
"//:__pkg__",
|
||||||
|
"//pkg:__pkg__",
|
||||||
"//src/google/protobuf:__subpackages__",
|
"//src/google/protobuf:__subpackages__",
|
||||||
],
|
],
|
||||||
# In Bazel 6.0+, these will be `interface_deps`:
|
# In Bazel 6.0+, these will be `interface_deps`:
|
||||||
@ -209,6 +210,7 @@ cc_library(
|
|||||||
linkopts = LINK_OPTS,
|
linkopts = LINK_OPTS,
|
||||||
visibility = [
|
visibility = [
|
||||||
"//:__pkg__",
|
"//:__pkg__",
|
||||||
|
"//pkg:__pkg__",
|
||||||
"//src/google/protobuf:__subpackages__",
|
"//src/google/protobuf:__subpackages__",
|
||||||
],
|
],
|
||||||
deps = [
|
deps = [
|
||||||
|
@ -315,6 +315,15 @@ class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8) Arena final {
|
|||||||
static_cast<Args&&>(args)...);
|
static_cast<Args&&>(args)...);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// API to delete any objects not on an arena. This can be used to safely
|
||||||
|
// clean up messages or repeated fields without knowing whether or not they're
|
||||||
|
// owned by an arena. The pointer passed to this function should not be used
|
||||||
|
// again.
|
||||||
|
template <typename T>
|
||||||
|
PROTOBUF_ALWAYS_INLINE static void Destroy(T* obj) {
|
||||||
|
if (InternalGetOwningArena(obj) == nullptr) delete obj;
|
||||||
|
}
|
||||||
|
|
||||||
// Allocates memory with the specific size and alignment.
|
// Allocates memory with the specific size and alignment.
|
||||||
void* AllocateAligned(size_t size, size_t align = 8) {
|
void* AllocateAligned(size_t size, size_t align = 8) {
|
||||||
if (align <= 8) {
|
if (align <= 8) {
|
||||||
@ -411,8 +420,23 @@ class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8) Arena final {
|
|||||||
template <typename T>
|
template <typename T>
|
||||||
class InternalHelper {
|
class InternalHelper {
|
||||||
private:
|
private:
|
||||||
// Provides access to protected GetOwningArena to generated messages.
|
struct Rank1 {};
|
||||||
static Arena* GetOwningArena(const T* p) { return p->GetOwningArena(); }
|
struct Rank0 : Rank1 {};
|
||||||
|
|
||||||
|
static Arena* GetOwningArena(const T* p) {
|
||||||
|
return GetOwningArena(Rank0{}, p);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename U>
|
||||||
|
static auto GetOwningArena(Rank0, const U* p)
|
||||||
|
-> decltype(p->GetOwningArena()) {
|
||||||
|
return p->GetOwningArena();
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename U>
|
||||||
|
static Arena* GetOwningArena(Rank1, const U* p) {
|
||||||
|
return nullptr;
|
||||||
|
}
|
||||||
|
|
||||||
static void InternalSwap(T* a, T* b) { a->InternalSwap(b); }
|
static void InternalSwap(T* a, T* b) { a->InternalSwap(b); }
|
||||||
|
|
||||||
@ -771,25 +795,6 @@ class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8) Arena final {
|
|||||||
return nullptr;
|
return nullptr;
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename T>
|
|
||||||
PROTOBUF_ALWAYS_INLINE static Arena* GetOwningArena(const T* value) {
|
|
||||||
return GetOwningArenaInternal(
|
|
||||||
value, std::is_convertible<T*, MessageLite*>());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Implementation for GetOwningArena(). All and only message objects have
|
|
||||||
// GetOwningArena() method.
|
|
||||||
template <typename T>
|
|
||||||
PROTOBUF_ALWAYS_INLINE static Arena* GetOwningArenaInternal(
|
|
||||||
const T* value, std::true_type) {
|
|
||||||
return InternalHelper<T>::GetOwningArena(value);
|
|
||||||
}
|
|
||||||
template <typename T>
|
|
||||||
PROTOBUF_ALWAYS_INLINE static Arena* GetOwningArenaInternal(
|
|
||||||
const T* /* value */, std::false_type) {
|
|
||||||
return nullptr;
|
|
||||||
}
|
|
||||||
|
|
||||||
void* AllocateAlignedWithHookForArray(size_t n, size_t align,
|
void* AllocateAlignedWithHookForArray(size_t n, size_t align,
|
||||||
const std::type_info* type) {
|
const std::type_info* type) {
|
||||||
if (align <= 8) {
|
if (align <= 8) {
|
||||||
|
@ -307,6 +307,26 @@ TEST(ArenaTest, InitialBlockTooSmall) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
TEST(ArenaTest, CreateDestroy) {
|
||||||
|
TestAllTypes original;
|
||||||
|
TestUtil::SetAllFields(&original);
|
||||||
|
|
||||||
|
// Test memory leak.
|
||||||
|
Arena arena;
|
||||||
|
TestAllTypes* heap_message = Arena::CreateMessage<TestAllTypes>(nullptr);
|
||||||
|
TestAllTypes* arena_message = Arena::CreateMessage<TestAllTypes>(&arena);
|
||||||
|
|
||||||
|
*heap_message = original;
|
||||||
|
*arena_message = original;
|
||||||
|
|
||||||
|
Arena::Destroy(heap_message);
|
||||||
|
Arena::Destroy(arena_message);
|
||||||
|
|
||||||
|
// The arena message should still exist.
|
||||||
|
EXPECT_EQ(strlen(original.optional_string().c_str()),
|
||||||
|
strlen(arena_message->optional_string().c_str()));
|
||||||
|
}
|
||||||
|
|
||||||
TEST(ArenaTest, Parsing) {
|
TEST(ArenaTest, Parsing) {
|
||||||
TestAllTypes original;
|
TestAllTypes original;
|
||||||
TestUtil::SetAllFields(&original);
|
TestUtil::SetAllFields(&original);
|
||||||
|
@ -1217,8 +1217,7 @@ TEST_F(CommandLineInterfaceTest, InsertWithAnnotationFixup) {
|
|||||||
"--plug_out=insert_endlines=test_generator,test_plugin:$tmpdir "
|
"--plug_out=insert_endlines=test_generator,test_plugin:$tmpdir "
|
||||||
"--proto_path=$tmpdir foo.proto");
|
"--proto_path=$tmpdir foo.proto");
|
||||||
|
|
||||||
ExpectWarningSubstring(
|
ExpectNoErrors();
|
||||||
"foo.proto:2:36: warning: Message name should be in UpperCamelCase.");
|
|
||||||
CheckGeneratedAnnotations("test_generator", "foo.proto");
|
CheckGeneratedAnnotations("test_generator", "foo.proto");
|
||||||
CheckGeneratedAnnotations("test_plugin", "foo.proto");
|
CheckGeneratedAnnotations("test_plugin", "foo.proto");
|
||||||
}
|
}
|
||||||
@ -2372,21 +2371,6 @@ TEST_F(CommandLineInterfaceTest, Warnings) {
|
|||||||
ExpectErrorSubstring("foo.proto:2:1: warning: Import bar.proto is unused.");
|
ExpectErrorSubstring("foo.proto:2:1: warning: Import bar.proto is unused.");
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_F(CommandLineInterfaceTest, ParserWarnings) {
|
|
||||||
// Test that parser warnings are propagated. See #9343.
|
|
||||||
|
|
||||||
CreateTempFile("foo.proto",
|
|
||||||
"syntax = \"proto2\";\n"
|
|
||||||
"message bad_to_the_bone {};\n");
|
|
||||||
|
|
||||||
Run("protocol_compiler --test_out=$tmpdir "
|
|
||||||
"--proto_path=$tmpdir foo.proto");
|
|
||||||
ExpectCapturedStderrSubstringWithZeroReturnCode(
|
|
||||||
"foo.proto:2:25: warning: Message name should be in UpperCamelCase. "
|
|
||||||
"Found: bad_to_the_bone. "
|
|
||||||
"See https://developers.google.com/protocol-buffers/docs/style");
|
|
||||||
}
|
|
||||||
|
|
||||||
// -------------------------------------------------------------------
|
// -------------------------------------------------------------------
|
||||||
// Flag parsing tests
|
// Flag parsing tests
|
||||||
|
|
||||||
@ -2707,6 +2691,7 @@ TEST_P(EncodeDecodeTest, Encode) {
|
|||||||
EXPECT_TRUE(Run(args + " --encode=protobuf_unittest.TestAllTypes"));
|
EXPECT_TRUE(Run(args + " --encode=protobuf_unittest.TestAllTypes"));
|
||||||
ExpectStdoutMatchesBinaryFile(TestUtil::GetTestDataPath(
|
ExpectStdoutMatchesBinaryFile(TestUtil::GetTestDataPath(
|
||||||
"third_party/protobuf/testdata/golden_message_oneof_implemented"));
|
"third_party/protobuf/testdata/golden_message_oneof_implemented"));
|
||||||
|
ExpectStderrMatchesText("");
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_P(EncodeDecodeTest, Decode) {
|
TEST_P(EncodeDecodeTest, Decode) {
|
||||||
@ -2718,6 +2703,7 @@ TEST_P(EncodeDecodeTest, Decode) {
|
|||||||
ExpectStdoutMatchesTextFile(TestUtil::GetTestDataPath(
|
ExpectStdoutMatchesTextFile(TestUtil::GetTestDataPath(
|
||||||
"third_party/protobuf/"
|
"third_party/protobuf/"
|
||||||
"testdata/text_format_unittest_data_oneof_implemented.txt"));
|
"testdata/text_format_unittest_data_oneof_implemented.txt"));
|
||||||
|
ExpectStderrMatchesText("");
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_P(EncodeDecodeTest, Partial) {
|
TEST_P(EncodeDecodeTest, Partial) {
|
||||||
@ -2726,7 +2712,7 @@ TEST_P(EncodeDecodeTest, Partial) {
|
|||||||
Run(TestUtil::MaybeTranslatePath("net/proto2/internal/unittest.proto") +
|
Run(TestUtil::MaybeTranslatePath("net/proto2/internal/unittest.proto") +
|
||||||
" --encode=protobuf_unittest.TestRequired"));
|
" --encode=protobuf_unittest.TestRequired"));
|
||||||
ExpectStdoutMatchesText("");
|
ExpectStdoutMatchesText("");
|
||||||
ExpectStderrContainsText(
|
ExpectStderrMatchesText(
|
||||||
"warning: Input message is missing required fields: a, b, c\n");
|
"warning: Input message is missing required fields: a, b, c\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -2750,7 +2736,7 @@ TEST_P(EncodeDecodeTest, UnknownType) {
|
|||||||
Run(TestUtil::MaybeTranslatePath("net/proto2/internal/unittest.proto") +
|
Run(TestUtil::MaybeTranslatePath("net/proto2/internal/unittest.proto") +
|
||||||
" --encode=NoSuchType"));
|
" --encode=NoSuchType"));
|
||||||
ExpectStdoutMatchesText("");
|
ExpectStdoutMatchesText("");
|
||||||
ExpectStderrContainsText("Type not defined: NoSuchType\n");
|
ExpectStderrMatchesText("Type not defined: NoSuchType\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_P(EncodeDecodeTest, ProtoParseError) {
|
TEST_P(EncodeDecodeTest, ProtoParseError) {
|
||||||
@ -2775,6 +2761,7 @@ TEST_P(EncodeDecodeTest, EncodeDeterministicOutput) {
|
|||||||
args + " --encode=protobuf_unittest.TestAllTypes --deterministic_output"));
|
args + " --encode=protobuf_unittest.TestAllTypes --deterministic_output"));
|
||||||
ExpectStdoutMatchesBinaryFile(TestUtil::GetTestDataPath(
|
ExpectStdoutMatchesBinaryFile(TestUtil::GetTestDataPath(
|
||||||
"third_party/protobuf/testdata/golden_message_oneof_implemented"));
|
"third_party/protobuf/testdata/golden_message_oneof_implemented"));
|
||||||
|
ExpectStderrMatchesText("");
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST_P(EncodeDecodeTest, DecodeDeterministicOutput) {
|
TEST_P(EncodeDecodeTest, DecodeDeterministicOutput) {
|
||||||
|
@ -48,7 +48,10 @@ cc_library(
|
|||||||
],
|
],
|
||||||
copts = COPTS,
|
copts = COPTS,
|
||||||
include_prefix = "google/protobuf/compiler/cpp",
|
include_prefix = "google/protobuf/compiler/cpp",
|
||||||
visibility = ["//src/google/protobuf/compiler:__pkg__"],
|
visibility = [
|
||||||
|
"//pkg:__pkg__",
|
||||||
|
"//src/google/protobuf/compiler:__pkg__",
|
||||||
|
],
|
||||||
deps = [
|
deps = [
|
||||||
"//:protobuf",
|
"//:protobuf",
|
||||||
"//src/google/protobuf/compiler:code_generator",
|
"//src/google/protobuf/compiler:code_generator",
|
||||||
|
@ -135,8 +135,6 @@ bool CppGenerator::Generate(const FileDescriptor* file,
|
|||||||
.insert(options[i].second.substr(pos, next_pos - pos));
|
.insert(options[i].second.substr(pos, next_pos - pos));
|
||||||
pos = next_pos + 1;
|
pos = next_pos + 1;
|
||||||
} while (pos < options[i].second.size());
|
} while (pos < options[i].second.size());
|
||||||
} else if (options[i].first == "verified_lazy") {
|
|
||||||
file_options.unverified_lazy = false;
|
|
||||||
} else if (options[i].first == "unverified_lazy_message_sets") {
|
} else if (options[i].first == "unverified_lazy_message_sets") {
|
||||||
file_options.unverified_lazy_message_sets = true;
|
file_options.unverified_lazy_message_sets = true;
|
||||||
} else if (options[i].first == "message_owned_arena_trial") {
|
} else if (options[i].first == "message_owned_arena_trial") {
|
||||||
|
@ -80,7 +80,6 @@ struct Options {
|
|||||||
bool annotate_accessor = false;
|
bool annotate_accessor = false;
|
||||||
bool unused_field_stripping = false;
|
bool unused_field_stripping = false;
|
||||||
bool unverified_lazy_message_sets = false;
|
bool unverified_lazy_message_sets = false;
|
||||||
bool unverified_lazy = false;
|
|
||||||
bool profile_driven_inline_string = true;
|
bool profile_driven_inline_string = true;
|
||||||
bool message_owned_arena_trial = false;
|
bool message_owned_arena_trial = false;
|
||||||
bool force_split = false;
|
bool force_split = false;
|
||||||
|
@ -51,7 +51,10 @@ cc_library(
|
|||||||
"//conditions:default": ["-Wno-overloaded-virtual"],
|
"//conditions:default": ["-Wno-overloaded-virtual"],
|
||||||
}),
|
}),
|
||||||
include_prefix = "google/protobuf/compiler/csharp",
|
include_prefix = "google/protobuf/compiler/csharp",
|
||||||
visibility = ["//src/google/protobuf/compiler:__pkg__"],
|
visibility = [
|
||||||
|
"//pkg:__pkg__",
|
||||||
|
"//src/google/protobuf/compiler:__pkg__",
|
||||||
|
],
|
||||||
deps = [
|
deps = [
|
||||||
"//:protobuf",
|
"//:protobuf",
|
||||||
"//src/google/protobuf/compiler:code_generator",
|
"//src/google/protobuf/compiler:code_generator",
|
||||||
@ -62,11 +65,11 @@ cc_test(
|
|||||||
name = "bootstrap_unittest",
|
name = "bootstrap_unittest",
|
||||||
srcs = ["csharp_bootstrap_unittest.cc"],
|
srcs = ["csharp_bootstrap_unittest.cc"],
|
||||||
data = [
|
data = [
|
||||||
"//src/google/protobuf:descriptor_proto_srcs",
|
|
||||||
"//:well_known_type_protos",
|
"//:well_known_type_protos",
|
||||||
"//conformance:all_files",
|
"//conformance:all_files",
|
||||||
"//conformance:conformance_proto",
|
"//conformance:conformance_proto",
|
||||||
"//csharp:wkt_cs_srcs",
|
"//csharp:wkt_cs_srcs",
|
||||||
|
"//src/google/protobuf:descriptor_proto_srcs",
|
||||||
"//src/google/protobuf:testdata",
|
"//src/google/protobuf:testdata",
|
||||||
],
|
],
|
||||||
deps = [
|
deps = [
|
||||||
|
@ -105,12 +105,6 @@ class SourceTreeDescriptorDatabase::SingleFileErrorCollector
|
|||||||
had_errors_ = true;
|
had_errors_ = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
void AddWarning(int line, int column, const std::string& message) override {
|
|
||||||
if (multi_file_error_collector_ != NULL) {
|
|
||||||
multi_file_error_collector_->AddWarning(filename_, line, column, message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private:
|
private:
|
||||||
std::string filename_;
|
std::string filename_;
|
||||||
MultiFileErrorCollector* multi_file_error_collector_;
|
MultiFileErrorCollector* multi_file_error_collector_;
|
||||||
|
@ -74,7 +74,10 @@ cc_library(
|
|||||||
],
|
],
|
||||||
copts = COPTS,
|
copts = COPTS,
|
||||||
include_prefix = "google/protobuf/compiler/java",
|
include_prefix = "google/protobuf/compiler/java",
|
||||||
visibility = ["//src/google/protobuf/compiler:__pkg__"],
|
visibility = [
|
||||||
|
"//pkg:__pkg__",
|
||||||
|
"//src/google/protobuf/compiler:__pkg__",
|
||||||
|
],
|
||||||
deps = [
|
deps = [
|
||||||
"//:protobuf",
|
"//:protobuf",
|
||||||
"//src/google/protobuf/compiler:code_generator",
|
"//src/google/protobuf/compiler:code_generator",
|
||||||
@ -85,9 +88,9 @@ cc_test(
|
|||||||
name = "doc_comment_unittest",
|
name = "doc_comment_unittest",
|
||||||
srcs = ["doc_comment_unittest.cc"],
|
srcs = ["doc_comment_unittest.cc"],
|
||||||
data = [
|
data = [
|
||||||
"//src/google/protobuf:descriptor_proto_srcs",
|
|
||||||
"//:well_known_type_protos",
|
"//:well_known_type_protos",
|
||||||
"//conformance:conformance_proto",
|
"//conformance:conformance_proto",
|
||||||
|
"//src/google/protobuf:descriptor_proto_srcs",
|
||||||
],
|
],
|
||||||
deps = [
|
deps = [
|
||||||
":java",
|
":java",
|
||||||
|
@ -39,7 +39,10 @@ cc_library(
|
|||||||
],
|
],
|
||||||
copts = COPTS,
|
copts = COPTS,
|
||||||
include_prefix = "google/protobuf/compiler/objectivec",
|
include_prefix = "google/protobuf/compiler/objectivec",
|
||||||
visibility = ["//src/google/protobuf/compiler:__pkg__"],
|
visibility = [
|
||||||
|
"//pkg:__pkg__",
|
||||||
|
"//src/google/protobuf/compiler:__pkg__",
|
||||||
|
],
|
||||||
deps = [
|
deps = [
|
||||||
"//:protobuf",
|
"//:protobuf",
|
||||||
"//src/google/protobuf/compiler:code_generator",
|
"//src/google/protobuf/compiler:code_generator",
|
||||||
|
@ -643,11 +643,10 @@ bool Parser::Parse(io::Tokenizer* input, FileDescriptorProto* file) {
|
|||||||
// Store the syntax into the file.
|
// Store the syntax into the file.
|
||||||
if (file != nullptr) file->set_syntax(syntax_identifier_);
|
if (file != nullptr) file->set_syntax(syntax_identifier_);
|
||||||
} else if (!stop_after_syntax_identifier_) {
|
} else if (!stop_after_syntax_identifier_) {
|
||||||
AddWarning(
|
GOOGLE_LOG(WARNING) << "No syntax specified for the proto file: " << file->name()
|
||||||
"No syntax specified. Please use 'syntax = \"proto2\";' or "
|
<< ". Please use 'syntax = \"proto2\";' "
|
||||||
"'syntax = \"proto3\";' to specify a syntax version. "
|
<< "or 'syntax = \"proto3\";' to specify a syntax "
|
||||||
"(Defaulted to proto2 syntax.)"
|
<< "version. (Defaulted to proto2 syntax.)";
|
||||||
);
|
|
||||||
syntax_identifier_ = "proto2";
|
syntax_identifier_ = "proto2";
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1020,8 +1019,7 @@ bool Parser::ParseMessageFieldNoLabel(
|
|||||||
location.RecordLegacyLocation(field, DescriptorPool::ErrorCollector::NAME);
|
location.RecordLegacyLocation(field, DescriptorPool::ErrorCollector::NAME);
|
||||||
DO(ConsumeIdentifier(field->mutable_name(), "Expected field name."));
|
DO(ConsumeIdentifier(field->mutable_name(), "Expected field name."));
|
||||||
|
|
||||||
if (field->type() != FieldDescriptorProto::TYPE_GROUP &&
|
if (!IsLowerUnderscore(field->name())) {
|
||||||
!IsLowerUnderscore(field->name())) {
|
|
||||||
AddWarning(
|
AddWarning(
|
||||||
"Field name should be lowercase. Found: " + field->name() +
|
"Field name should be lowercase. Found: " + field->name() +
|
||||||
". See: https://developers.google.com/protocol-buffers/docs/style");
|
". See: https://developers.google.com/protocol-buffers/docs/style");
|
||||||
|
@ -221,8 +221,9 @@ TEST_F(ParserTest, StopAfterSyntaxIdentifierWithErrors) {
|
|||||||
TEST_F(ParserTest, WarnIfSyntaxIdentifierOmmitted) {
|
TEST_F(ParserTest, WarnIfSyntaxIdentifierOmmitted) {
|
||||||
SetupParser("message A {}");
|
SetupParser("message A {}");
|
||||||
FileDescriptorProto file;
|
FileDescriptorProto file;
|
||||||
|
CaptureTestStderr();
|
||||||
EXPECT_TRUE(parser_->Parse(input_.get(), &file));
|
EXPECT_TRUE(parser_->Parse(input_.get(), &file));
|
||||||
EXPECT_TRUE(error_collector_.warning_.find("No syntax specified") !=
|
EXPECT_TRUE(GetCapturedTestStderr().find("No syntax specified") !=
|
||||||
std::string::npos);
|
std::string::npos);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -12,7 +12,10 @@ cc_library(
     hdrs = ["php_generator.h"],
     copts = COPTS,
     include_prefix = "google/protobuf/compiler/php",
-    visibility = ["//src/google/protobuf/compiler:__pkg__"],
+    visibility = [
+        "//pkg:__pkg__",
+        "//src/google/protobuf/compiler:__pkg__",
+    ],
     deps = [
         "//:protobuf",
         "//src/google/protobuf/compiler:code_generator",
@@ -20,7 +20,10 @@ cc_library(
     ],
     copts = COPTS,
     include_prefix = "google/protobuf/compiler/python",
-    visibility = ["//src/google/protobuf/compiler:__pkg__"],
+    visibility = [
+        "//pkg:__pkg__",
+        "//src/google/protobuf/compiler:__pkg__",
+    ],
     deps = [
         "//:protobuf",
         "//src/google/protobuf/compiler:code_generator",
@@ -12,7 +12,10 @@ cc_library(
     hdrs = ["ruby_generator.h"],
     copts = COPTS,
     include_prefix = "google/protobuf/compiler/ruby",
-    visibility = ["//src/google/protobuf/compiler:__pkg__"],
+    visibility = [
+        "//pkg:__pkg__",
+        "//src/google/protobuf/compiler:__pkg__",
+    ],
     deps = [
         "//:protobuf",
         "//src/google/protobuf/compiler:code_generator",
@@ -1261,8 +1261,6 @@ TEST(GeneratedMessageReflectionTest, UsageErrors) {
   const Reflection* reflection = message.GetReflection();
   const Descriptor* descriptor = message.GetDescriptor();
 
-#define f(NAME) descriptor->FindFieldByName(NAME)
-
   // Testing every single failure mode would be too much work. Let's just
   // check a few.
   EXPECT_DEATH(
@@ -1301,8 +1299,6 @@ TEST(GeneratedMessageReflectionTest, UsageErrors) {
       "  Message type: protobuf_unittest.TestAllTypes\n"
      "  Field       : protobuf_unittest.ForeignMessage.c\n"
      "  Problem     : Field does not match message type.");
-
-#undef f
 }
 
 #endif  // PROTOBUF_HAS_DEATH_TEST
@@ -187,48 +187,48 @@ static_assert(kFmtShift + kFmtBits <= 16, "too many bits");
 // Convenience aliases (16 bits, with format):
 enum FieldType : uint16_t {
   // Numeric types:
-  kBool = kFkVarint | kRep8Bits,
+  kBool = 0 | kFkVarint | kRep8Bits,
 
-  kFixed32 = kFkFixed | kRep32Bits | kFmtUnsigned,
-  kUInt32 = kFkVarint | kRep32Bits | kFmtUnsigned,
-  kSFixed32 = kFkFixed | kRep32Bits | kFmtSigned,
-  kInt32 = kFkVarint | kRep32Bits | kFmtSigned,
-  kSInt32 = kFkVarint | kRep32Bits | kFmtSigned | kTvZigZag,
-  kFloat = kFkFixed | kRep32Bits | kFmtFloating,
-  kEnum = kFkVarint | kRep32Bits | kFmtEnum | kTvEnum,
-  kEnumRange = kFkVarint | kRep32Bits | kFmtEnum | kTvRange,
-  kOpenEnum = kFkVarint | kRep32Bits | kFmtEnum,
+  kFixed32 = 0 | kFkFixed | kRep32Bits | kFmtUnsigned,
+  kUInt32 = 0 | kFkVarint | kRep32Bits | kFmtUnsigned,
+  kSFixed32 = 0 | kFkFixed | kRep32Bits | kFmtSigned,
+  kInt32 = 0 | kFkVarint | kRep32Bits | kFmtSigned,
+  kSInt32 = 0 | kFkVarint | kRep32Bits | kFmtSigned | kTvZigZag,
+  kFloat = 0 | kFkFixed | kRep32Bits | kFmtFloating,
+  kEnum = 0 | kFkVarint | kRep32Bits | kFmtEnum | kTvEnum,
+  kEnumRange = 0 | kFkVarint | kRep32Bits | kFmtEnum | kTvRange,
+  kOpenEnum = 0 | kFkVarint | kRep32Bits | kFmtEnum,
 
-  kFixed64 = kFkFixed | kRep64Bits | kFmtUnsigned,
-  kUInt64 = kFkVarint | kRep64Bits | kFmtUnsigned,
-  kSFixed64 = kFkFixed | kRep64Bits | kFmtSigned,
-  kInt64 = kFkVarint | kRep64Bits | kFmtSigned,
-  kSInt64 = kFkVarint | kRep64Bits | kFmtSigned | kTvZigZag,
-  kDouble = kFkFixed | kRep64Bits | kFmtFloating,
+  kFixed64 = 0 | kFkFixed | kRep64Bits | kFmtUnsigned,
+  kUInt64 = 0 | kFkVarint | kRep64Bits | kFmtUnsigned,
+  kSFixed64 = 0 | kFkFixed | kRep64Bits | kFmtSigned,
+  kInt64 = 0 | kFkVarint | kRep64Bits | kFmtSigned,
+  kSInt64 = 0 | kFkVarint | kRep64Bits | kFmtSigned | kTvZigZag,
+  kDouble = 0 | kFkFixed | kRep64Bits | kFmtFloating,
 
-  kPackedBool = kFkPackedVarint | kRep8Bits,
+  kPackedBool = 0 | kFkPackedVarint | kRep8Bits,
 
-  kPackedFixed32 = kFkPackedFixed | kRep32Bits | kFmtUnsigned,
-  kPackedUInt32 = kFkPackedVarint | kRep32Bits | kFmtUnsigned,
-  kPackedSFixed32 = kFkPackedFixed | kRep32Bits | kFmtSigned,
-  kPackedInt32 = kFkPackedVarint | kRep32Bits | kFmtSigned,
-  kPackedSInt32 = kFkPackedVarint | kRep32Bits | kFmtSigned | kTvZigZag,
-  kPackedFloat = kFkPackedFixed | kRep32Bits | kFmtFloating,
-  kPackedEnum = kFkPackedVarint | kRep32Bits | kFmtEnum | kTvEnum,
-  kPackedEnumRange = kFkPackedVarint | kRep32Bits | kFmtEnum | kTvRange,
-  kPackedOpenEnum = kFkPackedVarint | kRep32Bits | kFmtEnum,
+  kPackedFixed32 = 0 | kFkPackedFixed | kRep32Bits | kFmtUnsigned,
+  kPackedUInt32 = 0 | kFkPackedVarint | kRep32Bits | kFmtUnsigned,
+  kPackedSFixed32 = 0 | kFkPackedFixed | kRep32Bits | kFmtSigned,
+  kPackedInt32 = 0 | kFkPackedVarint | kRep32Bits | kFmtSigned,
+  kPackedSInt32 = 0 | kFkPackedVarint | kRep32Bits | kFmtSigned | kTvZigZag,
+  kPackedFloat = 0 | kFkPackedFixed | kRep32Bits | kFmtFloating,
+  kPackedEnum = 0 | kFkPackedVarint | kRep32Bits | kFmtEnum | kTvEnum,
+  kPackedEnumRange = 0 | kFkPackedVarint | kRep32Bits | kFmtEnum | kTvRange,
+  kPackedOpenEnum = 0 | kFkPackedVarint | kRep32Bits | kFmtEnum,
 
-  kPackedFixed64 = kFkPackedFixed | kRep64Bits | kFmtUnsigned,
-  kPackedUInt64 = kFkPackedVarint | kRep64Bits | kFmtUnsigned,
-  kPackedSFixed64 = kFkPackedFixed | kRep64Bits | kFmtSigned,
-  kPackedInt64 = kFkPackedVarint | kRep64Bits | kFmtSigned,
-  kPackedSInt64 = kFkPackedVarint | kRep64Bits | kFmtSigned | kTvZigZag,
-  kPackedDouble = kFkPackedFixed | kRep64Bits | kFmtFloating,
+  kPackedFixed64 = 0 | kFkPackedFixed | kRep64Bits | kFmtUnsigned,
+  kPackedUInt64 = 0 | kFkPackedVarint | kRep64Bits | kFmtUnsigned,
+  kPackedSFixed64 = 0 | kFkPackedFixed | kRep64Bits | kFmtSigned,
+  kPackedInt64 = 0 | kFkPackedVarint | kRep64Bits | kFmtSigned,
+  kPackedSInt64 = 0 | kFkPackedVarint | kRep64Bits | kFmtSigned | kTvZigZag,
+  kPackedDouble = 0 | kFkPackedFixed | kRep64Bits | kFmtFloating,
 
   // String types:
-  kBytes = kFkString | kFmtArray,
-  kRawString = kFkString | kFmtUtf8 | kTvUtf8Debug,
-  kUtf8String = kFkString | kFmtUtf8 | kTvUtf8,
+  kBytes = 0 | kFkString | kFmtArray,
+  kRawString = 0 | kFkString | kFmtUtf8 | kTvUtf8Debug,
+  kUtf8String = 0 | kFkString | kFmtUtf8 | kTvUtf8,
 
   // Message types:
   kMessage = kFkMessage,
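The mechanical `0 |` prefix gives every enumerator the same shape, a plain bitwise-OR chain, which keeps the table readable and lets formatters align the flag columns. A toy recreation of the packing idea (all bit positions and names below are invented for illustration; the real kFk*/kRep*/kFmt* constants and their widths are defined elsewhere in this header):

    #include <cstdint>

    // Invented bit layout, for illustration only.
    enum ToyBits : uint16_t {
      kToyFkVarint = 1 << 0,    // field kind: varint-encoded
      kToyFkFixed = 1 << 1,     // field kind: fixed-width
      kToyRep32Bits = 1 << 4,   // in-memory representation: 32 bits
      kToyRep64Bits = 1 << 5,   // in-memory representation: 64 bits
      kToyFmtSigned = 1 << 8,   // format: signed
      kToyFmtUnsigned = 1 << 9  // format: unsigned
    };

    enum ToyFieldType : uint16_t {
      // The leading 0 keeps single-flag and multi-flag lines the same shape.
      kToyInt32 = 0 | kToyFkVarint | kToyRep32Bits | kToyFmtSigned,
      kToyFixed64 = 0 | kToyFkFixed | kToyRep64Bits | kToyFmtUnsigned,
    };

    int main() {
      // Dispatch code can recover each facet by masking.
      return (kToyInt32 & kToyFkVarint) ? 0 : 1;
    }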
@@ -236,7 +236,6 @@ enum FieldType : uint16_t {
   // Map types:
   kMap = kFkMap,
 };
-
 // clang-format on
 }  // namespace field_layout
 
@@ -1364,7 +1364,8 @@ class Map {
   template <class InputIt>
   void insert(InputIt first, InputIt last) {
     for (; first != last; ++first) {
-      try_emplace(first->first, first->second);
+      auto&& pair = *first;
+      try_emplace(pair.first, pair.second);
     }
   }
   void insert(std::initializer_list<value_type> values) {
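Dereferencing the iterator once and reusing the bound reference matters when InputIt is a true input iterator or its operator* returns a proxy or prvalue: the old code dereferenced twice per element, which such iterators need not support cheaply or at all. A small demonstration of the pattern with a hypothetical InsertRange helper (std::map stands in for protobuf's Map):

    #include <iostream>
    #include <map>
    #include <string>
    #include <utility>
    #include <vector>

    // Mirrors the fixed loop: bind *first exactly once per element.
    template <typename M, typename InputIt>
    void InsertRange(M& m, InputIt first, InputIt last) {
      for (; first != last; ++first) {
        auto&& pair = *first;  // works even if *first is a prvalue or proxy
        m.try_emplace(pair.first, pair.second);
      }
    }

    int main() {
      std::vector<std::pair<std::string, int>> src = {{"a", 1}, {"b", 2}};
      std::map<std::string, int> m;
      InsertRange(m, src.begin(), src.end());
      std::cout << m.size() << "\n";  // prints 2
    }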
@@ -193,7 +193,7 @@
 #ifdef PROTOBUF_VERSION_SUFFIX
 #error PROTOBUF_VERSION_SUFFIX was previously defined
 #endif
-#define PROTOBUF_VERSION_SUFFIX "-rc1"
+#define PROTOBUF_VERSION_SUFFIX "-rc2"
 
 #if defined(PROTOBUF_NAMESPACE) || defined(PROTOBUF_NAMESPACE_ID)
 #error PROTOBUF_NAMESPACE or PROTOBUF_NAMESPACE_ID was previously defined
@@ -246,7 +246,7 @@ class RepeatedField final {
   Element* mutable_data();
   const Element* data() const;
 
-  // Swaps entire contents with "other". If they are separate arenas then,
+  // Swaps entire contents with "other". If they are separate arenas, then
   // copies data between each other.
   void Swap(RepeatedField* other);
 
@@ -313,8 +313,14 @@ class RepeatedField final {
   iterator erase(const_iterator first, const_iterator last);
 
   // Gets the Arena on which this RepeatedField stores its elements.
+  // Message-owned arenas are not exposed by this method, which will return
+  // nullptr for messages owned by MOAs.
   inline Arena* GetArena() const {
-    return GetOwningArena();
+    Arena* arena = GetOwningArena();
+    if (arena == nullptr || arena->InternalIsMessageOwnedArena()) {
+      return nullptr;
+    }
+    return arena;
   }
 
   // For internal use only.
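Both GetArena() bodies (here and in RepeatedPtrFieldBase in the next hunk) now filter out message-owned arenas: callers that branch on GetArena() == nullptr treat MOA-backed messages like heap-allocated ones, while GetOwningArena() remains the unfiltered internal answer. A hedged sketch of that public/internal split with invented stand-in types (InternalIsMessageOwnedArena mirrors the query in the diff; everything else here is illustrative):

    #include <cassert>

    class ArenaSketch {
     public:
      explicit ArenaSketch(bool message_owned = false)
          : message_owned_(message_owned) {}
      bool InternalIsMessageOwnedArena() const { return message_owned_; }

     private:
      bool message_owned_;
    };

    class RepeatedFieldSketch {
     public:
      explicit RepeatedFieldSketch(ArenaSketch* arena) : arena_(arena) {}
      // Public view: message-owned arenas are reported as "no arena".
      ArenaSketch* GetArena() const {
        ArenaSketch* arena = GetOwningArena();
        if (arena == nullptr || arena->InternalIsMessageOwnedArena()) {
          return nullptr;
        }
        return arena;
      }

     private:
      // Internal view: always the real owner.
      ArenaSketch* GetOwningArena() const { return arena_; }
      ArenaSketch* arena_;
    };

    int main() {
      ArenaSketch moa(/*message_owned=*/true);
      assert(RepeatedFieldSketch(&moa).GetArena() == nullptr);   // hidden
      ArenaSketch plain;
      assert(RepeatedFieldSketch(&plain).GetArena() == &plain);  // exposed
    }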
@@ -612,7 +612,16 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
     temp.Destroy<TypeHandler>();  // Frees rep_ if `other` had no arena.
   }
 
-  inline Arena* GetArena() const { return arena_; }
+  // Gets the Arena on which this RepeatedPtrField stores its elements.
+  // Message-owned arenas are not exposed by this method, which will return
+  // nullptr for messages owned by MOAs.
+  inline Arena* GetArena() const {
+    Arena* arena = GetOwningArena();
+    if (arena == nullptr || arena->InternalIsMessageOwnedArena()) {
+      return nullptr;
+    }
+    return arena;
+  }
 
 protected:
   inline Arena* GetOwningArena() const { return arena_; }
@@ -762,7 +771,7 @@ class GenericTypeHandler {
     }
   }
   static inline Arena* GetOwningArena(GenericType* value) {
-    return Arena::GetOwningArena<Type>(value);
+    return Arena::InternalGetOwningArena(value);
   }
 
   static inline void Clear(GenericType* value) { value->Clear(); }
@@ -178,7 +178,7 @@ void NullLogHandler(LogLevel /* level */, const char* /* filename */,
 }
 
 static LogHandler* log_handler_ = &DefaultLogHandler;
-static std::atomic<int> log_silencer_count_ = ATOMIC_VAR_INIT(0);
+static std::atomic<int> log_silencer_count_{0};
 
 LogMessage& LogMessage::operator<<(const std::string& value) {
   message_ += value;
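ATOMIC_VAR_INIT was deprecated in C++20; direct brace initialization of std::atomic has been the equivalent, portable spelling since C++11 and is the drop-in replacement, as in this sketch:

    #include <atomic>

    // Before (deprecated in C++20):
    //   static std::atomic<int> count = ATOMIC_VAR_INIT(0);
    // After: value-initialize the atomic directly.
    static std::atomic<int> count{0};

    int main() { return count.load(); }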
@@ -85,7 +85,7 @@ namespace internal {
 #define GOOGLE_PROTOBUF_VERSION 3021000
 
 // A suffix string for alpha, beta or rc releases. Empty for stable releases.
-#define GOOGLE_PROTOBUF_VERSION_SUFFIX "-rc1"
+#define GOOGLE_PROTOBUF_VERSION_SUFFIX "-rc2"
 
 // The minimum header version which works with the current version of
 // the library. This constant should only be used by protoc's C++ code
@@ -77,9 +77,6 @@ class PROTOBUF_EXPORT ProtoStreamObjectSource : public ObjectSource {
  public:
 
   struct RenderOptions {
-    RenderOptions() = default;
-    RenderOptions(const RenderOptions&) = default;
-
     // Sets whether or not to use lowerCamelCase casing for enum values. If set
     // to false, enum values are output without any case conversions.
     //
@@ -165,14 +165,14 @@ int64_t RoundTowardZero(int64_t value, int64_t divider) {
 // Actually define these static const integers. Required by C++ standard (but
 // some compilers don't like it).
 #ifndef _MSC_VER
-const int64_t TimeUtil::kTimestampMinSeconds;
-const int64_t TimeUtil::kTimestampMaxSeconds;
-const int32_t TimeUtil::kTimestampMinNanoseconds;
-const int32_t TimeUtil::kTimestampMaxNanoseconds;
-const int64_t TimeUtil::kDurationMaxSeconds;
-const int64_t TimeUtil::kDurationMinSeconds;
-const int32_t TimeUtil::kDurationMaxNanoseconds;
-const int32_t TimeUtil::kDurationMinNanoseconds;
+constexpr int64_t TimeUtil::kTimestampMinSeconds;
+constexpr int64_t TimeUtil::kTimestampMaxSeconds;
+constexpr int32_t TimeUtil::kTimestampMinNanoseconds;
+constexpr int32_t TimeUtil::kTimestampMaxNanoseconds;
+constexpr int64_t TimeUtil::kDurationMaxSeconds;
+constexpr int64_t TimeUtil::kDurationMinSeconds;
+constexpr int32_t TimeUtil::kDurationMaxNanoseconds;
+constexpr int32_t TimeUtil::kDurationMinNanoseconds;
 #endif  // !_MSC_VER
 
 std::string TimeUtil::ToString(const Timestamp& timestamp) {
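Background for the hunk above: before C++17, a static constexpr data member declared in-class still required a namespace-scope definition in one translation unit if it was odr-used (for example, if its address was taken); since C++17 such members are implicitly inline and the out-of-line definition is deprecated but harmless. Spelling the definitions constexpr simply matches the in-class declarations. A minimal sketch with a stand-in class and one constant (the value is protobuf's documented timestamp upper bound, 9999-12-31T23:59:59Z):

    #include <cstdint>
    #include <iostream>

    struct TimeUtilSketch {
      static constexpr int64_t kTimestampMaxSeconds = 253402300799;
    };

    // Out-of-line definition: required pre-C++17 when the member is odr-used;
    // redundant (and deprecated) from C++17 on.
    constexpr int64_t TimeUtilSketch::kTimestampMaxSeconds;

    int main() {
      const int64_t* p = &TimeUtilSketch::kTimestampMaxSeconds;  // odr-use
      std::cout << *p << "\n";
    }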