Down-integrate internal changes (#5467)
* Down-integrate internal changes * Update generated code for php, objc and csharp. * Add missing dependency in conformance_php
This commit is contained in:
parent
f93cf5a3cf
commit
cecba296b8
@ -21,7 +21,9 @@ add_custom_command(
|
||||
add_executable(conformance_test_runner
|
||||
${protobuf_source_dir}/conformance/conformance.pb.cc
|
||||
${protobuf_source_dir}/conformance/conformance_test.cc
|
||||
${protobuf_source_dir}/conformance/conformance_test_impl.cc
|
||||
${protobuf_source_dir}/conformance/binary_json_conformance_main.cc
|
||||
${protobuf_source_dir}/conformance/binary_json_conformance_suite.cc
|
||||
${protobuf_source_dir}/conformance/binary_json_conformance_suite.h
|
||||
${protobuf_source_dir}/conformance/conformance_test_runner.cc
|
||||
${protobuf_source_dir}/conformance/third_party/jsoncpp/json.h
|
||||
${protobuf_source_dir}/conformance/third_party/jsoncpp/jsoncpp.cpp
|
||||
|
@ -5,6 +5,7 @@ set(libprotobuf_lite_files
|
||||
${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven_lite.cc
|
||||
${protobuf_source_dir}/src/google/protobuf/generated_message_util.cc
|
||||
${protobuf_source_dir}/src/google/protobuf/implicit_weak_message.cc
|
||||
${protobuf_source_dir}/src/google/protobuf/parse_context.cc
|
||||
${protobuf_source_dir}/src/google/protobuf/io/coded_stream.cc
|
||||
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.cc
|
||||
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc
|
||||
@ -30,6 +31,7 @@ set(libprotobuf_lite_includes
|
||||
${protobuf_source_dir}/src/google/protobuf/extension_set.h
|
||||
${protobuf_source_dir}/src/google/protobuf/generated_message_util.h
|
||||
${protobuf_source_dir}/src/google/protobuf/implicit_weak_message.h
|
||||
${protobuf_source_dir}/src/google/protobuf/parse_context.h
|
||||
${protobuf_source_dir}/src/google/protobuf/io/coded_stream.h
|
||||
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.h
|
||||
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.h
|
||||
|
@ -206,7 +206,9 @@ EXTRA_DIST = \
|
||||
|
||||
conformance_test_runner_LDADD = $(top_srcdir)/src/libprotobuf.la
|
||||
conformance_test_runner_SOURCES = conformance_test.h conformance_test.cc \
|
||||
conformance_test_impl.cc \
|
||||
binary_json_conformance_main.cc \
|
||||
binary_json_conformance_suite.h \
|
||||
binary_json_conformance_suite.cc \
|
||||
conformance_test_runner.cc \
|
||||
third_party/jsoncpp/json.h \
|
||||
third_party/jsoncpp/jsoncpp.cpp
|
||||
|
37
conformance/binary_json_conformance_main.cc
Normal file
37
conformance/binary_json_conformance_main.cc
Normal file
@ -0,0 +1,37 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#include "binary_json_conformance_suite.h"
|
||||
#include "conformance_test.h"
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
google::protobuf::BinaryAndJsonConformanceSuite suite;
|
||||
return google::protobuf::ForkPipeRunner::Run(argc, argv, &suite);
|
||||
}
|
@ -28,6 +28,7 @@
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#include "binary_json_conformance_suite.h"
|
||||
#include "conformance_test.h"
|
||||
#include "third_party/jsoncpp/json.h"
|
||||
|
||||
@ -37,11 +38,13 @@
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/strutil.h>
|
||||
#include <google/protobuf/text_format.h>
|
||||
#include <google/protobuf/util/json_util.h>
|
||||
#include <google/protobuf/util/type_resolver_util.h>
|
||||
#include <google/protobuf/wire_format_lite.h>
|
||||
|
||||
using conformance::ConformanceRequest;
|
||||
using conformance::ConformanceResponse;
|
||||
using conformance::WireFormat;
|
||||
using google::protobuf::Descriptor;
|
||||
using google::protobuf::FieldDescriptor;
|
||||
using google::protobuf::Message;
|
||||
@ -189,73 +192,83 @@ std::unique_ptr<Message> NewTestMessage(bool is_proto3) {
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
|
||||
class ConformanceTestSuiteImpl : public ConformanceTestSuite {
|
||||
public:
|
||||
ConformanceTestSuiteImpl() {}
|
||||
bool BinaryAndJsonConformanceSuite::ParseJsonResponse(
|
||||
const ConformanceResponse& response,
|
||||
Message* test_message) {
|
||||
string binary_protobuf;
|
||||
util::Status status =
|
||||
JsonToBinaryString(type_resolver_.get(), type_url_,
|
||||
response.json_payload(), &binary_protobuf);
|
||||
|
||||
private:
|
||||
void RunSuiteImpl();
|
||||
void RunValidJsonTest(const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const string& input_json,
|
||||
const string& equivalent_text_format);
|
||||
void RunValidJsonTestWithProtobufInput(
|
||||
const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const protobuf_test_messages::proto3::TestAllTypesProto3& input,
|
||||
const string& equivalent_text_format);
|
||||
void RunValidJsonIgnoreUnknownTest(
|
||||
const string& test_name, ConformanceLevel level, const string& input_json,
|
||||
const string& equivalent_text_format);
|
||||
void RunValidProtobufTest(const string& test_name, ConformanceLevel level,
|
||||
const string& input_protobuf,
|
||||
const string& equivalent_text_format,
|
||||
bool is_proto3);
|
||||
void RunValidBinaryProtobufTest(const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const string& input_protobuf,
|
||||
bool is_proto3);
|
||||
void RunValidProtobufTestWithMessage(
|
||||
const string& test_name, ConformanceLevel level,
|
||||
const Message *input,
|
||||
const string& equivalent_text_format,
|
||||
bool is_proto3);
|
||||
if (!status.ok()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
typedef std::function<bool(const Json::Value&)> Validator;
|
||||
void RunValidJsonTestWithValidator(const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const string& input_json,
|
||||
const Validator& validator);
|
||||
void ExpectParseFailureForJson(const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const string& input_json);
|
||||
void ExpectSerializeFailureForJson(const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const string& text_format);
|
||||
void ExpectParseFailureForProtoWithProtoVersion (const string& proto,
|
||||
const string& test_name,
|
||||
ConformanceLevel level,
|
||||
bool is_proto3);
|
||||
void ExpectParseFailureForProto(const std::string& proto,
|
||||
const std::string& test_name,
|
||||
ConformanceLevel level);
|
||||
void ExpectHardParseFailureForProto(const std::string& proto,
|
||||
const std::string& test_name,
|
||||
ConformanceLevel level);
|
||||
void TestPrematureEOFForType(google::protobuf::FieldDescriptor::Type type);
|
||||
void TestIllegalTags();
|
||||
template <class MessageType>
|
||||
void TestOneofMessage (MessageType &message,
|
||||
bool is_proto3);
|
||||
template <class MessageType>
|
||||
void TestUnknownMessage (MessageType &message,
|
||||
bool is_proto3);
|
||||
void TestValidDataForType(
|
||||
google::protobuf::FieldDescriptor::Type,
|
||||
std::vector<std::pair<std::string, std::string>> values);
|
||||
};
|
||||
if (!test_message->ParseFromString(binary_protobuf)) {
|
||||
GOOGLE_LOG(FATAL)
|
||||
<< "INTERNAL ERROR: internal JSON->protobuf transcode "
|
||||
<< "yielded unparseable proto.";
|
||||
return false;
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::ExpectParseFailureForProtoWithProtoVersion (
|
||||
return true;
|
||||
}
|
||||
|
||||
bool BinaryAndJsonConformanceSuite::ParseResponse(
|
||||
const ConformanceResponse& response,
|
||||
const ConformanceRequestSetting& setting,
|
||||
Message* test_message) {
|
||||
const ConformanceRequest& request = setting.GetRequest();
|
||||
WireFormat requested_output = request.requested_output_format();
|
||||
const string& test_name = setting.GetTestName();
|
||||
ConformanceLevel level = setting.GetLevel();
|
||||
|
||||
switch (response.result_case()) {
|
||||
case ConformanceResponse::kProtobufPayload: {
|
||||
if (requested_output != conformance::PROTOBUF) {
|
||||
ReportFailure(
|
||||
test_name, level, request, response,
|
||||
StrCat("Test was asked for ", WireFormatToString(requested_output),
|
||||
" output but provided PROTOBUF instead.").c_str());
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!test_message->ParseFromString(response.protobuf_payload())) {
|
||||
ReportFailure(test_name, level, request, response,
|
||||
"Protobuf output we received from test was unparseable.");
|
||||
return false;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case ConformanceResponse::kJsonPayload: {
|
||||
if (requested_output != conformance::JSON) {
|
||||
ReportFailure(
|
||||
test_name, level, request, response,
|
||||
StrCat("Test was asked for ", WireFormatToString(requested_output),
|
||||
" output but provided JSON instead.").c_str());
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!ParseJsonResponse(response, test_message)) {
|
||||
ReportFailure(test_name, level, request, response,
|
||||
"JSON output we received from test was unparseable.");
|
||||
return false;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
GOOGLE_LOG(FATAL) << test_name << ": unknown payload type: "
|
||||
<< response.result_case();
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void BinaryAndJsonConformanceSuite::ExpectParseFailureForProtoWithProtoVersion (
|
||||
const string& proto, const string& test_name, ConformanceLevel level,
|
||||
bool is_proto3) {
|
||||
std::unique_ptr<Message> prototype = NewTestMessage(is_proto3);
|
||||
@ -285,7 +298,7 @@ void ConformanceTestSuiteImpl::ExpectParseFailureForProtoWithProtoVersion (
|
||||
}
|
||||
|
||||
// Expect that this precise protobuf will cause a parse error.
|
||||
void ConformanceTestSuiteImpl::ExpectParseFailureForProto(
|
||||
void BinaryAndJsonConformanceSuite::ExpectParseFailureForProto(
|
||||
const string& proto, const string& test_name, ConformanceLevel level) {
|
||||
ExpectParseFailureForProtoWithProtoVersion(proto, test_name, level, true);
|
||||
ExpectParseFailureForProtoWithProtoVersion(proto, test_name, level, false);
|
||||
@ -296,12 +309,12 @@ void ConformanceTestSuiteImpl::ExpectParseFailureForProto(
|
||||
// data verbatim and once with this data followed by some valid data.
|
||||
//
|
||||
// TODO(haberman): implement the second of these.
|
||||
void ConformanceTestSuiteImpl::ExpectHardParseFailureForProto(
|
||||
void BinaryAndJsonConformanceSuite::ExpectHardParseFailureForProto(
|
||||
const string& proto, const string& test_name, ConformanceLevel level) {
|
||||
return ExpectParseFailureForProto(proto, test_name, level);
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::RunValidJsonTest(
|
||||
void BinaryAndJsonConformanceSuite::RunValidJsonTest(
|
||||
const string& test_name, ConformanceLevel level, const string& input_json,
|
||||
const string& equivalent_text_format) {
|
||||
TestAllTypesProto3 prototype;
|
||||
@ -317,7 +330,7 @@ void ConformanceTestSuiteImpl::RunValidJsonTest(
|
||||
RunValidInputTest(setting2, equivalent_text_format);
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::RunValidJsonTestWithProtobufInput(
|
||||
void BinaryAndJsonConformanceSuite::RunValidJsonTestWithProtobufInput(
|
||||
const string& test_name, ConformanceLevel level, const TestAllTypesProto3& input,
|
||||
const string& equivalent_text_format) {
|
||||
ConformanceRequestSetting setting(
|
||||
@ -327,7 +340,7 @@ void ConformanceTestSuiteImpl::RunValidJsonTestWithProtobufInput(
|
||||
RunValidInputTest(setting, equivalent_text_format);
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::RunValidJsonIgnoreUnknownTest(
|
||||
void BinaryAndJsonConformanceSuite::RunValidJsonIgnoreUnknownTest(
|
||||
const string& test_name, ConformanceLevel level, const string& input_json,
|
||||
const string& equivalent_text_format) {
|
||||
TestAllTypesProto3 prototype;
|
||||
@ -338,7 +351,7 @@ void ConformanceTestSuiteImpl::RunValidJsonIgnoreUnknownTest(
|
||||
RunValidInputTest(setting, equivalent_text_format);
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::RunValidProtobufTest(
|
||||
void BinaryAndJsonConformanceSuite::RunValidProtobufTest(
|
||||
const string& test_name, ConformanceLevel level,
|
||||
const string& input_protobuf, const string& equivalent_text_format,
|
||||
bool is_proto3) {
|
||||
@ -359,7 +372,7 @@ void ConformanceTestSuiteImpl::RunValidProtobufTest(
|
||||
}
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::RunValidBinaryProtobufTest(
|
||||
void BinaryAndJsonConformanceSuite::RunValidBinaryProtobufTest(
|
||||
const string& test_name, ConformanceLevel level,
|
||||
const string& input_protobuf, bool is_proto3) {
|
||||
std::unique_ptr<Message> prototype = NewTestMessage(is_proto3);
|
||||
@ -370,7 +383,7 @@ void ConformanceTestSuiteImpl::RunValidBinaryProtobufTest(
|
||||
RunValidBinaryInputTest(setting, input_protobuf);
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::RunValidProtobufTestWithMessage(
|
||||
void BinaryAndJsonConformanceSuite::RunValidProtobufTestWithMessage(
|
||||
const string& test_name, ConformanceLevel level, const Message *input,
|
||||
const string& equivalent_text_format, bool is_proto3) {
|
||||
RunValidProtobufTest(test_name, level, input->SerializeAsString(),
|
||||
@ -382,7 +395,7 @@ void ConformanceTestSuiteImpl::RunValidProtobufTestWithMessage(
|
||||
// numbers while the parser is allowed to accept them as JSON strings). This
|
||||
// method allows strict checking on a proto3 JSON serializer by inspecting
|
||||
// the JSON output directly.
|
||||
void ConformanceTestSuiteImpl::RunValidJsonTestWithValidator(
|
||||
void BinaryAndJsonConformanceSuite::RunValidJsonTestWithValidator(
|
||||
const string& test_name, ConformanceLevel level, const string& input_json,
|
||||
const Validator& validator) {
|
||||
TestAllTypesProto3 prototype;
|
||||
@ -426,7 +439,7 @@ void ConformanceTestSuiteImpl::RunValidJsonTestWithValidator(
|
||||
ReportSuccess(effective_test_name);
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::ExpectParseFailureForJson(
|
||||
void BinaryAndJsonConformanceSuite::ExpectParseFailureForJson(
|
||||
const string& test_name, ConformanceLevel level, const string& input_json) {
|
||||
TestAllTypesProto3 prototype;
|
||||
// We don't expect output, but if the program erroneously accepts the protobuf
|
||||
@ -452,7 +465,7 @@ void ConformanceTestSuiteImpl::ExpectParseFailureForJson(
|
||||
}
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::ExpectSerializeFailureForJson(
|
||||
void BinaryAndJsonConformanceSuite::ExpectSerializeFailureForJson(
|
||||
const string& test_name, ConformanceLevel level, const string& text_format) {
|
||||
TestAllTypesProto3 payload_message;
|
||||
GOOGLE_CHECK(
|
||||
@ -482,7 +495,7 @@ void ConformanceTestSuiteImpl::ExpectSerializeFailureForJson(
|
||||
}
|
||||
|
||||
//TODO: proto2?
|
||||
void ConformanceTestSuiteImpl::TestPrematureEOFForType(
|
||||
void BinaryAndJsonConformanceSuite::TestPrematureEOFForType(
|
||||
FieldDescriptor::Type type) {
|
||||
// Incomplete values for each wire type.
|
||||
static const string incompletes[6] = {
|
||||
@ -570,7 +583,7 @@ void ConformanceTestSuiteImpl::TestPrematureEOFForType(
|
||||
}
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::TestValidDataForType(
|
||||
void BinaryAndJsonConformanceSuite::TestValidDataForType(
|
||||
FieldDescriptor::Type type,
|
||||
std::vector<std::pair<std::string, std::string>> values) {
|
||||
for (int is_proto3 = 0; is_proto3 < 2; is_proto3++) {
|
||||
@ -606,7 +619,7 @@ void ConformanceTestSuiteImpl::TestValidDataForType(
|
||||
}
|
||||
|
||||
// TODO: proto2?
|
||||
void ConformanceTestSuiteImpl::TestIllegalTags() {
|
||||
void BinaryAndJsonConformanceSuite::TestIllegalTags() {
|
||||
// field num 0 is illegal
|
||||
string nullfield[] = {
|
||||
"\1DEADBEEF",
|
||||
@ -621,7 +634,7 @@ void ConformanceTestSuiteImpl::TestIllegalTags() {
|
||||
}
|
||||
}
|
||||
template <class MessageType>
|
||||
void ConformanceTestSuiteImpl::TestOneofMessage (
|
||||
void BinaryAndJsonConformanceSuite::TestOneofMessage (
|
||||
MessageType &message, bool is_proto3) {
|
||||
message.set_oneof_uint32(0);
|
||||
RunValidProtobufTestWithMessage(
|
||||
@ -660,14 +673,14 @@ void ConformanceTestSuiteImpl::TestOneofMessage (
|
||||
}
|
||||
|
||||
template <class MessageType>
|
||||
void ConformanceTestSuiteImpl::TestUnknownMessage(
|
||||
void BinaryAndJsonConformanceSuite::TestUnknownMessage(
|
||||
MessageType& message, bool is_proto3) {
|
||||
message.ParseFromString("\xA8\x1F\x01");
|
||||
RunValidBinaryProtobufTest("UnknownVarint", REQUIRED,
|
||||
message.SerializeAsString(), is_proto3);
|
||||
}
|
||||
|
||||
void ConformanceTestSuiteImpl::RunSuiteImpl() {
|
||||
void BinaryAndJsonConformanceSuite::RunSuiteImpl() {
|
||||
type_resolver_.reset(NewTypeResolverForDescriptorPool(
|
||||
kTypeUrlPrefix, DescriptorPool::generated_pool()));
|
||||
type_url_ = GetTypeUrl(TestAllTypesProto3::descriptor());
|
||||
@ -2360,8 +2373,3 @@ void ConformanceTestSuiteImpl::RunSuiteImpl() {
|
||||
|
||||
} // namespace protobuf
|
||||
} // namespace google
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
google::protobuf::ConformanceTestSuiteImpl suite;
|
||||
return google::protobuf::ForkPipeRunner::Run(argc, argv, &suite);
|
||||
}
|
121
conformance/binary_json_conformance_suite.h
Normal file
121
conformance/binary_json_conformance_suite.h
Normal file
@ -0,0 +1,121 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#ifndef CONFORMANCE_BINARY_JSON_CONFORMANCE_SUITE_H
|
||||
#define CONFORMANCE_BINARY_JSON_CONFORMANCE_SUITE_H
|
||||
|
||||
#include "conformance_test.h"
|
||||
#include "third_party/jsoncpp/json.h"
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
|
||||
class BinaryAndJsonConformanceSuite : public ConformanceTestSuite {
|
||||
public:
|
||||
BinaryAndJsonConformanceSuite() {}
|
||||
|
||||
private:
|
||||
void RunSuiteImpl();
|
||||
void RunValidJsonTest(const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const string& input_json,
|
||||
const string& equivalent_text_format);
|
||||
void RunValidJsonTestWithProtobufInput(
|
||||
const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const protobuf_test_messages::proto3::TestAllTypesProto3& input,
|
||||
const string& equivalent_text_format);
|
||||
void RunValidJsonIgnoreUnknownTest(
|
||||
const string& test_name, ConformanceLevel level, const string& input_json,
|
||||
const string& equivalent_text_format);
|
||||
void RunValidProtobufTest(const string& test_name, ConformanceLevel level,
|
||||
const string& input_protobuf,
|
||||
const string& equivalent_text_format,
|
||||
bool is_proto3);
|
||||
void RunValidBinaryProtobufTest(const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const string& input_protobuf,
|
||||
bool is_proto3);
|
||||
void RunValidProtobufTestWithMessage(
|
||||
const string& test_name, ConformanceLevel level,
|
||||
const Message *input,
|
||||
const string& equivalent_text_format,
|
||||
bool is_proto3);
|
||||
|
||||
bool ParseJsonResponse(
|
||||
const conformance::ConformanceResponse& response,
|
||||
Message* test_message);
|
||||
bool ParseResponse(
|
||||
const conformance::ConformanceResponse& response,
|
||||
const ConformanceRequestSetting& setting,
|
||||
Message* test_message) override;
|
||||
|
||||
typedef std::function<bool(const Json::Value&)> Validator;
|
||||
void RunValidJsonTestWithValidator(const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const string& input_json,
|
||||
const Validator& validator);
|
||||
void ExpectParseFailureForJson(const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const string& input_json);
|
||||
void ExpectSerializeFailureForJson(const string& test_name,
|
||||
ConformanceLevel level,
|
||||
const string& text_format);
|
||||
void ExpectParseFailureForProtoWithProtoVersion (const string& proto,
|
||||
const string& test_name,
|
||||
ConformanceLevel level,
|
||||
bool is_proto3);
|
||||
void ExpectParseFailureForProto(const std::string& proto,
|
||||
const std::string& test_name,
|
||||
ConformanceLevel level);
|
||||
void ExpectHardParseFailureForProto(const std::string& proto,
|
||||
const std::string& test_name,
|
||||
ConformanceLevel level);
|
||||
void TestPrematureEOFForType(google::protobuf::FieldDescriptor::Type type);
|
||||
void TestIllegalTags();
|
||||
template <class MessageType>
|
||||
void TestOneofMessage (MessageType &message,
|
||||
bool is_proto3);
|
||||
template <class MessageType>
|
||||
void TestUnknownMessage (MessageType &message,
|
||||
bool is_proto3);
|
||||
void TestValidDataForType(
|
||||
google::protobuf::FieldDescriptor::Type,
|
||||
std::vector<std::pair<std::string, std::string>> values);
|
||||
|
||||
std::unique_ptr<google::protobuf::util::TypeResolver>
|
||||
type_resolver_;
|
||||
std::string type_url_;
|
||||
};
|
||||
|
||||
} // namespace protobuf
|
||||
} // namespace google
|
||||
|
||||
#endif // CONFORMANCE_BINARY_JSON_CONFORMANCE_SUITE_H
|
@ -55,6 +55,7 @@ enum WireFormat {
|
||||
UNSPECIFIED = 0;
|
||||
PROTOBUF = 1;
|
||||
JSON = 2;
|
||||
JSPB = 3; // Google internal only. Opensource testees just skip it.
|
||||
}
|
||||
|
||||
enum TestCategory {
|
||||
@ -67,6 +68,8 @@ enum TestCategory {
|
||||
// https://developers.google.com/protocol-buffers/docs/proto3#json_options
|
||||
// for more detail.
|
||||
JSON_IGNORE_UNKNOWN_PARSING_TEST = 3;
|
||||
JSPB_TEST = 4; // Test jspb wire format. Google internal only.
|
||||
// Opensource testees just skip it.
|
||||
}
|
||||
|
||||
// Represents a single test case's input. The testee should:
|
||||
@ -85,6 +88,8 @@ message ConformanceRequest {
|
||||
oneof payload {
|
||||
bytes protobuf_payload = 1;
|
||||
string json_payload = 2;
|
||||
string jspb_payload = 7; // Google internal only.
|
||||
// Opensource testees just skip it.
|
||||
}
|
||||
|
||||
// Which format should the testee serialize its message to?
|
||||
@ -99,6 +104,9 @@ message ConformanceRequest {
|
||||
// spedific support in testee programs. Refer to the defintion of TestCategory
|
||||
// for more information.
|
||||
TestCategory test_category = 5;
|
||||
|
||||
// Specify details for how to encode jspb.
|
||||
JspbEncodingConfig jspb_encoding_options = 6;
|
||||
}
|
||||
|
||||
// Represents a single test case's output.
|
||||
@ -132,5 +140,16 @@ message ConformanceResponse {
|
||||
// For when the testee skipped the test, likely because a certain feature
|
||||
// wasn't supported, like JSON input/output.
|
||||
string skipped = 5;
|
||||
|
||||
// If the input was successfully parsed and the requested output was JSPB,
|
||||
// serialize to JSPB and set it in this field. JSPB is google internal only
|
||||
// format. Opensource testees can just skip it.
|
||||
string jspb_payload = 7;
|
||||
}
|
||||
}
|
||||
|
||||
// Encoding options for jspb format.
|
||||
message JspbEncodingConfig {
|
||||
// Encode the value field of Any as jspb array if ture, otherwise binary.
|
||||
bool use_jspb_array_any_format = 1;
|
||||
}
|
||||
|
@ -137,6 +137,11 @@ void DoTest(const ConformanceRequest& request, ConformanceResponse* response) {
|
||||
case ConformanceRequest::PAYLOAD_NOT_SET:
|
||||
GOOGLE_LOG(FATAL) << "Request didn't have payload.";
|
||||
break;
|
||||
|
||||
default:
|
||||
GOOGLE_LOG(FATAL) << "unknown payload type: "
|
||||
<< request.payload_case();
|
||||
break;
|
||||
}
|
||||
|
||||
switch (request.requested_output_format()) {
|
||||
|
@ -3,6 +3,7 @@
|
||||
require_once("Conformance/WireFormat.php");
|
||||
require_once("Conformance/ConformanceResponse.php");
|
||||
require_once("Conformance/ConformanceRequest.php");
|
||||
require_once("Conformance/JspbEncodingConfig.php");
|
||||
require_once("Conformance/TestCategory.php");
|
||||
require_once("Protobuf_test_messages/Proto3/ForeignMessage.php");
|
||||
require_once("Protobuf_test_messages/Proto3/ForeignEnum.php");
|
||||
|
@ -80,6 +80,11 @@ ConformanceTestSuite::ConformanceRequestSetting::ConformanceRequestSetting(
|
||||
break;
|
||||
}
|
||||
|
||||
case conformance::JSPB: {
|
||||
request_.set_jspb_payload(input);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
GOOGLE_LOG(FATAL) << "Unspecified input format";
|
||||
}
|
||||
@ -215,23 +220,27 @@ void ConformanceTestSuite::RunValidInputTest(
|
||||
void ConformanceTestSuite::RunValidBinaryInputTest(
|
||||
const ConformanceRequestSetting& setting,
|
||||
const string& equivalent_wire_format) {
|
||||
const ConformanceRequest& request = setting.GetRequest();
|
||||
ConformanceResponse response;
|
||||
RunTest(setting.GetTestName(), request, &response);
|
||||
VerifyResponse(setting, equivalent_wire_format, response, true);
|
||||
}
|
||||
|
||||
void ConformanceTestSuite::VerifyResponse(
|
||||
const ConformanceRequestSetting& setting,
|
||||
const string& equivalent_wire_format,
|
||||
const ConformanceResponse& response,
|
||||
bool need_report_success) {
|
||||
Message* test_message = setting.GetTestMessage();
|
||||
const ConformanceRequest& request = setting.GetRequest();
|
||||
const string& test_name = setting.GetTestName();
|
||||
ConformanceLevel level = setting.GetLevel();
|
||||
|
||||
Message* reference_message = setting.GetTestMessage();
|
||||
|
||||
GOOGLE_CHECK(
|
||||
reference_message->ParseFromString(equivalent_wire_format))
|
||||
<< "Failed to parse wire data for test case: " << test_name;
|
||||
|
||||
const ConformanceRequest& request = setting.GetRequest();
|
||||
ConformanceResponse response;
|
||||
|
||||
RunTest(test_name, request, &response);
|
||||
|
||||
Message* test_message = setting.GetTestMessage();
|
||||
|
||||
WireFormat requested_output = request.requested_output_format();
|
||||
|
||||
switch (response.result_case()) {
|
||||
case ConformanceResponse::RESULT_NOT_SET:
|
||||
ReportFailure(test_name, level, request, response,
|
||||
@ -249,53 +258,8 @@ void ConformanceTestSuite::RunValidBinaryInputTest(
|
||||
ReportSkip(test_name, request, response);
|
||||
return;
|
||||
|
||||
case ConformanceResponse::kJsonPayload: {
|
||||
if (requested_output != conformance::JSON) {
|
||||
ReportFailure(
|
||||
test_name, level, request, response,
|
||||
"Test was asked for protobuf output but provided JSON instead.");
|
||||
return;
|
||||
}
|
||||
string binary_protobuf;
|
||||
Status status =
|
||||
JsonToBinaryString(type_resolver_.get(), type_url_,
|
||||
response.json_payload(), &binary_protobuf);
|
||||
if (!status.ok()) {
|
||||
ReportFailure(test_name, level, request, response,
|
||||
"JSON output we received from test was unparseable.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!test_message->ParseFromString(binary_protobuf)) {
|
||||
ReportFailure(test_name, level, request, response,
|
||||
"INTERNAL ERROR: internal JSON->protobuf transcode "
|
||||
"yielded unparseable proto.");
|
||||
return;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case ConformanceResponse::kProtobufPayload: {
|
||||
if (requested_output != conformance::PROTOBUF) {
|
||||
ReportFailure(
|
||||
test_name, level, request, response,
|
||||
"Test was asked for JSON output but provided protobuf instead.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!test_message->ParseFromString(response.protobuf_payload())) {
|
||||
ReportFailure(test_name, level, request, response,
|
||||
"Protobuf output we received from test was unparseable.");
|
||||
return;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
GOOGLE_LOG(FATAL) << test_name << ": unknown payload type: "
|
||||
<< response.result_case();
|
||||
if (!ParseResponse(response, setting, test_message)) return;
|
||||
}
|
||||
|
||||
MessageDifferencer differencer;
|
||||
@ -308,7 +272,9 @@ void ConformanceTestSuite::RunValidBinaryInputTest(
|
||||
bool check;
|
||||
check = differencer.Compare(*reference_message, *test_message);
|
||||
if (check) {
|
||||
ReportSuccess(test_name);
|
||||
if (need_report_success) {
|
||||
ReportSuccess(test_name);
|
||||
}
|
||||
} else {
|
||||
ReportFailure(test_name, level, request, response,
|
||||
"Output was not equivalent to reference message: %s.",
|
||||
@ -375,6 +341,24 @@ bool ConformanceTestSuite::CheckSetEmpty(
|
||||
}
|
||||
}
|
||||
|
||||
string ConformanceTestSuite::WireFormatToString(
|
||||
WireFormat wire_format) {
|
||||
switch (wire_format) {
|
||||
case conformance::PROTOBUF:
|
||||
return "PROTOBUF";
|
||||
case conformance::JSON:
|
||||
return "JSON";
|
||||
case conformance::JSPB:
|
||||
return "JSPB";
|
||||
case conformance::UNSPECIFIED:
|
||||
return "UNSPECIFIED";
|
||||
default:
|
||||
GOOGLE_LOG(FATAL) << "unknown wire type: "
|
||||
<< wire_format;
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
bool ConformanceTestSuite::RunSuite(
|
||||
ConformanceTestRunner* runner, std::string* output) {
|
||||
runner_ = runner;
|
||||
|
@ -87,7 +87,6 @@ class ForkPipeRunner : public ConformanceTestRunner {
|
||||
static int Run(int argc, char *argv[],
|
||||
ConformanceTestSuite* suite);
|
||||
|
||||
private:
|
||||
ForkPipeRunner(const std::string &executable)
|
||||
: child_pid_(-1), executable_(executable) {}
|
||||
|
||||
@ -97,24 +96,7 @@ class ForkPipeRunner : public ConformanceTestRunner {
|
||||
const std::string& request,
|
||||
std::string* response);
|
||||
|
||||
// TODO(haberman): make this work on Windows, instead of using these
|
||||
// UNIX-specific APIs.
|
||||
//
|
||||
// There is a platform-agnostic API in
|
||||
// src/google/protobuf/compiler/subprocess.h
|
||||
//
|
||||
// However that API only supports sending a single message to the subprocess.
|
||||
// We really want to be able to send messages and receive responses one at a
|
||||
// time:
|
||||
//
|
||||
// 1. Spawning a new process for each test would take way too long for thousands
|
||||
// of tests and subprocesses like java that can take 100ms or more to start
|
||||
// up.
|
||||
//
|
||||
// 2. Sending all the tests in one big message and receiving all results in one
|
||||
// big message would take away our visibility about which test(s) caused a
|
||||
// crash or other fatal error. It would also give us only a single failure
|
||||
// instead of all of them.
|
||||
private:
|
||||
void SpawnTestProgram();
|
||||
|
||||
void CheckedWrite(int fd, const void *buf, size_t len);
|
||||
@ -237,6 +219,7 @@ class ConformanceTestSuite {
|
||||
protected:
|
||||
virtual string InputFormatString(conformance::WireFormat format) const;
|
||||
virtual string OutputFormatString(conformance::WireFormat format) const;
|
||||
conformance::ConformanceRequest request_;
|
||||
|
||||
private:
|
||||
ConformanceLevel level_;
|
||||
@ -244,11 +227,24 @@ class ConformanceTestSuite {
|
||||
::conformance::WireFormat output_format_;
|
||||
const Message& prototype_message_;
|
||||
string test_name_;
|
||||
conformance::ConformanceRequest request_;
|
||||
};
|
||||
|
||||
bool CheckSetEmpty(const std::set<string>& set_to_check,
|
||||
const std::string& write_to_file, const std::string& msg);
|
||||
string WireFormatToString(conformance::WireFormat wire_format);
|
||||
|
||||
// Parse payload in the response to the given message. Returns true on
|
||||
// success.
|
||||
virtual bool ParseResponse(
|
||||
const conformance::ConformanceResponse& response,
|
||||
const ConformanceRequestSetting& setting,
|
||||
Message* test_message) = 0;
|
||||
|
||||
void VerifyResponse(
|
||||
const ConformanceRequestSetting& setting,
|
||||
const string& equivalent_wire_format,
|
||||
const conformance::ConformanceResponse& response,
|
||||
bool need_report_success);
|
||||
|
||||
void ReportSuccess(const std::string& test_name);
|
||||
void ReportFailure(const string& test_name,
|
||||
@ -295,10 +291,6 @@ class ConformanceTestSuite {
|
||||
|
||||
// The set of tests that the testee opted out of;
|
||||
std::set<std::string> skipped_;
|
||||
|
||||
std::unique_ptr<google::protobuf::util::TypeResolver>
|
||||
type_resolver_;
|
||||
std::string type_url_;
|
||||
};
|
||||
|
||||
} // namespace protobuf
|
||||
|
@ -212,6 +212,24 @@ int ForkPipeRunner::Run(
|
||||
return ok ? EXIT_SUCCESS : EXIT_FAILURE;
|
||||
}
|
||||
|
||||
// TODO(haberman): make this work on Windows, instead of using these
|
||||
// UNIX-specific APIs.
|
||||
//
|
||||
// There is a platform-agnostic API in
|
||||
// src/google/protobuf/compiler/subprocess.h
|
||||
//
|
||||
// However that API only supports sending a single message to the subprocess.
|
||||
// We really want to be able to send messages and receive responses one at a
|
||||
// time:
|
||||
//
|
||||
// 1. Spawning a new process for each test would take way too long for thousands
|
||||
// of tests and subprocesses like java that can take 100ms or more to start
|
||||
// up.
|
||||
//
|
||||
// 2. Sending all the tests in one big message and receiving all results in one
|
||||
// big message would take away our visibility about which test(s) caused a
|
||||
// crash or other fatal error. It would also give us only a single failure
|
||||
// instead of all of them.
|
||||
void ForkPipeRunner::SpawnTestProgram() {
|
||||
int toproc_pipe_fd[2];
|
||||
int fromproc_pipe_fd[2];
|
||||
|
@ -24,26 +24,31 @@ namespace Conformance {
|
||||
static ConformanceReflection() {
|
||||
byte[] descriptorData = global::System.Convert.FromBase64String(
|
||||
string.Concat(
|
||||
"ChFjb25mb3JtYW5jZS5wcm90bxILY29uZm9ybWFuY2Ui1QEKEkNvbmZvcm1h",
|
||||
"ChFjb25mb3JtYW5jZS5wcm90bxILY29uZm9ybWFuY2UirQIKEkNvbmZvcm1h",
|
||||
"bmNlUmVxdWVzdBIaChBwcm90b2J1Zl9wYXlsb2FkGAEgASgMSAASFgoManNv",
|
||||
"bl9wYXlsb2FkGAIgASgJSAASOAoXcmVxdWVzdGVkX291dHB1dF9mb3JtYXQY",
|
||||
"AyABKA4yFy5jb25mb3JtYW5jZS5XaXJlRm9ybWF0EhQKDG1lc3NhZ2VfdHlw",
|
||||
"ZRgEIAEoCRIwCg10ZXN0X2NhdGVnb3J5GAUgASgOMhkuY29uZm9ybWFuY2Uu",
|
||||
"VGVzdENhdGVnb3J5QgkKB3BheWxvYWQisQEKE0NvbmZvcm1hbmNlUmVzcG9u",
|
||||
"c2USFQoLcGFyc2VfZXJyb3IYASABKAlIABIZCg9zZXJpYWxpemVfZXJyb3IY",
|
||||
"BiABKAlIABIXCg1ydW50aW1lX2Vycm9yGAIgASgJSAASGgoQcHJvdG9idWZf",
|
||||
"cGF5bG9hZBgDIAEoDEgAEhYKDGpzb25fcGF5bG9hZBgEIAEoCUgAEhEKB3Nr",
|
||||
"aXBwZWQYBSABKAlIAEIICgZyZXN1bHQqNQoKV2lyZUZvcm1hdBIPCgtVTlNQ",
|
||||
"RUNJRklFRBAAEgwKCFBST1RPQlVGEAESCAoESlNPThACKmoKDFRlc3RDYXRl",
|
||||
"Z29yeRIUChBVTlNQRUNJRklFRF9URVNUEAASDwoLQklOQVJZX1RFU1QQARIN",
|
||||
"CglKU09OX1RFU1QQAhIkCiBKU09OX0lHTk9SRV9VTktOT1dOX1BBUlNJTkdf",
|
||||
"VEVTVBADQiEKH2NvbS5nb29nbGUucHJvdG9idWYuY29uZm9ybWFuY2ViBnBy",
|
||||
"b3RvMw=="));
|
||||
"bl9wYXlsb2FkGAIgASgJSAASFgoManNwYl9wYXlsb2FkGAcgASgJSAASOAoX",
|
||||
"cmVxdWVzdGVkX291dHB1dF9mb3JtYXQYAyABKA4yFy5jb25mb3JtYW5jZS5X",
|
||||
"aXJlRm9ybWF0EhQKDG1lc3NhZ2VfdHlwZRgEIAEoCRIwCg10ZXN0X2NhdGVn",
|
||||
"b3J5GAUgASgOMhkuY29uZm9ybWFuY2UuVGVzdENhdGVnb3J5Ej4KFWpzcGJf",
|
||||
"ZW5jb2Rpbmdfb3B0aW9ucxgGIAEoCzIfLmNvbmZvcm1hbmNlLkpzcGJFbmNv",
|
||||
"ZGluZ0NvbmZpZ0IJCgdwYXlsb2FkIskBChNDb25mb3JtYW5jZVJlc3BvbnNl",
|
||||
"EhUKC3BhcnNlX2Vycm9yGAEgASgJSAASGQoPc2VyaWFsaXplX2Vycm9yGAYg",
|
||||
"ASgJSAASFwoNcnVudGltZV9lcnJvchgCIAEoCUgAEhoKEHByb3RvYnVmX3Bh",
|
||||
"eWxvYWQYAyABKAxIABIWCgxqc29uX3BheWxvYWQYBCABKAlIABIRCgdza2lw",
|
||||
"cGVkGAUgASgJSAASFgoManNwYl9wYXlsb2FkGAcgASgJSABCCAoGcmVzdWx0",
|
||||
"IjcKEkpzcGJFbmNvZGluZ0NvbmZpZxIhChl1c2VfanNwYl9hcnJheV9hbnlf",
|
||||
"Zm9ybWF0GAEgASgIKj8KCldpcmVGb3JtYXQSDwoLVU5TUEVDSUZJRUQQABIM",
|
||||
"CghQUk9UT0JVRhABEggKBEpTT04QAhIICgRKU1BCEAMqeQoMVGVzdENhdGVn",
|
||||
"b3J5EhQKEFVOU1BFQ0lGSUVEX1RFU1QQABIPCgtCSU5BUllfVEVTVBABEg0K",
|
||||
"CUpTT05fVEVTVBACEiQKIEpTT05fSUdOT1JFX1VOS05PV05fUEFSU0lOR19U",
|
||||
"RVNUEAMSDQoJSlNQQl9URVNUEARCIQofY29tLmdvb2dsZS5wcm90b2J1Zi5j",
|
||||
"b25mb3JtYW5jZWIGcHJvdG8z"));
|
||||
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
|
||||
new pbr::FileDescriptor[] { },
|
||||
new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Conformance.WireFormat), typeof(global::Conformance.TestCategory), }, new pbr::GeneratedClrTypeInfo[] {
|
||||
new pbr::GeneratedClrTypeInfo(typeof(global::Conformance.ConformanceRequest), global::Conformance.ConformanceRequest.Parser, new[]{ "ProtobufPayload", "JsonPayload", "RequestedOutputFormat", "MessageType", "TestCategory" }, new[]{ "Payload" }, null, null),
|
||||
new pbr::GeneratedClrTypeInfo(typeof(global::Conformance.ConformanceResponse), global::Conformance.ConformanceResponse.Parser, new[]{ "ParseError", "SerializeError", "RuntimeError", "ProtobufPayload", "JsonPayload", "Skipped" }, new[]{ "Result" }, null, null)
|
||||
new pbr::GeneratedClrTypeInfo(typeof(global::Conformance.ConformanceRequest), global::Conformance.ConformanceRequest.Parser, new[]{ "ProtobufPayload", "JsonPayload", "JspbPayload", "RequestedOutputFormat", "MessageType", "TestCategory", "JspbEncodingOptions" }, new[]{ "Payload" }, null, null),
|
||||
new pbr::GeneratedClrTypeInfo(typeof(global::Conformance.ConformanceResponse), global::Conformance.ConformanceResponse.Parser, new[]{ "ParseError", "SerializeError", "RuntimeError", "ProtobufPayload", "JsonPayload", "Skipped", "JspbPayload" }, new[]{ "Result" }, null, null),
|
||||
new pbr::GeneratedClrTypeInfo(typeof(global::Conformance.JspbEncodingConfig), global::Conformance.JspbEncodingConfig.Parser, new[]{ "UseJspbArrayAnyFormat" }, null, null, null)
|
||||
}));
|
||||
}
|
||||
#endregion
|
||||
@ -54,6 +59,10 @@ namespace Conformance {
|
||||
[pbr::OriginalName("UNSPECIFIED")] Unspecified = 0,
|
||||
[pbr::OriginalName("PROTOBUF")] Protobuf = 1,
|
||||
[pbr::OriginalName("JSON")] Json = 2,
|
||||
/// <summary>
|
||||
/// Google internal only. Opensource testees just skip it.
|
||||
/// </summary>
|
||||
[pbr::OriginalName("JSPB")] Jspb = 3,
|
||||
}
|
||||
|
||||
public enum TestCategory {
|
||||
@ -74,6 +83,10 @@ namespace Conformance {
|
||||
/// for more detail.
|
||||
/// </summary>
|
||||
[pbr::OriginalName("JSON_IGNORE_UNKNOWN_PARSING_TEST")] JsonIgnoreUnknownParsingTest = 3,
|
||||
/// <summary>
|
||||
/// Test jspb wire format. Google internal only.
|
||||
/// </summary>
|
||||
[pbr::OriginalName("JSPB_TEST")] JspbTest = 4,
|
||||
}
|
||||
|
||||
#endregion
|
||||
@ -114,6 +127,7 @@ namespace Conformance {
|
||||
requestedOutputFormat_ = other.requestedOutputFormat_;
|
||||
messageType_ = other.messageType_;
|
||||
testCategory_ = other.testCategory_;
|
||||
jspbEncodingOptions_ = other.jspbEncodingOptions_ != null ? other.jspbEncodingOptions_.Clone() : null;
|
||||
switch (other.PayloadCase) {
|
||||
case PayloadOneofCase.ProtobufPayload:
|
||||
ProtobufPayload = other.ProtobufPayload;
|
||||
@ -121,6 +135,9 @@ namespace Conformance {
|
||||
case PayloadOneofCase.JsonPayload:
|
||||
JsonPayload = other.JsonPayload;
|
||||
break;
|
||||
case PayloadOneofCase.JspbPayload:
|
||||
JspbPayload = other.JspbPayload;
|
||||
break;
|
||||
}
|
||||
|
||||
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
|
||||
@ -153,6 +170,20 @@ namespace Conformance {
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>Field number for the "jspb_payload" field.</summary>
|
||||
public const int JspbPayloadFieldNumber = 7;
|
||||
/// <summary>
|
||||
/// Google internal only.
|
||||
/// </summary>
|
||||
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
|
||||
public string JspbPayload {
|
||||
get { return payloadCase_ == PayloadOneofCase.JspbPayload ? (string) payload_ : ""; }
|
||||
set {
|
||||
payload_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
|
||||
payloadCase_ = PayloadOneofCase.JspbPayload;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>Field number for the "requested_output_format" field.</summary>
|
||||
public const int RequestedOutputFormatFieldNumber = 3;
|
||||
private global::Conformance.WireFormat requestedOutputFormat_ = 0;
|
||||
@ -199,12 +230,27 @@ namespace Conformance {
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>Field number for the "jspb_encoding_options" field.</summary>
|
||||
public const int JspbEncodingOptionsFieldNumber = 6;
|
||||
private global::Conformance.JspbEncodingConfig jspbEncodingOptions_;
|
||||
/// <summary>
|
||||
/// Specify details for how to encode jspb.
|
||||
/// </summary>
|
||||
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
|
||||
public global::Conformance.JspbEncodingConfig JspbEncodingOptions {
|
||||
get { return jspbEncodingOptions_; }
|
||||
set {
|
||||
jspbEncodingOptions_ = value;
|
||||
}
|
||||
}
|
||||
|
||||
private object payload_;
|
||||
/// <summary>Enum of possible cases for the "payload" oneof.</summary>
|
||||
public enum PayloadOneofCase {
|
||||
None = 0,
|
||||
ProtobufPayload = 1,
|
||||
JsonPayload = 2,
|
||||
JspbPayload = 7,
|
||||
}
|
||||
private PayloadOneofCase payloadCase_ = PayloadOneofCase.None;
|
||||
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
|
||||
@ -233,9 +279,11 @@ namespace Conformance {
|
||||
}
|
||||
if (ProtobufPayload != other.ProtobufPayload) return false;
|
||||
if (JsonPayload != other.JsonPayload) return false;
|
||||
if (JspbPayload != other.JspbPayload) return false;
|
||||
if (RequestedOutputFormat != other.RequestedOutputFormat) return false;
|
||||
if (MessageType != other.MessageType) return false;
|
||||
if (TestCategory != other.TestCategory) return false;
|
||||
if (!object.Equals(JspbEncodingOptions, other.JspbEncodingOptions)) return false;
|
||||
if (PayloadCase != other.PayloadCase) return false;
|
||||
return Equals(_unknownFields, other._unknownFields);
|
||||
}
|
||||
@ -245,9 +293,11 @@ namespace Conformance {
|
||||
int hash = 1;
|
||||
if (payloadCase_ == PayloadOneofCase.ProtobufPayload) hash ^= ProtobufPayload.GetHashCode();
|
||||
if (payloadCase_ == PayloadOneofCase.JsonPayload) hash ^= JsonPayload.GetHashCode();
|
||||
if (payloadCase_ == PayloadOneofCase.JspbPayload) hash ^= JspbPayload.GetHashCode();
|
||||
if (RequestedOutputFormat != 0) hash ^= RequestedOutputFormat.GetHashCode();
|
||||
if (MessageType.Length != 0) hash ^= MessageType.GetHashCode();
|
||||
if (TestCategory != 0) hash ^= TestCategory.GetHashCode();
|
||||
if (jspbEncodingOptions_ != null) hash ^= JspbEncodingOptions.GetHashCode();
|
||||
hash ^= (int) payloadCase_;
|
||||
if (_unknownFields != null) {
|
||||
hash ^= _unknownFields.GetHashCode();
|
||||
@ -282,6 +332,14 @@ namespace Conformance {
|
||||
output.WriteRawTag(40);
|
||||
output.WriteEnum((int) TestCategory);
|
||||
}
|
||||
if (jspbEncodingOptions_ != null) {
|
||||
output.WriteRawTag(50);
|
||||
output.WriteMessage(JspbEncodingOptions);
|
||||
}
|
||||
if (payloadCase_ == PayloadOneofCase.JspbPayload) {
|
||||
output.WriteRawTag(58);
|
||||
output.WriteString(JspbPayload);
|
||||
}
|
||||
if (_unknownFields != null) {
|
||||
_unknownFields.WriteTo(output);
|
||||
}
|
||||
@ -296,6 +354,9 @@ namespace Conformance {
|
||||
if (payloadCase_ == PayloadOneofCase.JsonPayload) {
|
||||
size += 1 + pb::CodedOutputStream.ComputeStringSize(JsonPayload);
|
||||
}
|
||||
if (payloadCase_ == PayloadOneofCase.JspbPayload) {
|
||||
size += 1 + pb::CodedOutputStream.ComputeStringSize(JspbPayload);
|
||||
}
|
||||
if (RequestedOutputFormat != 0) {
|
||||
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) RequestedOutputFormat);
|
||||
}
|
||||
@ -305,6 +366,9 @@ namespace Conformance {
|
||||
if (TestCategory != 0) {
|
||||
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) TestCategory);
|
||||
}
|
||||
if (jspbEncodingOptions_ != null) {
|
||||
size += 1 + pb::CodedOutputStream.ComputeMessageSize(JspbEncodingOptions);
|
||||
}
|
||||
if (_unknownFields != null) {
|
||||
size += _unknownFields.CalculateSize();
|
||||
}
|
||||
@ -325,6 +389,12 @@ namespace Conformance {
|
||||
if (other.TestCategory != 0) {
|
||||
TestCategory = other.TestCategory;
|
||||
}
|
||||
if (other.jspbEncodingOptions_ != null) {
|
||||
if (jspbEncodingOptions_ == null) {
|
||||
JspbEncodingOptions = new global::Conformance.JspbEncodingConfig();
|
||||
}
|
||||
JspbEncodingOptions.MergeFrom(other.JspbEncodingOptions);
|
||||
}
|
||||
switch (other.PayloadCase) {
|
||||
case PayloadOneofCase.ProtobufPayload:
|
||||
ProtobufPayload = other.ProtobufPayload;
|
||||
@ -332,6 +402,9 @@ namespace Conformance {
|
||||
case PayloadOneofCase.JsonPayload:
|
||||
JsonPayload = other.JsonPayload;
|
||||
break;
|
||||
case PayloadOneofCase.JspbPayload:
|
||||
JspbPayload = other.JspbPayload;
|
||||
break;
|
||||
}
|
||||
|
||||
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
|
||||
@ -367,6 +440,17 @@ namespace Conformance {
|
||||
TestCategory = (global::Conformance.TestCategory) input.ReadEnum();
|
||||
break;
|
||||
}
|
||||
case 50: {
|
||||
if (jspbEncodingOptions_ == null) {
|
||||
JspbEncodingOptions = new global::Conformance.JspbEncodingConfig();
|
||||
}
|
||||
input.ReadMessage(JspbEncodingOptions);
|
||||
break;
|
||||
}
|
||||
case 58: {
|
||||
JspbPayload = input.ReadString();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -420,6 +504,9 @@ namespace Conformance {
|
||||
case ResultOneofCase.Skipped:
|
||||
Skipped = other.Skipped;
|
||||
break;
|
||||
case ResultOneofCase.JspbPayload:
|
||||
JspbPayload = other.JspbPayload;
|
||||
break;
|
||||
}
|
||||
|
||||
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
|
||||
@ -525,6 +612,22 @@ namespace Conformance {
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>Field number for the "jspb_payload" field.</summary>
|
||||
public const int JspbPayloadFieldNumber = 7;
|
||||
/// <summary>
|
||||
/// If the input was successfully parsed and the requested output was JSPB,
|
||||
/// serialize to JSPB and set it in this field. JSPB is google internal only
|
||||
/// format. Opensource testees can just skip it.
|
||||
/// </summary>
|
||||
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
|
||||
public string JspbPayload {
|
||||
get { return resultCase_ == ResultOneofCase.JspbPayload ? (string) result_ : ""; }
|
||||
set {
|
||||
result_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
|
||||
resultCase_ = ResultOneofCase.JspbPayload;
|
||||
}
|
||||
}
|
||||
|
||||
private object result_;
|
||||
/// <summary>Enum of possible cases for the "result" oneof.</summary>
|
||||
public enum ResultOneofCase {
|
||||
@ -535,6 +638,7 @@ namespace Conformance {
|
||||
ProtobufPayload = 3,
|
||||
JsonPayload = 4,
|
||||
Skipped = 5,
|
||||
JspbPayload = 7,
|
||||
}
|
||||
private ResultOneofCase resultCase_ = ResultOneofCase.None;
|
||||
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
|
||||
@ -567,6 +671,7 @@ namespace Conformance {
|
||||
if (ProtobufPayload != other.ProtobufPayload) return false;
|
||||
if (JsonPayload != other.JsonPayload) return false;
|
||||
if (Skipped != other.Skipped) return false;
|
||||
if (JspbPayload != other.JspbPayload) return false;
|
||||
if (ResultCase != other.ResultCase) return false;
|
||||
return Equals(_unknownFields, other._unknownFields);
|
||||
}
|
||||
@ -580,6 +685,7 @@ namespace Conformance {
|
||||
if (resultCase_ == ResultOneofCase.ProtobufPayload) hash ^= ProtobufPayload.GetHashCode();
|
||||
if (resultCase_ == ResultOneofCase.JsonPayload) hash ^= JsonPayload.GetHashCode();
|
||||
if (resultCase_ == ResultOneofCase.Skipped) hash ^= Skipped.GetHashCode();
|
||||
if (resultCase_ == ResultOneofCase.JspbPayload) hash ^= JspbPayload.GetHashCode();
|
||||
hash ^= (int) resultCase_;
|
||||
if (_unknownFields != null) {
|
||||
hash ^= _unknownFields.GetHashCode();
|
||||
@ -618,6 +724,10 @@ namespace Conformance {
|
||||
output.WriteRawTag(50);
|
||||
output.WriteString(SerializeError);
|
||||
}
|
||||
if (resultCase_ == ResultOneofCase.JspbPayload) {
|
||||
output.WriteRawTag(58);
|
||||
output.WriteString(JspbPayload);
|
||||
}
|
||||
if (_unknownFields != null) {
|
||||
_unknownFields.WriteTo(output);
|
||||
}
|
||||
@ -644,6 +754,9 @@ namespace Conformance {
|
||||
if (resultCase_ == ResultOneofCase.Skipped) {
|
||||
size += 1 + pb::CodedOutputStream.ComputeStringSize(Skipped);
|
||||
}
|
||||
if (resultCase_ == ResultOneofCase.JspbPayload) {
|
||||
size += 1 + pb::CodedOutputStream.ComputeStringSize(JspbPayload);
|
||||
}
|
||||
if (_unknownFields != null) {
|
||||
size += _unknownFields.CalculateSize();
|
||||
}
|
||||
@ -674,6 +787,9 @@ namespace Conformance {
|
||||
case ResultOneofCase.Skipped:
|
||||
Skipped = other.Skipped;
|
||||
break;
|
||||
case ResultOneofCase.JspbPayload:
|
||||
JspbPayload = other.JspbPayload;
|
||||
break;
|
||||
}
|
||||
|
||||
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
|
||||
@ -713,6 +829,147 @@ namespace Conformance {
|
||||
SerializeError = input.ReadString();
|
||||
break;
|
||||
}
|
||||
case 58: {
|
||||
JspbPayload = input.ReadString();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Encoding options for jspb format.
|
||||
/// </summary>
|
||||
public sealed partial class JspbEncodingConfig : pb::IMessage<JspbEncodingConfig> {
|
||||
private static readonly pb::MessageParser<JspbEncodingConfig> _parser = new pb::MessageParser<JspbEncodingConfig>(() => new JspbEncodingConfig());
|
||||
private pb::UnknownFieldSet _unknownFields;
|
||||
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
|
||||
public static pb::MessageParser<JspbEncodingConfig> Parser { get { return _parser; } }
|
||||
|
||||
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
|
||||
public static pbr::MessageDescriptor Descriptor {
|
||||
get { return global::Conformance.ConformanceReflection.Descriptor.MessageTypes[2]; }
|
||||
}
|
||||
|
||||
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
|
||||
pbr::MessageDescriptor pb::IMessage.Descriptor {
|
||||
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public JspbEncodingConfig() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public JspbEncodingConfig(JspbEncodingConfig other) : this() {
      useJspbArrayAnyFormat_ = other.useJspbArrayAnyFormat_;
      _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public JspbEncodingConfig Clone() {
      return new JspbEncodingConfig(this);
    }

    /// <summary>Field number for the "use_jspb_array_any_format" field.</summary>
    public const int UseJspbArrayAnyFormatFieldNumber = 1;
    private bool useJspbArrayAnyFormat_;
    /// <summary>
    /// Encode the value field of Any as jspb array if true, otherwise binary.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool UseJspbArrayAnyFormat {
      get { return useJspbArrayAnyFormat_; }
      set {
        useJspbArrayAnyFormat_ = value;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as JspbEncodingConfig);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(JspbEncodingConfig other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (UseJspbArrayAnyFormat != other.UseJspbArrayAnyFormat) return false;
      return Equals(_unknownFields, other._unknownFields);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (UseJspbArrayAnyFormat != false) hash ^= UseJspbArrayAnyFormat.GetHashCode();
      if (_unknownFields != null) {
        hash ^= _unknownFields.GetHashCode();
      }
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (UseJspbArrayAnyFormat != false) {
        output.WriteRawTag(8);
        output.WriteBool(UseJspbArrayAnyFormat);
      }
      if (_unknownFields != null) {
        _unknownFields.WriteTo(output);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (UseJspbArrayAnyFormat != false) {
        size += 1 + 1;
      }
      if (_unknownFields != null) {
        size += _unknownFields.CalculateSize();
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(JspbEncodingConfig other) {
      if (other == null) {
        return;
      }
      if (other.UseJspbArrayAnyFormat != false) {
        UseJspbArrayAnyFormat = other.UseJspbArrayAnyFormat;
      }
      _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            if (!pb::UnknownFieldSet.MergeFieldFrom(ref _unknownFields, input)) {
              return;
            }
            break;
          case 8: {
            UseJspbArrayAnyFormat = input.ReadBool();
            break;
          }
        }
      }
    }
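The generated C# above is the one-field JspbEncodingConfig message from conformance.proto: a single bool at field number 1, which is why WriteTo emits raw tag 0x08 and CalculateSize returns 2 when the flag is set. A minimal sketch of the same message from Python, assuming the Python bindings for conformance.proto are generated as conformance_pb2 (the module name is an assumption):

# Sketch only: mirrors the generated C# message above.
import conformance_pb2  # assumed module name for the Python bindings of conformance.proto

config = conformance_pb2.JspbEncodingConfig(use_jspb_array_any_format=True)
data = config.SerializeToString()
assert data == b"\x08\x01"      # tag 0x08 for field 1 (varint) followed by value 0x01
assert config.ByteSize() == 2   # matches CalculateSize() above: 1 (tag) + 1 (bool)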
@@ -22,6 +22,11 @@
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
    </dependency>
    <dependency>
      <groupId>com.google.errorprone</groupId>
      <artifactId>error_prone_annotations</artifactId>
      <version>2.3.2</version>
    </dependency>
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava-testlib</artifactId>
@@ -41,6 +41,7 @@ class Label
        return self::$valueToName[$value];
    }

    public static function value($name)
    {
        $const = __CLASS__ . '::' . strtoupper($name);

@@ -136,6 +136,7 @@ class Type
        return self::$valueToName[$value];
    }

    public static function value($name)
    {
        $const = __CLASS__ . '::' . strtoupper($name);

@@ -41,6 +41,7 @@ class CType
        return self::$valueToName[$value];
    }

    public static function value($name)
    {
        $const = __CLASS__ . '::' . strtoupper($name);

@@ -45,6 +45,7 @@ class JSType
        return self::$valueToName[$value];
    }

    public static function value($name)
    {
        $const = __CLASS__ . '::' . strtoupper($name);

@@ -47,6 +47,7 @@ class OptimizeMode
        return self::$valueToName[$value];
    }

    public static function value($name)
    {
        $const = __CLASS__ . '::' . strtoupper($name);

@@ -47,6 +47,7 @@ class IdempotencyLevel
        return self::$valueToName[$value];
    }

    public static function value($name)
    {
        $const = __CLASS__ . '::' . strtoupper($name);
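Each of the PHP hunks above touches the same generated value($name) helper, which upper-cases the requested name and resolves it to the matching class constant. A rough Python sketch of that lookup, using illustrative constants from FieldDescriptorProto.Label:

# Illustrative sketch of the name-to-value lookup the generated PHP enums provide.
class Label:
    LABEL_OPTIONAL = 1
    LABEL_REQUIRED = 2
    LABEL_REPEATED = 3

    @classmethod
    def value(cls, name):
        const = name.upper()  # mirrors strtoupper($name)
        if not hasattr(cls, const):
            raise ValueError("Enum Label has no value defined for name %s" % name)
        return getattr(cls, const)

assert Label.value("label_repeated") == 3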
protobuf.bzl
@@ -1,174 +1,177 @@
|
||||
load("@bazel_skylib//lib:versions.bzl", "versions")
|
||||
|
||||
def _GetPath(ctx, path):
|
||||
if ctx.label.workspace_root:
|
||||
return ctx.label.workspace_root + '/' + path
|
||||
else:
|
||||
return path
|
||||
if ctx.label.workspace_root:
|
||||
return ctx.label.workspace_root + "/" + path
|
||||
else:
|
||||
return path
|
||||
|
||||
def _IsNewExternal(ctx):
|
||||
# Bazel 0.4.4 and older have genfiles paths that look like:
|
||||
# bazel-out/local-fastbuild/genfiles/external/repo/foo
|
||||
# After the exec root rearrangement, they look like:
|
||||
# ../repo/bazel-out/local-fastbuild/genfiles/foo
|
||||
return ctx.label.workspace_root.startswith("../")
|
||||
# Bazel 0.4.4 and older have genfiles paths that look like:
|
||||
# bazel-out/local-fastbuild/genfiles/external/repo/foo
|
||||
# After the exec root rearrangement, they look like:
|
||||
# ../repo/bazel-out/local-fastbuild/genfiles/foo
|
||||
return ctx.label.workspace_root.startswith("../")
|
||||
|
||||
def _GenDir(ctx):
|
||||
if _IsNewExternal(ctx):
|
||||
# We are using the fact that Bazel 0.4.4+ provides repository-relative paths
|
||||
# for ctx.genfiles_dir.
|
||||
return ctx.genfiles_dir.path + (
|
||||
"/" + ctx.attr.includes[0] if ctx.attr.includes and ctx.attr.includes[0] else "")
|
||||
# This means that we're either in the old version OR the new version in the local repo.
|
||||
# Either way, appending the source path to the genfiles dir works.
|
||||
return ctx.var["GENDIR"] + "/" + _SourceDir(ctx)
|
||||
|
||||
def _SourceDir(ctx):
|
||||
if not ctx.attr.includes:
|
||||
return ctx.label.workspace_root
|
||||
if not ctx.attr.includes[0]:
|
||||
return _GetPath(ctx, ctx.label.package)
|
||||
if not ctx.label.package:
|
||||
return _GetPath(ctx, ctx.attr.includes[0])
|
||||
return _GetPath(ctx, ctx.label.package + '/' + ctx.attr.includes[0])
|
||||
|
||||
def _CcHdrs(srcs, use_grpc_plugin=False):
|
||||
ret = [s[:-len(".proto")] + ".pb.h" for s in srcs]
|
||||
if use_grpc_plugin:
|
||||
ret += [s[:-len(".proto")] + ".grpc.pb.h" for s in srcs]
|
||||
return ret
|
||||
|
||||
def _CcSrcs(srcs, use_grpc_plugin=False):
|
||||
ret = [s[:-len(".proto")] + ".pb.cc" for s in srcs]
|
||||
if use_grpc_plugin:
|
||||
ret += [s[:-len(".proto")] + ".grpc.pb.cc" for s in srcs]
|
||||
return ret
|
||||
|
||||
def _CcOuts(srcs, use_grpc_plugin=False):
|
||||
return _CcHdrs(srcs, use_grpc_plugin) + _CcSrcs(srcs, use_grpc_plugin)
|
||||
|
||||
def _PyOuts(srcs, use_grpc_plugin=False):
|
||||
ret = [s[:-len(".proto")] + "_pb2.py" for s in srcs]
|
||||
if use_grpc_plugin:
|
||||
ret += [s[:-len(".proto")] + "_pb2_grpc.py" for s in srcs]
|
||||
return ret
|
||||
|
||||
def _RelativeOutputPath(path, include, dest=""):
|
||||
if include == None:
|
||||
return path
|
||||
|
||||
if not path.startswith(include):
|
||||
fail("Include path %s isn't part of the path %s." % (include, path))
|
||||
|
||||
if include and include[-1] != '/':
|
||||
include = include + '/'
|
||||
if dest and dest[-1] != '/':
|
||||
dest = dest + '/'
|
||||
|
||||
path = path[len(include):]
|
||||
return dest + path
|
||||
|
||||
def _proto_gen_impl(ctx):
|
||||
"""General implementation for generating protos"""
|
||||
srcs = ctx.files.srcs
|
||||
deps = []
|
||||
deps += ctx.files.srcs
|
||||
source_dir = _SourceDir(ctx)
|
||||
gen_dir = _GenDir(ctx).rstrip('/')
|
||||
if source_dir:
|
||||
import_flags = ["-I" + source_dir, "-I" + gen_dir]
|
||||
else:
|
||||
import_flags = ["-I."]
|
||||
|
||||
for dep in ctx.attr.deps:
|
||||
import_flags += dep.proto.import_flags
|
||||
deps += dep.proto.deps
|
||||
|
||||
if not ctx.attr.gen_cc and not ctx.attr.gen_py and not ctx.executable.plugin:
|
||||
return struct(
|
||||
proto=struct(
|
||||
srcs=srcs,
|
||||
import_flags=import_flags,
|
||||
deps=deps,
|
||||
),
|
||||
)
|
||||
|
||||
for src in srcs:
|
||||
args = []
|
||||
|
||||
in_gen_dir = src.root.path == gen_dir
|
||||
if in_gen_dir:
|
||||
import_flags_real = []
|
||||
for f in depset(import_flags):
|
||||
path = f.replace('-I', '')
|
||||
import_flags_real.append('-I$(realpath -s %s)' % path)
|
||||
|
||||
outs = []
|
||||
use_grpc_plugin = (ctx.attr.plugin_language == "grpc" and ctx.attr.plugin)
|
||||
path_tpl = "$(realpath %s)" if in_gen_dir else "%s"
|
||||
if ctx.attr.gen_cc:
|
||||
args += [("--cpp_out=" + path_tpl) % gen_dir]
|
||||
outs.extend(_CcOuts([src.basename], use_grpc_plugin=use_grpc_plugin))
|
||||
if ctx.attr.gen_py:
|
||||
args += [("--python_out=" + path_tpl) % gen_dir]
|
||||
outs.extend(_PyOuts([src.basename], use_grpc_plugin=use_grpc_plugin))
|
||||
|
||||
outs = [ctx.actions.declare_file(out, sibling=src) for out in outs]
|
||||
inputs = [src] + deps
|
||||
if ctx.executable.plugin:
|
||||
plugin = ctx.executable.plugin
|
||||
lang = ctx.attr.plugin_language
|
||||
if not lang and plugin.basename.startswith('protoc-gen-'):
|
||||
lang = plugin.basename[len('protoc-gen-'):]
|
||||
if not lang:
|
||||
fail("cannot infer the target language of plugin", "plugin_language")
|
||||
|
||||
outdir = "." if in_gen_dir else gen_dir
|
||||
|
||||
if ctx.attr.plugin_options:
|
||||
outdir = ",".join(ctx.attr.plugin_options) + ":" + outdir
|
||||
args += [("--plugin=protoc-gen-%s=" + path_tpl) % (lang, plugin.path)]
|
||||
args += ["--%s_out=%s" % (lang, outdir)]
|
||||
inputs += [plugin]
|
||||
|
||||
if not in_gen_dir:
|
||||
ctx.actions.run(
|
||||
inputs=inputs,
|
||||
outputs=outs,
|
||||
arguments=args + import_flags + [src.path],
|
||||
executable=ctx.executable.protoc,
|
||||
mnemonic="ProtoCompile",
|
||||
use_default_shell_env=True,
|
||||
)
|
||||
else:
|
||||
for out in outs:
|
||||
orig_command = " ".join(
|
||||
["$(realpath %s)" % ctx.executable.protoc.path] + args +
|
||||
import_flags_real + ["-I.", src.basename])
|
||||
command = ";".join([
|
||||
'CMD="%s"' % orig_command,
|
||||
"cd %s" % src.dirname,
|
||||
"${CMD}",
|
||||
"cd -",
|
||||
])
|
||||
generated_out = '/'.join([gen_dir, out.basename])
|
||||
if generated_out != out.path:
|
||||
command += ";mv %s %s" % (generated_out, out.path)
|
||||
ctx.actions.run_shell(
|
||||
inputs=inputs + [ctx.executable.protoc],
|
||||
outputs=[out],
|
||||
command=command,
|
||||
mnemonic="ProtoCompile",
|
||||
use_default_shell_env=True,
|
||||
if _IsNewExternal(ctx):
|
||||
# We are using the fact that Bazel 0.4.4+ provides repository-relative paths
|
||||
# for ctx.genfiles_dir.
|
||||
return ctx.genfiles_dir.path + (
|
||||
"/" + ctx.attr.includes[0] if ctx.attr.includes and ctx.attr.includes[0] else ""
|
||||
)
|
||||
|
||||
return struct(
|
||||
proto=struct(
|
||||
srcs=srcs,
|
||||
import_flags=import_flags,
|
||||
deps=deps,
|
||||
),
|
||||
)
|
||||
# This means that we're either in the old version OR the new version in the local repo.
|
||||
# Either way, appending the source path to the genfiles dir works.
|
||||
return ctx.var["GENDIR"] + "/" + _SourceDir(ctx)
|
||||
|
||||
def _SourceDir(ctx):
|
||||
if not ctx.attr.includes:
|
||||
return ctx.label.workspace_root
|
||||
if not ctx.attr.includes[0]:
|
||||
return _GetPath(ctx, ctx.label.package)
|
||||
if not ctx.label.package:
|
||||
return _GetPath(ctx, ctx.attr.includes[0])
|
||||
return _GetPath(ctx, ctx.label.package + "/" + ctx.attr.includes[0])
|
||||
|
||||
def _CcHdrs(srcs, use_grpc_plugin = False):
|
||||
ret = [s[:-len(".proto")] + ".pb.h" for s in srcs]
|
||||
if use_grpc_plugin:
|
||||
ret += [s[:-len(".proto")] + ".grpc.pb.h" for s in srcs]
|
||||
return ret
|
||||
|
||||
def _CcSrcs(srcs, use_grpc_plugin = False):
|
||||
ret = [s[:-len(".proto")] + ".pb.cc" for s in srcs]
|
||||
if use_grpc_plugin:
|
||||
ret += [s[:-len(".proto")] + ".grpc.pb.cc" for s in srcs]
|
||||
return ret
|
||||
|
||||
def _CcOuts(srcs, use_grpc_plugin = False):
|
||||
return _CcHdrs(srcs, use_grpc_plugin) + _CcSrcs(srcs, use_grpc_plugin)
|
||||
|
||||
def _PyOuts(srcs, use_grpc_plugin = False):
|
||||
ret = [s[:-len(".proto")] + "_pb2.py" for s in srcs]
|
||||
if use_grpc_plugin:
|
||||
ret += [s[:-len(".proto")] + "_pb2_grpc.py" for s in srcs]
|
||||
return ret
|
||||
|
||||
def _RelativeOutputPath(path, include, dest = ""):
|
||||
if include == None:
|
||||
return path
|
||||
|
||||
if not path.startswith(include):
|
||||
fail("Include path %s isn't part of the path %s." % (include, path))
|
||||
|
||||
if include and include[-1] != "/":
|
||||
include = include + "/"
|
||||
if dest and dest[-1] != "/":
|
||||
dest = dest + "/"
|
||||
|
||||
path = path[len(include):]
|
||||
return dest + path
|
||||
|
||||
def _proto_gen_impl(ctx):
|
||||
"""General implementation for generating protos"""
|
||||
srcs = ctx.files.srcs
|
||||
deps = []
|
||||
deps += ctx.files.srcs
|
||||
source_dir = _SourceDir(ctx)
|
||||
gen_dir = _GenDir(ctx).rstrip("/")
|
||||
if source_dir:
|
||||
import_flags = ["-I" + source_dir, "-I" + gen_dir]
|
||||
else:
|
||||
import_flags = ["-I."]
|
||||
|
||||
for dep in ctx.attr.deps:
|
||||
import_flags += dep.proto.import_flags
|
||||
deps += dep.proto.deps
|
||||
|
||||
if not ctx.attr.gen_cc and not ctx.attr.gen_py and not ctx.executable.plugin:
|
||||
return struct(
|
||||
proto = struct(
|
||||
srcs = srcs,
|
||||
import_flags = import_flags,
|
||||
deps = deps,
|
||||
),
|
||||
)
|
||||
|
||||
for src in srcs:
|
||||
args = []
|
||||
|
||||
in_gen_dir = src.root.path == gen_dir
|
||||
if in_gen_dir:
|
||||
import_flags_real = []
|
||||
for f in depset(import_flags).to_list():
|
||||
path = f.replace("-I", "")
|
||||
import_flags_real.append("-I$(realpath -s %s)" % path)
|
||||
|
||||
outs = []
|
||||
use_grpc_plugin = (ctx.attr.plugin_language == "grpc" and ctx.attr.plugin)
|
||||
path_tpl = "$(realpath %s)" if in_gen_dir else "%s"
|
||||
if ctx.attr.gen_cc:
|
||||
args += [("--cpp_out=" + path_tpl) % gen_dir]
|
||||
outs.extend(_CcOuts([src.basename], use_grpc_plugin = use_grpc_plugin))
|
||||
if ctx.attr.gen_py:
|
||||
args += [("--python_out=" + path_tpl) % gen_dir]
|
||||
outs.extend(_PyOuts([src.basename], use_grpc_plugin = use_grpc_plugin))
|
||||
|
||||
outs = [ctx.actions.declare_file(out, sibling = src) for out in outs]
|
||||
inputs = [src] + deps
|
||||
if ctx.executable.plugin:
|
||||
plugin = ctx.executable.plugin
|
||||
lang = ctx.attr.plugin_language
|
||||
if not lang and plugin.basename.startswith("protoc-gen-"):
|
||||
lang = plugin.basename[len("protoc-gen-"):]
|
||||
if not lang:
|
||||
fail("cannot infer the target language of plugin", "plugin_language")
|
||||
|
||||
outdir = "." if in_gen_dir else gen_dir
|
||||
|
||||
if ctx.attr.plugin_options:
|
||||
outdir = ",".join(ctx.attr.plugin_options) + ":" + outdir
|
||||
args += [("--plugin=protoc-gen-%s=" + path_tpl) % (lang, plugin.path)]
|
||||
args += ["--%s_out=%s" % (lang, outdir)]
|
||||
inputs += [plugin]
|
||||
|
||||
if not in_gen_dir:
|
||||
ctx.actions.run(
|
||||
inputs = inputs,
|
||||
outputs = outs,
|
||||
arguments = args + import_flags + [src.path],
|
||||
executable = ctx.executable.protoc,
|
||||
mnemonic = "ProtoCompile",
|
||||
use_default_shell_env = True,
|
||||
)
|
||||
else:
|
||||
for out in outs:
|
||||
orig_command = " ".join(
|
||||
["$(realpath %s)" % ctx.executable.protoc.path] + args +
|
||||
import_flags_real + ["-I.", src.basename],
|
||||
)
|
||||
command = ";".join([
|
||||
'CMD="%s"' % orig_command,
|
||||
"cd %s" % src.dirname,
|
||||
"${CMD}",
|
||||
"cd -",
|
||||
])
|
||||
generated_out = "/".join([gen_dir, out.basename])
|
||||
if generated_out != out.path:
|
||||
command += ";mv %s %s" % (generated_out, out.path)
|
||||
ctx.actions.run_shell(
|
||||
inputs = inputs + [ctx.executable.protoc],
|
||||
outputs = [out],
|
||||
command = command,
|
||||
mnemonic = "ProtoCompile",
|
||||
use_default_shell_env = True,
|
||||
)
|
||||
|
||||
return struct(
|
||||
proto = struct(
|
||||
srcs = srcs,
|
||||
import_flags = import_flags,
|
||||
deps = deps,
|
||||
),
|
||||
)
|
||||
|
||||
proto_gen = rule(
|
||||
attrs = {
|
||||
@@ -218,245 +221,251 @@ Args:
|
||||
|
||||
def cc_proto_library(
|
||||
name,
|
||||
srcs=[],
|
||||
deps=[],
|
||||
cc_libs=[],
|
||||
include=None,
|
||||
protoc="@com_google_protobuf//:protoc",
|
||||
internal_bootstrap_hack=False,
|
||||
use_grpc_plugin=False,
|
||||
default_runtime="@com_google_protobuf//:protobuf",
|
||||
srcs = [],
|
||||
deps = [],
|
||||
cc_libs = [],
|
||||
include = None,
|
||||
protoc = "@com_google_protobuf//:protoc",
|
||||
internal_bootstrap_hack = False,
|
||||
use_grpc_plugin = False,
|
||||
default_runtime = "@com_google_protobuf//:protobuf",
|
||||
**kargs):
|
||||
"""Bazel rule to create a C++ protobuf library from proto source files
|
||||
"""Bazel rule to create a C++ protobuf library from proto source files
|
||||
|
||||
NOTE: the rule is only an internal workaround to generate protos. The
|
||||
interface may change and the rule may be removed when bazel has introduced
|
||||
the native rule.
|
||||
NOTE: the rule is only an internal workaround to generate protos. The
|
||||
interface may change and the rule may be removed when bazel has introduced
|
||||
the native rule.
|
||||
|
||||
Args:
|
||||
name: the name of the cc_proto_library.
|
||||
srcs: the .proto files of the cc_proto_library.
|
||||
deps: a list of dependency labels; must be cc_proto_library.
|
||||
cc_libs: a list of other cc_library targets depended on by the generated
|
||||
cc_library.
|
||||
include: a string indicating the include path of the .proto files.
|
||||
protoc: the label of the protocol compiler to generate the sources.
|
||||
internal_bootstrap_hack: a flag indicating the cc_proto_library is used only
|
||||
for bootstrapping. When it is set to True, no files will be generated.
|
||||
The rule will simply be a provider for .proto files, so that other
|
||||
cc_proto_library can depend on it.
|
||||
use_grpc_plugin: a flag to indicate whether to call the grpc C++ plugin
|
||||
when processing the proto files.
|
||||
default_runtime: the implicit default runtime which will be depended on by
|
||||
the generated cc_library target.
|
||||
**kargs: other keyword arguments that are passed to cc_library.
|
||||
Args:
|
||||
name: the name of the cc_proto_library.
|
||||
srcs: the .proto files of the cc_proto_library.
|
||||
deps: a list of dependency labels; must be cc_proto_library.
|
||||
cc_libs: a list of other cc_library targets depended on by the generated
|
||||
cc_library.
|
||||
include: a string indicating the include path of the .proto files.
|
||||
protoc: the label of the protocol compiler to generate the sources.
|
||||
internal_bootstrap_hack: a flag indicating the cc_proto_library is used only
|
||||
for bootstrapping. When it is set to True, no files will be generated.
|
||||
The rule will simply be a provider for .proto files, so that other
|
||||
cc_proto_library can depend on it.
|
||||
use_grpc_plugin: a flag to indicate whether to call the grpc C++ plugin
|
||||
when processing the proto files.
|
||||
default_runtime: the implicit default runtime which will be depended on by
|
||||
the generated cc_library target.
|
||||
**kargs: other keyword arguments that are passed to cc_library.
|
||||
|
||||
"""
|
||||
"""
|
||||
|
||||
includes = []
|
||||
if include != None:
|
||||
includes = [include]
|
||||
includes = []
|
||||
if include != None:
|
||||
includes = [include]
|
||||
|
||||
if internal_bootstrap_hack:
|
||||
# For pre-checked-in generated files, we add the internal_bootstrap_hack
|
||||
# which will skip the codegen action.
|
||||
proto_gen(
|
||||
name = name + "_genproto",
|
||||
srcs = srcs,
|
||||
deps = [s + "_genproto" for s in deps],
|
||||
includes = includes,
|
||||
protoc = protoc,
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
# An empty cc_library to make rule dependency consistent.
|
||||
native.cc_library(
|
||||
name = name,
|
||||
**kargs
|
||||
)
|
||||
return
|
||||
|
||||
grpc_cpp_plugin = None
|
||||
if use_grpc_plugin:
|
||||
grpc_cpp_plugin = "//external:grpc_cpp_plugin"
|
||||
|
||||
gen_srcs = _CcSrcs(srcs, use_grpc_plugin)
|
||||
gen_hdrs = _CcHdrs(srcs, use_grpc_plugin)
|
||||
outs = gen_srcs + gen_hdrs
|
||||
|
||||
if internal_bootstrap_hack:
|
||||
# For pre-checked-in generated files, we add the internal_bootstrap_hack
|
||||
# which will skip the codegen action.
|
||||
proto_gen(
|
||||
name=name + "_genproto",
|
||||
srcs=srcs,
|
||||
deps=[s + "_genproto" for s in deps],
|
||||
includes=includes,
|
||||
protoc=protoc,
|
||||
visibility=["//visibility:public"],
|
||||
name = name + "_genproto",
|
||||
srcs = srcs,
|
||||
deps = [s + "_genproto" for s in deps],
|
||||
includes = includes,
|
||||
protoc = protoc,
|
||||
plugin = grpc_cpp_plugin,
|
||||
plugin_language = "grpc",
|
||||
gen_cc = 1,
|
||||
outs = outs,
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
# An empty cc_library to make rule dependency consistent.
|
||||
|
||||
if default_runtime and not default_runtime in cc_libs:
|
||||
cc_libs = cc_libs + [default_runtime]
|
||||
if use_grpc_plugin:
|
||||
cc_libs = cc_libs + ["//external:grpc_lib"]
|
||||
|
||||
native.cc_library(
|
||||
name=name,
|
||||
**kargs)
|
||||
return
|
||||
|
||||
grpc_cpp_plugin = None
|
||||
if use_grpc_plugin:
|
||||
grpc_cpp_plugin = "//external:grpc_cpp_plugin"
|
||||
|
||||
gen_srcs = _CcSrcs(srcs, use_grpc_plugin)
|
||||
gen_hdrs = _CcHdrs(srcs, use_grpc_plugin)
|
||||
outs = gen_srcs + gen_hdrs
|
||||
|
||||
proto_gen(
|
||||
name=name + "_genproto",
|
||||
srcs=srcs,
|
||||
deps=[s + "_genproto" for s in deps],
|
||||
includes=includes,
|
||||
protoc=protoc,
|
||||
plugin=grpc_cpp_plugin,
|
||||
plugin_language="grpc",
|
||||
gen_cc=1,
|
||||
outs=outs,
|
||||
visibility=["//visibility:public"],
|
||||
)
|
||||
|
||||
if default_runtime and not default_runtime in cc_libs:
|
||||
cc_libs = cc_libs + [default_runtime]
|
||||
if use_grpc_plugin:
|
||||
cc_libs = cc_libs + ["//external:grpc_lib"]
|
||||
|
||||
native.cc_library(
|
||||
name=name,
|
||||
srcs=gen_srcs,
|
||||
hdrs=gen_hdrs,
|
||||
deps=cc_libs + deps,
|
||||
includes=includes,
|
||||
**kargs)
|
||||
name = name,
|
||||
srcs = gen_srcs,
|
||||
hdrs = gen_hdrs,
|
||||
deps = cc_libs + deps,
|
||||
includes = includes,
|
||||
**kargs
|
||||
)
|
||||
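The cc_proto_library macro above chains proto_gen and native.cc_library; a minimal BUILD-file sketch of a typical invocation (target and file names are illustrative, not taken from this commit):

load("@com_google_protobuf//:protobuf.bzl", "cc_proto_library")

cc_proto_library(
    name = "addressbook_cc_proto",  # illustrative
    srcs = ["addressbook.proto"],   # illustrative
    deps = [],                      # other cc_proto_library targets
    protoc = "@com_google_protobuf//:protoc",
    default_runtime = "@com_google_protobuf//:protobuf",
)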
|
||||
def internal_gen_well_known_protos_java(srcs):
|
||||
"""Bazel rule to generate the gen_well_known_protos_java genrule
|
||||
"""Bazel rule to generate the gen_well_known_protos_java genrule
|
||||
|
||||
Args:
|
||||
srcs: the well known protos
|
||||
"""
|
||||
root = Label("%s//protobuf_java" % (native.repository_name())).workspace_root
|
||||
pkg = native.package_name() + "/" if native.package_name() else ""
|
||||
if root == "":
|
||||
include = " -I%ssrc " % pkg
|
||||
else:
|
||||
include = " -I%s/%ssrc " % (root, pkg)
|
||||
native.genrule(
|
||||
name = "gen_well_known_protos_java",
|
||||
srcs = srcs,
|
||||
outs = [
|
||||
"wellknown.srcjar",
|
||||
],
|
||||
cmd = "$(location :protoc) --java_out=$(@D)/wellknown.jar" +
|
||||
" %s $(SRCS) " % include +
|
||||
" && mv $(@D)/wellknown.jar $(@D)/wellknown.srcjar",
|
||||
tools = [":protoc"],
|
||||
)
|
||||
Args:
|
||||
srcs: the well known protos
|
||||
"""
|
||||
root = Label("%s//protobuf_java" % (native.repository_name())).workspace_root
|
||||
pkg = native.package_name() + "/" if native.package_name() else ""
|
||||
if root == "":
|
||||
include = " -I%ssrc " % pkg
|
||||
else:
|
||||
include = " -I%s/%ssrc " % (root, pkg)
|
||||
native.genrule(
|
||||
name = "gen_well_known_protos_java",
|
||||
srcs = srcs,
|
||||
outs = [
|
||||
"wellknown.srcjar",
|
||||
],
|
||||
cmd = "$(location :protoc) --java_out=$(@D)/wellknown.jar" +
|
||||
" %s $(SRCS) " % include +
|
||||
" && mv $(@D)/wellknown.jar $(@D)/wellknown.srcjar",
|
||||
tools = [":protoc"],
|
||||
)
|
||||
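The genrule above shells out to :protoc and repackages the generated Java sources as wellknown.srcjar; a sketch of the call site, with an assumed label for the well-known protos:

internal_gen_well_known_protos_java(
    srcs = [":well_known_protos"],  # assumed filegroup containing the well-known .proto files
)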
|
||||
def internal_copied_filegroup(name, srcs, strip_prefix, dest, **kwargs):
|
||||
"""Macro to copy files to a different directory and then create a filegroup.
|
||||
"""Macro to copy files to a different directory and then create a filegroup.
|
||||
|
||||
This is used by the //:protobuf_python py_proto_library target to work around
|
||||
an issue caused by Python source files that are part of the same Python
|
||||
package being in separate directories.
|
||||
This is used by the //:protobuf_python py_proto_library target to work around
|
||||
an issue caused by Python source files that are part of the same Python
|
||||
package being in separate directories.
|
||||
|
||||
Args:
|
||||
srcs: The source files to copy and add to the filegroup.
|
||||
strip_prefix: Path to the root of the files to copy.
|
||||
dest: The directory to copy the source files into.
|
||||
**kwargs: extra arguments that will be passed to the filegroup.
|
||||
"""
|
||||
outs = [_RelativeOutputPath(s, strip_prefix, dest) for s in srcs]
|
||||
Args:
|
||||
srcs: The source files to copy and add to the filegroup.
|
||||
strip_prefix: Path to the root of the files to copy.
|
||||
dest: The directory to copy the source files into.
|
||||
**kwargs: extra arguments that will be passed to the filegroup.
|
||||
"""
|
||||
outs = [_RelativeOutputPath(s, strip_prefix, dest) for s in srcs]
|
||||
|
||||
native.genrule(
|
||||
name = name + "_genrule",
|
||||
srcs = srcs,
|
||||
outs = outs,
|
||||
cmd = " && ".join(
|
||||
["cp $(location %s) $(location %s)" %
|
||||
(s, _RelativeOutputPath(s, strip_prefix, dest)) for s in srcs]),
|
||||
)
|
||||
native.genrule(
|
||||
name = name + "_genrule",
|
||||
srcs = srcs,
|
||||
outs = outs,
|
||||
cmd = " && ".join(
|
||||
["cp $(location %s) $(location %s)" %
|
||||
(s, _RelativeOutputPath(s, strip_prefix, dest)) for s in srcs],
|
||||
),
|
||||
)
|
||||
|
||||
native.filegroup(
|
||||
name = name,
|
||||
srcs = outs,
|
||||
**kwargs)
|
||||
native.filegroup(
|
||||
name = name,
|
||||
srcs = outs,
|
||||
**kwargs
|
||||
)
|
||||
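A short sketch of internal_copied_filegroup, which copies each source under dest via the generated cp genrule and then exposes the copies as a filegroup; the paths here are illustrative:

internal_copied_filegroup(
    name = "copied_wkt_protos",                # illustrative
    srcs = ["src/google/protobuf/any.proto"],  # illustrative
    strip_prefix = "src",
    dest = "python",                           # copies land under python/google/protobuf/
)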
|
||||
def py_proto_library(
|
||||
name,
|
||||
srcs=[],
|
||||
deps=[],
|
||||
py_libs=[],
|
||||
py_extra_srcs=[],
|
||||
include=None,
|
||||
default_runtime="@com_google_protobuf//:protobuf_python",
|
||||
protoc="@com_google_protobuf//:protoc",
|
||||
use_grpc_plugin=False,
|
||||
srcs = [],
|
||||
deps = [],
|
||||
py_libs = [],
|
||||
py_extra_srcs = [],
|
||||
include = None,
|
||||
default_runtime = "@com_google_protobuf//:protobuf_python",
|
||||
protoc = "@com_google_protobuf//:protoc",
|
||||
use_grpc_plugin = False,
|
||||
**kargs):
|
||||
"""Bazel rule to create a Python protobuf library from proto source files
|
||||
"""Bazel rule to create a Python protobuf library from proto source files
|
||||
|
||||
NOTE: the rule is only an internal workaround to generate protos. The
|
||||
interface may change and the rule may be removed when bazel has introduced
|
||||
the native rule.
|
||||
NOTE: the rule is only an internal workaround to generate protos. The
|
||||
interface may change and the rule may be removed when bazel has introduced
|
||||
the native rule.
|
||||
|
||||
Args:
|
||||
name: the name of the py_proto_library.
|
||||
srcs: the .proto files of the py_proto_library.
|
||||
deps: a list of dependency labels; must be py_proto_library.
|
||||
py_libs: a list of other py_library targets depended on by the generated
|
||||
py_library.
|
||||
py_extra_srcs: extra source files that will be added to the output
|
||||
py_library. This attribute is used for internal bootstrapping.
|
||||
include: a string indicating the include path of the .proto files.
|
||||
default_runtime: the implicit default runtime which will be depended on by
|
||||
the generated py_library target.
|
||||
protoc: the label of the protocol compiler to generate the sources.
|
||||
use_grpc_plugin: a flag to indicate whether to call the grpc Python plugin
|
||||
when processing the proto files.
|
||||
**kargs: other keyword arguments that are passed to py_library.
|
||||
Args:
|
||||
name: the name of the py_proto_library.
|
||||
srcs: the .proto files of the py_proto_library.
|
||||
deps: a list of dependency labels; must be py_proto_library.
|
||||
py_libs: a list of other py_library targets depended on by the generated
|
||||
py_library.
|
||||
py_extra_srcs: extra source files that will be added to the output
|
||||
py_library. This attribute is used for internal bootstrapping.
|
||||
include: a string indicating the include path of the .proto files.
|
||||
default_runtime: the implicit default runtime which will be depended on by
|
||||
the generated py_library target.
|
||||
protoc: the label of the protocol compiler to generate the sources.
|
||||
use_grpc_plugin: a flag to indicate whether to call the grpc Python plugin
|
||||
when processing the proto files.
|
||||
**kargs: other keyword arguments that are passed to py_library.
|
||||
|
||||
"""
|
||||
outs = _PyOuts(srcs, use_grpc_plugin)
|
||||
"""
|
||||
outs = _PyOuts(srcs, use_grpc_plugin)
|
||||
|
||||
includes = []
|
||||
if include != None:
|
||||
includes = [include]
|
||||
includes = []
|
||||
if include != None:
|
||||
includes = [include]
|
||||
|
||||
grpc_python_plugin = None
|
||||
if use_grpc_plugin:
|
||||
grpc_python_plugin = "//external:grpc_python_plugin"
|
||||
# Note: Generated grpc code depends on Python grpc module. This dependency
|
||||
# is not explicitly listed in py_libs. Instead, host system is assumed to
|
||||
# have grpc installed.
|
||||
grpc_python_plugin = None
|
||||
if use_grpc_plugin:
|
||||
grpc_python_plugin = "//external:grpc_python_plugin"
|
||||
# Note: Generated grpc code depends on Python grpc module. This dependency
|
||||
# is not explicitly listed in py_libs. Instead, host system is assumed to
|
||||
# have grpc installed.
|
||||
|
||||
proto_gen(
|
||||
name=name + "_genproto",
|
||||
srcs=srcs,
|
||||
deps=[s + "_genproto" for s in deps],
|
||||
includes=includes,
|
||||
protoc=protoc,
|
||||
gen_py=1,
|
||||
outs=outs,
|
||||
visibility=["//visibility:public"],
|
||||
plugin=grpc_python_plugin,
|
||||
plugin_language="grpc"
|
||||
)
|
||||
proto_gen(
|
||||
name = name + "_genproto",
|
||||
srcs = srcs,
|
||||
deps = [s + "_genproto" for s in deps],
|
||||
includes = includes,
|
||||
protoc = protoc,
|
||||
gen_py = 1,
|
||||
outs = outs,
|
||||
visibility = ["//visibility:public"],
|
||||
plugin = grpc_python_plugin,
|
||||
plugin_language = "grpc",
|
||||
)
|
||||
|
||||
if default_runtime and not default_runtime in py_libs + deps:
|
||||
py_libs = py_libs + [default_runtime]
|
||||
if default_runtime and not default_runtime in py_libs + deps:
|
||||
py_libs = py_libs + [default_runtime]
|
||||
|
||||
native.py_library(
|
||||
name=name,
|
||||
srcs=outs+py_extra_srcs,
|
||||
deps=py_libs+deps,
|
||||
imports=includes,
|
||||
**kargs)
|
||||
native.py_library(
|
||||
name = name,
|
||||
srcs = outs + py_extra_srcs,
|
||||
deps = py_libs + deps,
|
||||
imports = includes,
|
||||
**kargs
|
||||
)
|
||||
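As with the C++ macro, a minimal sketch of invoking py_proto_library from a BUILD file (names are illustrative):

load("@com_google_protobuf//:protobuf.bzl", "py_proto_library")

py_proto_library(
    name = "addressbook_py_proto",  # illustrative
    srcs = ["addressbook.proto"],   # illustrative
    deps = [],                      # other py_proto_library targets
    protoc = "@com_google_protobuf//:protoc",
    default_runtime = "@com_google_protobuf//:protobuf_python",
)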
|
||||
def internal_protobuf_py_tests(
|
||||
name,
|
||||
modules=[],
|
||||
**kargs):
|
||||
"""Bazel rules to create batch tests for protobuf internal.
|
||||
name,
|
||||
modules = [],
|
||||
**kargs):
|
||||
"""Bazel rules to create batch tests for protobuf internal.
|
||||
|
||||
Args:
|
||||
name: the name of the rule.
|
||||
modules: a list of modules for tests. The macro will create a py_test for
|
||||
each of the parameters, with the source "google/protobuf/%s.py"
|
||||
kargs: extra parameters that will be passed into the py_test.
|
||||
|
||||
"""
|
||||
for m in modules:
|
||||
s = "python/google/protobuf/internal/%s.py" % m
|
||||
native.py_test(
|
||||
name="py_%s" % m,
|
||||
srcs=[s],
|
||||
main=s,
|
||||
**kargs)
|
||||
Args:
|
||||
name: the name of the rule.
|
||||
modules: a list of modules for tests. The macro will create a py_test for
|
||||
each of the parameters, with the source "google/protobuf/%s.py"
|
||||
kargs: extra parameters that will be passed into the py_test.
|
||||
|
||||
"""
|
||||
for m in modules:
|
||||
s = "python/google/protobuf/internal/%s.py" % m
|
||||
native.py_test(
|
||||
name = "py_%s" % m,
|
||||
srcs = [s],
|
||||
main = s,
|
||||
**kargs
|
||||
)
|
||||
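A sketch of how the batch-test macro above might be called; the module names below are plausible internal test modules, but the exact list used by protobuf's own BUILD file is not shown in this diff:

internal_protobuf_py_tests(
    name = "python_tests",
    modules = [
        "message_test",      # expands to python/google/protobuf/internal/message_test.py
        "text_format_test",
    ],
)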
|
||||
def check_protobuf_required_bazel_version():
|
||||
"""For WORKSPACE files, to check the installed version of bazel.
|
||||
"""For WORKSPACE files, to check the installed version of bazel.
|
||||
|
||||
This ensures bazel supports our approach to proto_library() depending on a
|
||||
copied filegroup. (Fixed in bazel 0.5.4)
|
||||
"""
|
||||
versions.check(minimum_bazel_version = "0.5.4")
|
||||
This ensures bazel supports our approach to proto_library() depending on a
|
||||
copied filegroup. (Fixed in bazel 0.5.4)
|
||||
"""
|
||||
versions.check(minimum_bazel_version = "0.5.4")
|
||||
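A sketch of the WORKSPACE-side usage of the version check above (repository name assumed to be com_google_protobuf):

load("@com_google_protobuf//:protobuf.bzl", "check_protobuf_required_bazel_version")

check_protobuf_required_bazel_version()  # fails the build if Bazel is older than 0.5.4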
|
@@ -208,6 +208,7 @@ libprotobuf_lite_la_SOURCES = \
  google/protobuf/generated_message_table_driven_lite.cc \
  google/protobuf/implicit_weak_message.cc \
  google/protobuf/message_lite.cc \
  google/protobuf/parse_context.cc \
  google/protobuf/repeated_field.cc \
  google/protobuf/wire_format_lite.cc \
  google/protobuf/io/coded_stream.cc \
@@ -431,7 +431,11 @@ class BigEndian {
} // namespace protobuf
} // namespace google

#ifdef PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
#define GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER 1
#else
#define GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER 0
#endif

#include <google/protobuf/port_undef.inc>
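The port header change above maps the user-facing PROTOBUF_ENABLE_EXPERIMENTAL_PARSER define onto an internal 0/1 macro. One possible way to flip it for a single Bazel target, sticking with the Starlark examples above (the target name and the choice of per-target copts are assumptions, and the protobuf runtime may need to be built with the same flag):

cc_binary(
    name = "my_proto_tool",                             # illustrative
    srcs = ["my_proto_tool.cc"],                        # illustrative
    copts = ["-DPROTOBUF_ENABLE_EXPERIMENTAL_PARSER"],  # the header then sets the internal macro to 1
    deps = ["@com_google_protobuf//:protobuf"],
)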