Merge pull request #1229 from keveman/unlimited_binary_proto

Added ALLOW_UNLIMITED_BINARY_PROTO macro and setting it when the Bazel build is invoked with --define=allow_oversize_protos=true
This commit is contained in:
Joshua Haberman 2016-02-17 16:09:35 -08:00
commit c40f8c1f54
2 changed files with 23 additions and 4 deletions

14
BUILD
View File

@@ -462,7 +462,7 @@ genrule(
name = "gen_well_known_protos_java",
srcs = WELL_KNOWN_PROTOS,
outs = [
"wellknown.srcjar"
"wellknown.srcjar",
],
cmd = "$(location :protoc) --java_out=$(@D)/wellknown.jar" +
" -Isrc $(SRCS) " +
@@ -539,7 +539,10 @@ cc_binary(
]),
copts = COPTS + [
"-DGOOGLE_PROTOBUF_HAS_ONEOF=1",
],
] + select({
"//conditions:default": [],
":allow_oversize_protos": ["-DPROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS=1"],
}),
includes = [
"python/",
"src/",
@@ -561,6 +564,13 @@ config_setting(
},
)
config_setting(
name = "allow_oversize_protos",
values = {
"define": "allow_oversize_protos=true",
},
)
py_proto_library(
name = "protobuf_python",
srcs = WELL_KNOWN_PROTOS,

9
python/google/protobuf/pyext/message.cc
View File

@@ -1921,6 +1921,15 @@ static PyObject* MergeFromString(CMessage* self, PyObject* arg) {
AssureWritable(self);
io::CodedInputStream input(
reinterpret_cast<const uint8*>(data), data_length);
#if PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS
// Protobuf has a 64MB limit built in, this code will override this. Please do
// not enable this unless you fully understand the implications: protobufs
// must all be kept in memory at the same time, so if they grow too big you
// may get OOM errors. The protobuf APIs do not provide any tools for
// processing protobufs in chunks. If you have protos this big you should
// break them up if it is at all convenient to do so.
input.SetTotalBytesLimit(INT_MAX, INT_MAX);
#endif // PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS
PyDescriptorPool* pool = GetDescriptorPoolForMessage(self);
input.SetExtensionRegistry(pool->pool, pool->message_factory);
bool success = self->message->MergePartialFromCodedStream(&input);