Merge branch 'beta-1' of github.com:google/protobuf into manual-merge
Change-Id: I83a93fdb119a643fbc884e6ec3624493f6270370
commit db45aa117a
@@ -1,103 +1,103 @@
include(GNUInstallDirs)

foreach(_library
libprotobuf-lite
libprotobuf
libprotoc)
set_property(TARGET ${_library}
PROPERTY INTERFACE_INCLUDE_DIRECTORIES
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>)
install(TARGETS ${_library} EXPORT protobuf-targets
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT ${_library}
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} COMPONENT ${_library}
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} COMPONENT ${_library})
endforeach()

install(TARGETS protoc EXPORT protobuf-targets
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT protoc)

if(TRUE)
file(STRINGS extract_includes.bat.in _extract_strings
REGEX "^copy")
foreach(_extract_string ${_extract_strings})
string(REPLACE "copy \${PROTOBUF_SOURCE_WIN32_PATH}\\" ""
_extract_string ${_extract_string})
string(REPLACE "\\" "/" _extract_string ${_extract_string})
string(REGEX MATCH "^[^ ]+"
_extract_from ${_extract_string})
string(REGEX REPLACE "^${_extract_from} ([^$]+)" "\\1"
_extract_to ${_extract_string})
get_filename_component(_extract_from "${protobuf_SOURCE_DIR}/${_extract_from}" ABSOLUTE)
get_filename_component(_extract_name ${_extract_to} NAME)
get_filename_component(_extract_to ${_extract_to} PATH)
string(REPLACE "include/" "${CMAKE_INSTALL_INCLUDEDIR}/"
_extract_to "${_extract_to}")
if(EXISTS "${_extract_from}")
install(FILES "${_extract_from}"
DESTINATION "${_extract_to}"
COMPONENT protobuf-headers
RENAME "${_extract_name}")
else()
message(AUTHOR_WARNING "The file \"${_extract_from}\" is listed in "
"\"${protobuf_SOURCE_DIR}/cmake/extract_includes.bat.in\" "
"but there not exists. The file will not be installed.")
endif()
endforeach()
endif()

# Internal function for parsing auto tools scripts
function(_protobuf_auto_list FILE_NAME VARIABLE)
file(STRINGS ${FILE_NAME} _strings)
set(_list)
foreach(_string ${_strings})
set(_found)
string(REGEX MATCH "^[ \t]*${VARIABLE}[ \t]*=[ \t]*" _found "${_string}")
if(_found)
string(LENGTH "${_found}" _length)
string(SUBSTRING "${_string}" ${_length} -1 _draft_list)
foreach(_item ${_draft_list})
string(STRIP "${_item}" _item)
list(APPEND _list "${_item}")
endforeach()
endif()
endforeach()
set(${VARIABLE} ${_list} PARENT_SCOPE)
endfunction()

# Install well-known type proto files
_protobuf_auto_list("../src/Makefile.am" nobase_dist_proto_DATA)
foreach(_file ${nobase_dist_proto_DATA})
get_filename_component(_file_from "../src/${_file}" ABSOLUTE)
get_filename_component(_file_name ${_file} NAME)
get_filename_component(_file_path ${_file} PATH)
if(EXISTS "${_file_from}")
install(FILES "${_file_from}"
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${_file_path}"
COMPONENT protobuf-protos
RENAME "${_file_name}")
else()
message(AUTHOR_WARNING "The file \"${_file_from}\" is listed in "
"\"${protobuf_SOURCE_DIR}/../src/Makefile.am\" as nobase_dist_proto_DATA "
"but there not exists. The file will not be installed.")
endif()
endforeach()

# Export configuration

install(EXPORT protobuf-targets
DESTINATION "lib/cmake/protobuf"
COMPONENT protobuf-export)

configure_file(protobuf-config.cmake.in
protobuf-config.cmake @ONLY)
configure_file(protobuf-config-version.cmake.in
protobuf-config-version.cmake @ONLY)
configure_file(protobuf-module.cmake.in
protobuf-module.cmake @ONLY)

install(FILES
"${protobuf_BINARY_DIR}/protobuf-config.cmake"
"${protobuf_BINARY_DIR}/protobuf-config-version.cmake"
"${protobuf_BINARY_DIR}/protobuf-module.cmake"
DESTINATION "lib/cmake/protobuf"
COMPONENT protobuf-export)
@@ -1 +1 @@
set(PACKAGE_VERSION @protobuf_VERSION@)
@@ -1,27 +1,27 @@
# Version info variables
set(PROTOBUF_VERSION "@protobuf_VERSION@")
set(PROTOBUF_VERSION_STRING "@protobuf_VERSION_STRING@")

# Current dir
get_filename_component(_PROTOBUF_PACKAGE_PREFIX
"${CMAKE_CURRENT_LIST_FILE}" PATH)

# Imported targets
include("${_PROTOBUF_PACKAGE_PREFIX}/protobuf-targets.cmake")

# Compute the installation prefix relative to this file.
get_filename_component(_PROTOBUF_IMPORT_PREFIX
"${_PROTOBUF_PACKAGE_PREFIX}" PATH)
get_filename_component(_PROTOBUF_IMPORT_PREFIX
"${_PROTOBUF_IMPORT_PREFIX}" PATH)
get_filename_component(_PROTOBUF_IMPORT_PREFIX
"${_PROTOBUF_IMPORT_PREFIX}" PATH)

# CMake FindProtobuf module compatible file
if(NOT DEFINED PROTOBUF_MODULE_COMPATIBLE OR "${PROTOBUF_MODULE_COMPATIBLE}")
include("${_PROTOBUF_PACKAGE_PREFIX}/protobuf-module.cmake")
endif()

# Cleanup temporary variables.
set(_PROTOBUF_PACKAGE_PREFIX)
set(_PROTOBUF_IMPORT_PREFIX)
@@ -99,6 +99,12 @@ set(common_test_files
${protobuf_source_dir}/src/google/protobuf/testing/googletest.cc
)

set(common_lite_test_files
${protobuf_source_dir}/src/google/protobuf/arena_test_util.cc
${protobuf_source_dir}/src/google/protobuf/map_lite_test_util.cc
${protobuf_source_dir}/src/google/protobuf/test_util_lite.cc
)

set(tests_files
${protobuf_source_dir}/src/google/protobuf/any_test.cc
${protobuf_source_dir}/src/google/protobuf/arena_unittest.cc
@@ -179,10 +185,13 @@ add_executable(test_plugin ${test_plugin_files})
target_link_libraries(test_plugin libprotoc libprotobuf gmock)

set(lite_test_files
${protobuf_source_dir}/src/google/protobuf/arena_test_util.cc
${protobuf_source_dir}/src/google/protobuf/lite_unittest.cc
${protobuf_source_dir}/src/google/protobuf/map_lite_test_util.cc
${protobuf_source_dir}/src/google/protobuf/test_util_lite.cc
)
add_executable(lite-test ${lite_test_files} ${lite_test_proto_files})
add_executable(lite-test ${lite_test_files} ${common_lite_test_files} ${lite_test_proto_files})
target_link_libraries(lite-test libprotobuf-lite)

set(lite_arena_test_files
${protobuf_source_dir}/src/google/protobuf/lite_arena_unittest.cc
)
add_executable(lite-arena-test ${lite_arena_test_files} ${common_lite_test_files} ${lite_test_proto_files})
target_link_libraries(lite-arena-test libprotobuf-lite gmock_main)
@@ -142,6 +142,7 @@
<arg value="../src/google/protobuf/unittest_enormous_descriptor.proto" />
<arg value="../src/google/protobuf/unittest_no_generic_services.proto" />
<arg value="../src/google/protobuf/unittest_well_known_types.proto" />
<arg value="src/test/java/com/google/protobuf/any_test.proto" />
<arg value="src/test/java/com/google/protobuf/field_presence_test.proto" />
<arg value="src/test/java/com/google/protobuf/map_for_proto2_lite_test.proto" />
<arg value="src/test/java/com/google/protobuf/map_for_proto2_test.proto" />
@@ -294,10 +294,10 @@ public abstract class ByteString implements Iterable<Byte>, Serializable {
* <b>Performance notes:</b> The returned {@code ByteString} is an
* immutable tree of byte arrays ("chunks") of the stream data. The
* first chunk is small, with subsequent chunks each being double
* the size, up to 8K. If the caller knows the precise length of
* the stream and wishes to avoid all unnecessary copies and
* allocations, consider using the two-argument version of this
* method, below.
* the size, up to 8K.
*
* <p>Each byte read from the input stream will be copied twice to ensure
* that the resulting ByteString is truly immutable.
*
* @param streamToDrain The source stream, which is read completely
* but not closed.
@@ -320,12 +320,10 @@ public abstract class ByteString implements Iterable<Byte>, Serializable {
*
* <b>Performance notes:</b> The returned {@code ByteString} is an
* immutable tree of byte arrays ("chunks") of the stream data. The
* chunkSize parameter sets the size of these byte arrays. In
* particular, if the chunkSize is precisely the same as the length
* of the stream, unnecessary allocations and copies will be
* avoided. Otherwise, the chunks will be of the given size, except
* for the last chunk, which will be resized (via a reallocation and
* copy) to contain the remainder of the stream.
* chunkSize parameter sets the size of these byte arrays.
*
* <p>Each byte read from the input stream will be copied twice to ensure
* that the resulting ByteString is truly immutable.
*
* @param streamToDrain The source stream, which is read completely
* but not closed.
@@ -386,6 +384,7 @@ public abstract class ByteString implements Iterable<Byte>, Serializable {
if (bytesRead == 0) {
return null;
} else {
// Always make a copy since InputStream could steal a reference to buf.
return ByteString.copyFrom(buf, 0, bytesRead);
}
}
@@ -736,7 +735,8 @@ public abstract class ByteString implements Iterable<Byte>, Serializable {
* returns the number of bytes remaining in the stream. The methods
* {@link InputStream#read(byte[])}, {@link InputStream#read(byte[],int,int)}
* and {@link InputStream#skip(long)} will read/skip as many bytes as are
* available.
* available. The method {@link InputStream#markSupported()} returns
* {@code true}.
* <p>
* The methods in the returned {@link InputStream} might <b>not</b> be
* thread safe.
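Note on the readFrom() javadoc changes above: the behavior is easiest to see from the caller's side. A minimal usage sketch follows; the payload and class name here are invented for illustration, and only ByteString.readFrom itself is part of the API touched by this diff.

import com.google.protobuf.ByteString;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class ReadFromExample {
  public static void main(String[] args) throws IOException {
    // Invented payload; any stream of unknown length works the same way.
    byte[] payload = new byte[16 * 1024];
    InputStream in = new ByteArrayInputStream(payload);

    // One-argument form: chunks start small and double up to 8K, per the updated javadoc.
    // Each byte is copied twice so the resulting ByteString is truly immutable.
    ByteString data = ByteString.readFrom(in);
    System.out.println("read " + data.size() + " bytes");
  }
}

When the exact stream length is known, the two-argument readFrom(streamToDrain, chunkSize) overload can size the chunks accordingly.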
@@ -30,9 +30,13 @@
package com.google.protobuf;

import com.google.protobuf.Utf8.UnpairedSurrogateException;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
* Encodes and writes protocol message fields.
@@ -49,6 +53,10 @@ import java.nio.ByteBuffer;
* @author kneton@google.com Kenton Varda
*/
public final class CodedOutputStream {

private static final Logger logger = Logger.getLogger(CodedOutputStream.class.getName());

// TODO(dweis): Consider migrating to a ByteBuffer.
private final byte[] buffer;
private final int limit;
private int position;
@@ -415,15 +423,87 @@ public final class CodedOutputStream {
}

/** Write a {@code string} field to the stream. */
// TODO(dweis): Document behavior on ill-formed UTF-16 input.
public void writeStringNoTag(final String value) throws IOException {
try {
efficientWriteStringNoTag(value);
} catch (UnpairedSurrogateException e) {
logger.log(Level.WARNING,
"Converting ill-formed UTF-16. Your Protocol Buffer will not round trip correctly!", e);
inefficientWriteStringNoTag(value);
}
}

/** Write a {@code string} field to the stream. */
private void inefficientWriteStringNoTag(final String value) throws IOException {
// Unfortunately there does not appear to be any way to tell Java to encode
// UTF-8 directly into our buffer, so we have to let it create its own byte
// array and then copy.
// TODO(dweis): Consider using nio Charset methods instead.
final byte[] bytes = value.getBytes(Internal.UTF_8);
writeRawVarint32(bytes.length);
writeRawBytes(bytes);
}

/**
* Write a {@code string} field to the stream efficiently. If the {@code string} is malformed,
* this method rolls back its changes and throws an {@link UnpairedSurrogateException} with the
* intent that the caller will catch and retry with {@link #inefficientWriteStringNoTag(String)}.
*
* @param value the string to write to the stream
*
* @throws UnpairedSurrogateException when {@code value} is ill-formed UTF-16.
*/
private void efficientWriteStringNoTag(final String value) throws IOException {
// UTF-8 byte length of the string is at least its UTF-16 code unit length (value.length()),
// and at most 3 times of it. We take advantage of this in both branches below.
final int maxLength = value.length() * Utf8.MAX_BYTES_PER_CHAR;
final int maxLengthVarIntSize = computeRawVarint32Size(maxLength);

// If we are streaming and the potential length is too big to fit in our buffer, we take the
// slower path. Otherwise, we're good to try the fast path.
if (output != null && maxLengthVarIntSize + maxLength > limit - position) {
// Allocate a byte[] that we know can fit the string and encode into it. String.getBytes()
// does the same internally and then does *another copy* to return a byte[] of exactly the
// right size. We can skip that copy and just writeRawBytes up to the actualLength of the
// UTF-8 encoded bytes.
final byte[] encodedBytes = new byte[maxLength];
int actualLength = Utf8.encode(value, encodedBytes, 0, maxLength);
writeRawVarint32(actualLength);
writeRawBytes(encodedBytes, 0, actualLength);
} else {
// Optimize for the case where we know this length results in a constant varint length as this
// saves a pass for measuring the length of the string.
final int minLengthVarIntSize = computeRawVarint32Size(value.length());
int oldPosition = position;
final int length;
try {
if (minLengthVarIntSize == maxLengthVarIntSize) {
position = oldPosition + minLengthVarIntSize;
int newPosition = Utf8.encode(value, buffer, position, limit - position);
// Since this class is stateful and tracks the position, we rewind and store the state,
// prepend the length, then reset it back to the end of the string.
position = oldPosition;
length = newPosition - oldPosition - minLengthVarIntSize;
writeRawVarint32(length);
position = newPosition;
} else {
length = Utf8.encodedLength(value);
writeRawVarint32(length);
position = Utf8.encode(value, buffer, position, limit - position);
}
} catch (UnpairedSurrogateException e) {
// Be extra careful and restore the original position for retrying the write with the less
// efficient path.
position = oldPosition;
throw e;
} catch (ArrayIndexOutOfBoundsException e) {
throw new OutOfSpaceException(e);
}
totalBytesWritten += length;
}
}

/** Write a {@code group} field to the stream. */
public void writeGroupNoTag(final MessageLite value) throws IOException {
value.writeTo(this);
@@ -826,9 +906,16 @@ public final class CodedOutputStream {
* {@code string} field.
*/
public static int computeStringSizeNoTag(final String value) {
final byte[] bytes = value.getBytes(Internal.UTF_8);
return computeRawVarint32Size(bytes.length) +
bytes.length;
int length;
try {
length = Utf8.encodedLength(value);
} catch (UnpairedSurrogateException e) {
// TODO(dweis): Consider using nio Charset methods instead.
final byte[] bytes = value.getBytes(Internal.UTF_8);
length = bytes.length;
}

return computeRawVarint32Size(length) + length;
}

/**
@@ -1007,9 +1094,15 @@ public final class CodedOutputStream {
public static class OutOfSpaceException extends IOException {
private static final long serialVersionUID = -6947486886997889499L;

private static final String MESSAGE =
"CodedOutputStream was writing to a flat byte array and ran out of space.";

OutOfSpaceException() {
super("CodedOutputStream was writing to a flat byte array and ran " +
"out of space.");
super(MESSAGE);
}

OutOfSpaceException(Throwable cause) {
super(MESSAGE, cause);
}
}
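The fast path in efficientWriteStringNoTag() above hinges on the observation that the UTF-8 length of a String lies between value.length() and 3 * value.length(); when both bounds encode to a varint of the same width, the length prefix can be reserved before the string is encoded. A standalone sketch of that check follows; varint32Size mirrors the computation computeRawVarint32Size performs, and the sample string is arbitrary.

final class VarintSizeDemo {
  // Number of bytes needed to encode value as an unsigned varint
  // (7 payload bits per byte, as in the protobuf wire format).
  static int varint32Size(int value) {
    if ((value & (~0 << 7)) == 0) return 1;
    if ((value & (~0 << 14)) == 0) return 2;
    if ((value & (~0 << 21)) == 0) return 3;
    if ((value & (~0 << 28)) == 0) return 4;
    return 5;
  }

  public static void main(String[] args) {
    String value = "hello, protobuf";       // arbitrary example input
    int minLength = value.length();         // lower bound on the UTF-8 byte length
    int maxLength = value.length() * 3;     // upper bound: at most 3 bytes per UTF-16 char
    boolean sameWidth = varint32Size(minLength) == varint32Size(maxLength);
    System.out.println(sameWidth
        ? "fast path: length prefix width is known before encoding"
        : "slow path: the encoded length must be measured first");
  }
}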
@@ -31,6 +31,7 @@
package com.google.protobuf;

import com.google.protobuf.DescriptorProtos.*;
import com.google.protobuf.Descriptors.FileDescriptor.Syntax;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
@@ -912,7 +913,17 @@ public final class Descriptors {

/** For internal use only. */
public boolean needsUtf8Check() {
return (type == Type.STRING) && (getFile().getOptions().getJavaStringCheckUtf8());
if (type != Type.STRING) {
return false;
}
if (getContainingType().getOptions().getMapEntry()) {
// Always enforce strict UTF-8 checking for map fields.
return true;
}
if (getFile().getSyntax() == Syntax.PROTO3) {
return true;
}
return getFile().getOptions().getJavaStringCheckUtf8();
}

public boolean isMapField() {
@@ -1118,9 +1129,9 @@ public final class Descriptors {
static {
// Refuse to init if someone added a new declared type.
if (Type.values().length != FieldDescriptorProto.Type.values().length) {
throw new RuntimeException(
"descriptor.proto has a new declared type but Desrciptors.java " +
"wasn't updated.");
throw new RuntimeException(""
+ "descriptor.proto has a new declared type but Descriptors.java "
+ "wasn't updated.");
}
}
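The new needsUtf8Check() body above encodes three rules: string fields inside map entries are always checked, proto3 files are always checked, and proto2 files fall back to the java_string_check_utf8 file option. A standalone sketch of the same decision, using plain booleans rather than the real descriptor API:

final class Utf8CheckPolicy {
  static boolean needsUtf8Check(boolean isStringField, boolean isMapEntryField,
      boolean isProto3File, boolean javaStringCheckUtf8Option) {
    if (!isStringField) {
      return false;
    }
    if (isMapEntryField) {
      return true;  // map fields are always strictly checked
    }
    if (isProto3File) {
      return true;  // proto3 always enforces valid UTF-8
    }
    return javaStringCheckUtf8Option;  // proto2 falls back to the file option
  }

  public static void main(String[] args) {
    System.out.println(needsUtf8Check(true, false, true, false));   // proto3 string -> true
    System.out.println(needsUtf8Check(true, false, false, false));  // proto2 string, option off -> false
  }
}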
@@ -121,21 +121,43 @@ public abstract class GeneratedMessage extends AbstractMessage
final TreeMap<FieldDescriptor, Object> result =
new TreeMap<FieldDescriptor, Object>();
final Descriptor descriptor = internalGetFieldAccessorTable().descriptor;
for (final FieldDescriptor field : descriptor.getFields()) {
if (field.isRepeated()) {
final List<?> value = (List<?>) getField(field);
if (!value.isEmpty()) {
result.put(field, value);
final List<FieldDescriptor> fields = descriptor.getFields();

for (int i = 0; i < fields.size(); i++) {
FieldDescriptor field = fields.get(i);
final OneofDescriptor oneofDescriptor = field.getContainingOneof();

/*
* If the field is part of a Oneof, then at maximum one field in the Oneof is set
* and it is not repeated. There is no need to iterate through the others.
*/
if (oneofDescriptor != null) {
// Skip other fields in the Oneof we know are not set
i += oneofDescriptor.getFieldCount() - 1;
if (!hasOneof(oneofDescriptor)) {
// If no field is set in the Oneof, skip all the fields in the Oneof
continue;
}
// Get the pointer to the only field which is set in the Oneof
field = getOneofFieldDescriptor(oneofDescriptor);
} else {
if (hasField(field)) {
if (getBytesForString
&& field.getJavaType() == FieldDescriptor.JavaType.STRING) {
result.put(field, getFieldRaw(field));
} else {
result.put(field, getField(field));
// If we are not in a Oneof, we need to check if the field is set and if it is repeated
if (field.isRepeated()) {
final List<?> value = (List<?>) getField(field);
if (!value.isEmpty()) {
result.put(field, value);
}
continue;
}
if (!hasField(field)) {
continue;
}
}
// Add the field to the map
if (getBytesForString && field.getJavaType() == FieldDescriptor.JavaType.STRING) {
result.put(field, getFieldRaw(field));
} else {
result.put(field, getField(field));
}
}
return result;
@@ -398,17 +420,40 @@ public abstract class GeneratedMessage extends AbstractMessage
final TreeMap<FieldDescriptor, Object> result =
new TreeMap<FieldDescriptor, Object>();
final Descriptor descriptor = internalGetFieldAccessorTable().descriptor;
for (final FieldDescriptor field : descriptor.getFields()) {
if (field.isRepeated()) {
final List value = (List) getField(field);
if (!value.isEmpty()) {
result.put(field, value);
final List<FieldDescriptor> fields = descriptor.getFields();

for (int i = 0; i < fields.size(); i++) {
FieldDescriptor field = fields.get(i);
final OneofDescriptor oneofDescriptor = field.getContainingOneof();

/*
* If the field is part of a Oneof, then at maximum one field in the Oneof is set
* and it is not repeated. There is no need to iterate through the others.
*/
if (oneofDescriptor != null) {
// Skip other fields in the Oneof we know are not set
i += oneofDescriptor.getFieldCount() - 1;
if (!hasOneof(oneofDescriptor)) {
// If no field is set in the Oneof, skip all the fields in the Oneof
continue;
}
// Get the pointer to the only field which is set in the Oneof
field = getOneofFieldDescriptor(oneofDescriptor);
} else {
if (hasField(field)) {
result.put(field, getField(field));
// If we are not in a Oneof, we need to check if the field is set and if it is repeated
if (field.isRepeated()) {
final List<?> value = (List<?>) getField(field);
if (!value.isEmpty()) {
result.put(field, value);
}
continue;
}
if (!hasField(field)) {
continue;
}
}
// Add the field to the map
result.put(field, getField(field));
}
return result;
}
@@ -2696,4 +2741,38 @@ public abstract class GeneratedMessage extends AbstractMessage

return (Extension<MessageType, T>) extension;
}

protected static int computeStringSize(final int fieldNumber, final Object value) {
if (value instanceof String) {
return CodedOutputStream.computeStringSize(fieldNumber, (String) value);
} else {
return CodedOutputStream.computeBytesSize(fieldNumber, (ByteString) value);
}
}

protected static int computeStringSizeNoTag(final Object value) {
if (value instanceof String) {
return CodedOutputStream.computeStringSizeNoTag((String) value);
} else {
return CodedOutputStream.computeBytesSizeNoTag((ByteString) value);
}
}

protected static void writeString(
CodedOutputStream output, final int fieldNumber, final Object value) throws IOException {
if (value instanceof String) {
output.writeString(fieldNumber, (String) value);
} else {
output.writeBytes(fieldNumber, (ByteString) value);
}
}

protected static void writeStringNoTag(
CodedOutputStream output, final Object value) throws IOException {
if (value instanceof String) {
output.writeStringNoTag((String) value);
} else {
output.writeBytesNoTag((ByteString) value);
}
}
}
@@ -48,7 +48,6 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
* Lite version of {@link GeneratedMessage}.
@@ -60,24 +59,6 @@ public abstract class GeneratedMessageLite<
BuilderType extends GeneratedMessageLite.Builder<MessageType, BuilderType>>
extends AbstractMessageLite
implements Serializable {

/**
* Holds all the {@link PrototypeHolder}s for loaded classes.
*/
// TODO(dweis): Consider different concurrency values.
// TODO(dweis): This will prevent garbage collection of the class loader.
// Ideally we'd use something like ClassValue but that's Java 7 only.
private static final Map<Class<?>, PrototypeHolder<?, ?>> PROTOTYPE_MAP =
new ConcurrentHashMap<Class<?>, PrototypeHolder<?, ?>>();

// For use by generated code only.
protected static <
MessageType extends GeneratedMessageLite<MessageType, BuilderType>,
BuilderType extends GeneratedMessageLite.Builder<
MessageType, BuilderType>> void onLoad(Class<MessageType> clazz,
PrototypeHolder<MessageType, BuilderType> protoTypeHolder) {
PROTOTYPE_MAP.put(clazz, protoTypeHolder);
}

private static final long serialVersionUID = 1L;
@@ -90,20 +71,17 @@ public abstract class GeneratedMessageLite<

@SuppressWarnings("unchecked") // Guaranteed by runtime.
public final Parser<MessageType> getParserForType() {
return (Parser<MessageType>) PROTOTYPE_MAP
.get(getClass()).getParserForType();
return (Parser<MessageType>) dynamicMethod(MethodToInvoke.GET_PARSER);
}

@SuppressWarnings("unchecked") // Guaranteed by runtime.
public final MessageType getDefaultInstanceForType() {
return (MessageType) PROTOTYPE_MAP
.get(getClass()).getDefaultInstanceForType();
return (MessageType) dynamicMethod(MethodToInvoke.GET_DEFAULT_INSTANCE);
}

@SuppressWarnings("unchecked") // Guaranteed by runtime.
public final BuilderType newBuilderForType() {
return (BuilderType) PROTOTYPE_MAP
.get(getClass()).newBuilderForType();
return (BuilderType) dynamicMethod(MethodToInvoke.NEW_BUILDER);
}

/**
@@ -141,7 +119,9 @@ public abstract class GeneratedMessageLite<
MERGE_FROM,
MAKE_IMMUTABLE,
NEW_INSTANCE,
NEW_BUILDER;
NEW_BUILDER,
GET_DEFAULT_INSTANCE,
GET_PARSER;
}

/**
@@ -168,9 +148,21 @@ public abstract class GeneratedMessageLite<
* <p>
* For use by generated code only.
*/
protected abstract Object dynamicMethod(
MethodToInvoke method,
Object... args);
protected abstract Object dynamicMethod(MethodToInvoke method, Object arg0, Object arg1);

/**
* Same as {@link #dynamicMethod(MethodToInvoke, Object, Object)} with {@code null} padding.
*/
protected Object dynamicMethod(MethodToInvoke method, Object arg0) {
return dynamicMethod(method, arg0, null);
}

/**
* Same as {@link #dynamicMethod(MethodToInvoke, Object, Object)} with {@code null} padding.
*/
protected Object dynamicMethod(MethodToInvoke method) {
return dynamicMethod(method, null, null);
}

/**
* Merge some unknown fields into the {@link UnknownFieldSetLite} for this
@@ -1059,18 +1051,22 @@ public abstract class GeneratedMessageLite<
@SuppressWarnings("unchecked")
protected Object readResolve() throws ObjectStreamException {
try {
Class messageClass = Class.forName(messageClassName);
Parser<?> parser =
(Parser<?>) messageClass.getField("PARSER").get(null);
return parser.parsePartialFrom(asBytes);
Class<?> messageClass = Class.forName(messageClassName);
java.lang.reflect.Field defaultInstanceField =
messageClass.getDeclaredField("DEFAULT_INSTANCE");
defaultInstanceField.setAccessible(true);
MessageLite defaultInstance = (MessageLite) defaultInstanceField.get(null);
return defaultInstance.newBuilderForType()
.mergeFrom(asBytes)
.buildPartial();
} catch (ClassNotFoundException e) {
throw new RuntimeException("Unable to find proto buffer class", e);
throw new RuntimeException("Unable to find proto buffer class: " + messageClassName, e);
} catch (NoSuchFieldException e) {
throw new RuntimeException("Unable to find PARSER", e);
throw new RuntimeException("Unable to find DEFAULT_INSTANCE in " + messageClassName, e);
} catch (SecurityException e) {
throw new RuntimeException("Unable to call PARSER", e);
throw new RuntimeException("Unable to call DEFAULT_INSTANCE in " + messageClassName, e);
} catch (IllegalAccessException e) {
throw new RuntimeException("Unable to call parseFrom method", e);
throw new RuntimeException("Unable to call parsePartialFrom", e);
} catch (InvalidProtocolBufferException e) {
throw new RuntimeException("Unable to understand proto buffer", e);
}
@@ -1103,45 +1099,6 @@ public abstract class GeneratedMessageLite<

return (GeneratedExtension<MessageType, T>) extension;
}

/**
* Represents the state needed to implement *ForType methods. Generated code
* must provide a static singleton instance by adding it with
* {@link GeneratedMessageLite#onLoad(Class, PrototypeHolder)} on class load.
* <ul>
* <li>{@link #getDefaultInstanceForType()}
* <li>{@link #getParserForType()}
* <li>{@link #newBuilderForType()}
* </ul>
* This allows us to trade three generated methods for a static Map.
*/
protected static class PrototypeHolder<
MessageType extends GeneratedMessageLite<MessageType, BuilderType>,
BuilderType extends GeneratedMessageLite.Builder<
MessageType, BuilderType>> {

private final MessageType defaultInstance;
private final Parser<MessageType> parser;

public PrototypeHolder(
MessageType defaultInstance, Parser<MessageType> parser) {
this.defaultInstance = defaultInstance;
this.parser = parser;
}

public MessageType getDefaultInstanceForType() {
return defaultInstance;
}

public Parser<MessageType> getParserForType() {
return parser;
}

@SuppressWarnings("unchecked") // Guaranteed by runtime.
public BuilderType newBuilderForType() {
return (BuilderType) defaultInstance.toBuilder();
}
}

/**
* A static helper method for checking if a message is initialized, optionally memoizing.
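The getParserForType()/getDefaultInstanceForType()/newBuilderForType() changes above replace the static PROTOTYPE_MAP lookup with a single dynamicMethod(...) dispatcher implemented by each generated message. A self-contained sketch of that dispatch pattern follows; the class and return values are invented stand-ins, since real generated code returns an actual Parser and Builder.

final class DynamicDispatchSketch {
  enum MethodToInvoke { GET_DEFAULT_INSTANCE, GET_PARSER, NEW_BUILDER }

  static final class FakeMessage {
    static final FakeMessage DEFAULT_INSTANCE = new FakeMessage();

    Object dynamicMethod(MethodToInvoke method) {
      switch (method) {
        case GET_DEFAULT_INSTANCE:
          return DEFAULT_INSTANCE;
        case GET_PARSER:
          return "parser-for-FakeMessage";   // stand-in for a Parser<FakeMessage>
        case NEW_BUILDER:
          return "builder-for-FakeMessage";  // stand-in for a Builder
        default:
          throw new AssertionError();
      }
    }
  }

  public static void main(String[] args) {
    FakeMessage message = new FakeMessage();
    System.out.println(
        message.dynamicMethod(MethodToInvoke.GET_DEFAULT_INSTANCE) == FakeMessage.DEFAULT_INSTANCE);
  }
}

This removes the ConcurrentHashMap keyed by Class, which the old TODO noted would prevent garbage collection of the class loader.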
@@ -31,6 +31,7 @@
package com.google.protobuf;

import java.io.IOException;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.AbstractList;
@@ -358,6 +359,17 @@ public class Internal {
}
}

@SuppressWarnings("unchecked")
public static <T extends MessageLite> T getDefaultInstance(Class<T> clazz) {
try {
Method method = clazz.getMethod("getDefaultInstance");
return (T) method.invoke(method);
} catch (Exception e) {
throw new RuntimeException(
"Failed to get default instance for " + clazz, e);
}
}

/**
* An empty byte array constant used in generated code.
*/
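The new Internal.getDefaultInstance(Class) helper above looks up the generated getDefaultInstance() method reflectively. A standalone sketch of the same lookup against a hypothetical message class follows; FakeMessage is invented for illustration, and real callers pass a generated protobuf class.

import java.lang.reflect.Method;

final class DefaultInstanceLookup {
  public static final class FakeMessage {
    private static final FakeMessage DEFAULT = new FakeMessage();
    public static FakeMessage getDefaultInstance() {
      return DEFAULT;
    }
  }

  static <T> T getDefaultInstance(Class<T> clazz) {
    try {
      Method method = clazz.getMethod("getDefaultInstance");
      // The receiver argument is ignored when invoking a static method.
      return clazz.cast(method.invoke(null));
    } catch (Exception e) {
      throw new RuntimeException("Failed to get default instance for " + clazz, e);
    }
  }

  public static void main(String[] args) {
    FakeMessage def = getDefaultInstance(FakeMessage.class);
    System.out.println(def == FakeMessage.getDefaultInstance());
  }
}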
|
@@ -69,7 +69,7 @@ public class InvalidProtocolBufferException extends IOException {
static InvalidProtocolBufferException truncatedMessage() {
return new InvalidProtocolBufferException(
"While parsing a protocol message, the input ended unexpectedly " +
"in the middle of a field. This could mean either than the " +
"in the middle of a field. This could mean either that the " +
"input has been truncated or that an embedded message " +
"misreported its own length.");
}
@@ -215,6 +215,11 @@ public class LazyStringArrayList extends AbstractProtobufList<String>
modCount++;
}

@Override
public Object getRaw(int index) {
return list.get(index);
}

// @Override
public ByteString getByteString(int index) {
Object o = list.get(index);
@@ -56,7 +56,18 @@ public interface LazyStringList extends ProtocolStringList {
* ({@code index < 0 || index >= size()})
*/
ByteString getByteString(int index);

/**
* Returns the element at the specified position in this list as an Object
* that will either be a String or a ByteString.
*
* @param index index of the element to return
* @return the element at the specified position in this list
* @throws IndexOutOfBoundsException if the index is out of range
* ({@code index < 0 || index >= size()})
*/
Object getRaw(int index);

/**
* Returns the element at the specified position in this list as byte[].
*
@@ -121,6 +121,9 @@ public interface Message extends MessageLite, MessageOrBuilder {
* using the same merging rules.<br>
* * For repeated fields, the elements in {@code other} are concatenated
* with the elements in this message.
* * For oneof groups, if the other message has one of the fields set,
* the group of this message is cleared and replaced by the field
* of the other message, so that the oneof constraint is preserved.
*
* This is equivalent to the {@code Message::MergeFrom} method in C++.
*/
@@ -73,7 +73,7 @@ public class RepeatedFieldBuilder
private GeneratedMessage.BuilderParent parent;

// List of messages. Never null. It may be immutable, in which case
// isMessagesListImmutable will be true. See note below.
// isMessagesListMutable will be false. See note below.
private List<MType> messages;

// Whether messages is an mutable array that can be modified.
@@ -31,6 +31,7 @@
package com.google.protobuf;

import java.io.IOException;
import java.util.Arrays;

/**
* {@code UnknownFieldSetLite} is used to keep track of fields which were seen
@@ -45,8 +46,11 @@ import java.io.IOException;
*/
public final class UnknownFieldSetLite {

private static final int[] EMPTY_INT_ARRAY = new int[0];
private static final Object[] EMPTY_OBJECT_ARRAY = new Object[0];

private static final UnknownFieldSetLite DEFAULT_INSTANCE =
new UnknownFieldSetLite(ByteString.EMPTY);
new UnknownFieldSetLite(0, EMPTY_INT_ARRAY, EMPTY_OBJECT_ARRAY);

/**
* Get an empty {@code UnknownFieldSetLite}.
@@ -71,19 +75,41 @@
* {@code second}.
*/
static UnknownFieldSetLite concat(UnknownFieldSetLite first, UnknownFieldSetLite second) {
return new UnknownFieldSetLite(first.byteString.concat(second.byteString));
int count = first.count + second.count;
int[] tags = Arrays.copyOf(first.tags, count);
System.arraycopy(second.tags, 0, tags, first.count, second.count);
Object[] objects = Arrays.copyOf(first.objects, count);
System.arraycopy(second.objects, 0, objects, first.count, second.count);
return new UnknownFieldSetLite(count, tags, objects);
}

/**
* The number of elements in the set.
*/
private int count;

/**
* The tag numbers for the elements in the set.
*/
private int[] tags;

/**
* The boxed values of the elements in the set.
*/
private Object[] objects;

/**
* The lazily computed serialized size of the set.
*/
private int memoizedSerializedSize = -1;

/**
* The internal representation of the unknown fields.
* Constructs the {@code UnknownFieldSetLite}.
*/
private final ByteString byteString;

/**
* Constructs the {@code UnknownFieldSetLite} as a thin wrapper around {@link ByteString}.
*/
private UnknownFieldSetLite(ByteString byteString) {
this.byteString = byteString;
private UnknownFieldSetLite(int count, int[] tags, Object[] objects) {
this.count = count;
this.tags = tags;
this.objects = objects;
}

/**
@@ -92,17 +118,73 @@
* <p>For use by generated code only.
*/
public void writeTo(CodedOutputStream output) throws IOException {
output.writeRawBytes(byteString);
for (int i = 0; i < count; i++) {
int tag = tags[i];
int fieldNumber = WireFormat.getTagFieldNumber(tag);
switch (WireFormat.getTagWireType(tag)) {
case WireFormat.WIRETYPE_VARINT:
output.writeUInt64(fieldNumber, (Long) objects[i]);
break;
case WireFormat.WIRETYPE_FIXED32:
output.writeFixed32(fieldNumber, (Integer) objects[i]);
break;
case WireFormat.WIRETYPE_FIXED64:
output.writeFixed64(fieldNumber, (Long) objects[i]);
break;
case WireFormat.WIRETYPE_LENGTH_DELIMITED:
output.writeBytes(fieldNumber, (ByteString) objects[i]);
break;
case WireFormat.WIRETYPE_START_GROUP:
output.writeTag(fieldNumber, WireFormat.WIRETYPE_START_GROUP);
((UnknownFieldSetLite) objects[i]).writeTo(output);
output.writeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP);
break;
default:
throw InvalidProtocolBufferException.invalidWireType();
}
}
}

/**
* Get the number of bytes required to encode this set.
*
* <p>For use by generated code only.
*/
public int getSerializedSize() {
return byteString.size();
int size = memoizedSerializedSize;
if (size != -1) {
return size;
}

size = 0;
for (int i = 0; i < count; i++) {
int tag = tags[i];
int fieldNumber = WireFormat.getTagFieldNumber(tag);
switch (WireFormat.getTagWireType(tag)) {
case WireFormat.WIRETYPE_VARINT:
size += CodedOutputStream.computeUInt64Size(fieldNumber, (Long) objects[i]);
break;
case WireFormat.WIRETYPE_FIXED32:
size += CodedOutputStream.computeFixed32Size(fieldNumber, (Integer) objects[i]);
break;
case WireFormat.WIRETYPE_FIXED64:
size += CodedOutputStream.computeFixed64Size(fieldNumber, (Long) objects[i]);
break;
case WireFormat.WIRETYPE_LENGTH_DELIMITED:
size += CodedOutputStream.computeBytesSize(fieldNumber, (ByteString) objects[i]);
break;
case WireFormat.WIRETYPE_START_GROUP:
size += CodedOutputStream.computeTagSize(fieldNumber) * 2
+ ((UnknownFieldSetLite) objects[i]).getSerializedSize();
break;
default:
throw new IllegalStateException(InvalidProtocolBufferException.invalidWireType());
}
}

memoizedSerializedSize = size;

return size;
}

@Override
@@ -111,16 +193,34 @@
return true;
}

if (obj instanceof UnknownFieldSetLite) {
return byteString.equals(((UnknownFieldSetLite) obj).byteString);
if (obj == null) {
return false;
}

return false;
if (!(obj instanceof UnknownFieldSetLite)) {
return false;
}

UnknownFieldSetLite other = (UnknownFieldSetLite) obj;
if (count != other.count
// TODO(dweis): Only have to compare up to count but at worst 2x worse than we need to do.
|| !Arrays.equals(tags, other.tags)
|| !Arrays.deepEquals(objects, other.objects)) {
return false;
}

return true;
}

@Override
public int hashCode() {
return byteString.hashCode();
int hashCode = 17;

hashCode = 31 * hashCode + count;
hashCode = 31 * hashCode + Arrays.hashCode(tags);
hashCode = 31 * hashCode + Arrays.deepHashCode(objects);

return hashCode;
}

/**
@@ -131,28 +231,49 @@
* <p>For use by generated code only.
*/
public static final class Builder {

// Arbitrarily chosen.
// TODO(dweis): Tune this number?
private static final int MIN_CAPACITY = 8;

private int count = 0;
private int[] tags = EMPTY_INT_ARRAY;
private Object[] objects = EMPTY_OBJECT_ARRAY;

private ByteString.Output byteStringOutput;
private CodedOutputStream output;
private boolean built;

/**
* Constructs a {@code Builder}. Lazily initialized by
* {@link #ensureInitializedButNotBuilt()}.
* Constructs a {@code Builder}.
*/
private Builder() {}

/**
* Ensures internal state is initialized for use.
*/
private void ensureInitializedButNotBuilt() {
private void ensureNotBuilt() {
if (built) {
throw new IllegalStateException("Do not reuse UnknownFieldSetLite Builders.");
}

if (output == null && byteStringOutput == null) {
byteStringOutput = ByteString.newOutput(100 /* initialCapacity */);
output = CodedOutputStream.newInstance(byteStringOutput);
}

private void storeField(int tag, Object value) {
ensureCapacity();

tags[count] = tag;
objects[count] = value;
count++;
}

/**
* Ensures that our arrays are long enough to store more metadata.
*/
private void ensureCapacity() {
if (count == tags.length) {
int increment = count < (MIN_CAPACITY / 2) ? MIN_CAPACITY : count >> 1;
int newLength = count + increment;

tags = Arrays.copyOf(tags, newLength);
objects = Arrays.copyOf(objects, newLength);
}
}

@@ -166,31 +287,28 @@
*/
public boolean mergeFieldFrom(final int tag, final CodedInputStream input)
throws IOException {
ensureInitializedButNotBuilt();
ensureNotBuilt();

final int fieldNumber = WireFormat.getTagFieldNumber(tag);
switch (WireFormat.getTagWireType(tag)) {
case WireFormat.WIRETYPE_VARINT:
output.writeUInt64(fieldNumber, input.readInt64());
storeField(tag, input.readInt64());
return true;
case WireFormat.WIRETYPE_FIXED32:
output.writeFixed32(fieldNumber, input.readFixed32());
storeField(tag, input.readFixed32());
return true;
case WireFormat.WIRETYPE_FIXED64:
output.writeFixed64(fieldNumber, input.readFixed64());
storeField(tag, input.readFixed64());
return true;
case WireFormat.WIRETYPE_LENGTH_DELIMITED:
output.writeBytes(fieldNumber, input.readBytes());
storeField(tag, input.readBytes());
return true;
case WireFormat.WIRETYPE_START_GROUP:
final Builder subBuilder = newBuilder();
subBuilder.mergeFrom(input);
input.checkLastTagWas(
WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP));

output.writeTag(fieldNumber, WireFormat.WIRETYPE_START_GROUP);
subBuilder.build().writeTo(output);
output.writeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP);
storeField(tag, subBuilder.build());
return true;
case WireFormat.WIRETYPE_END_GROUP:
return false;
@@ -210,12 +328,10 @@
if (fieldNumber == 0) {
throw new IllegalArgumentException("Zero is not a valid field number.");
}
ensureInitializedButNotBuilt();
try {
output.writeUInt64(fieldNumber, value);
} catch (IOException e) {
// Should never happen.
}
ensureNotBuilt();

storeField(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_VARINT), (long) value);

return this;
}

@@ -229,11 +345,24 @@
if (fieldNumber == 0) {
throw new IllegalArgumentException("Zero is not a valid field number.");
}
ensureInitializedButNotBuilt();
try {
output.writeBytes(fieldNumber, value);
} catch (IOException e) {
// Should never happen.
ensureNotBuilt();

storeField(WireFormat.makeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED), value);

return this;
}

/**
* Parse an entire message from {@code input} and merge its fields into
* this set.
*/
private Builder mergeFrom(final CodedInputStream input) throws IOException {
// Ensures initialization in mergeFieldFrom.
while (true) {
final int tag = input.readTag();
if (tag == 0 || !mergeFieldFrom(tag, input)) {
break;
}
}
return this;
}
@@ -254,44 +383,12 @@
}

built = true;

final UnknownFieldSetLite result;
// If we were never initialized, no data was written.
if (output == null) {
result = getDefaultInstance();
} else {
try {
output.flush();
} catch (IOException e) {
// Should never happen.
}
ByteString byteString = byteStringOutput.toByteString();
if (byteString.isEmpty()) {
result = getDefaultInstance();
} else {
result = new UnknownFieldSetLite(byteString);
}

if (count == 0) {
return DEFAULT_INSTANCE;
}

// Allow for garbage collection.
output = null;
byteStringOutput = null;
return result;
}

/**
* Parse an entire message from {@code input} and merge its fields into
* this set.
*/
private Builder mergeFrom(final CodedInputStream input) throws IOException {
// Ensures initialization in mergeFieldFrom.
while (true) {
final int tag = input.readTag();
if (tag == 0 || !mergeFieldFrom(tag, input)) {
break;
}
}
return this;
return new UnknownFieldSetLite(count, tags, objects);
}
}
}
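The UnknownFieldSetLite rewrite above drops the eagerly serialized ByteString representation in favor of parallel arrays: an int[] of wire-format tags and an Object[] of boxed values, grown on demand by the Builder's ensureCapacity(). A minimal standalone sketch of that storage scheme follows; it is not the protobuf API, and only the growth policy mirrors the one in the diff.

import java.util.Arrays;

final class TagValueBuffer {
  private static final int MIN_CAPACITY = 8;  // same arbitrarily chosen minimum as the diff

  private int count;
  private int[] tags = new int[0];
  private Object[] objects = new Object[0];

  void store(int tag, Object value) {
    ensureCapacity();
    tags[count] = tag;
    objects[count] = value;
    count++;
  }

  private void ensureCapacity() {
    if (count == tags.length) {
      // Grow by 50%, but never below the minimum capacity.
      int increment = count < (MIN_CAPACITY / 2) ? MIN_CAPACITY : count >> 1;
      int newLength = count + increment;
      tags = Arrays.copyOf(tags, newLength);
      objects = Arrays.copyOf(objects, newLength);
    }
  }

  int size() {
    return count;
  }

  public static void main(String[] args) {
    TagValueBuffer buffer = new TagValueBuffer();
    for (int i = 1; i <= 20; i++) {
      buffer.store(i << 3, (long) i);  // varint-style tag for field number i
    }
    System.out.println("stored " + buffer.size() + " unknown fields");
  }
}

Keeping fields unserialized lets writeTo() and getSerializedSize() walk the arrays directly and lets build() avoid the old CodedOutputStream round trip.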
@@ -57,6 +57,11 @@ public class UnmodifiableLazyStringList extends AbstractList<String>
public String get(int index) {
return list.get(index);
}

@Override
public Object getRaw(int index) {
return list.getRaw(index);
}

@Override
public int size() {
@ -66,6 +66,12 @@ package com.google.protobuf;
|
||||
*/
|
||||
final class Utf8 {
|
||||
private Utf8() {}
|
||||
|
||||
/**
|
||||
* Maximum number of bytes per Java UTF-16 char in UTF-8.
|
||||
* @see java.nio.charset.CharsetEncoder#maxBytesPerChar()
|
||||
*/
|
||||
static final int MAX_BYTES_PER_CHAR = 3;
|
||||
|
||||
/**
|
||||
* State value indicating that the byte sequence is well-formed and
|
||||
@ -346,4 +352,130 @@ final class Utf8 {
|
||||
default: throw new AssertionError();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// These UTF-8 handling methods are copied from Guava's Utf8 class with a modification to throw
// a protocol buffer local exception. This exception is then caught in CodedOutputStream so it can
// fallback to more lenient behavior.

static class UnpairedSurrogateException extends IllegalArgumentException {

  private UnpairedSurrogateException(int index) {
    super("Unpaired surrogate at index " + index);
  }
}

/**
 * Returns the number of bytes in the UTF-8-encoded form of {@code sequence}. For a string,
 * this method is equivalent to {@code string.getBytes(UTF_8).length}, but is more efficient in
 * both time and space.
 *
 * @throws IllegalArgumentException if {@code sequence} contains ill-formed UTF-16 (unpaired
 *     surrogates)
 */
static int encodedLength(CharSequence sequence) {
  // Warning to maintainers: this implementation is highly optimized.
  int utf16Length = sequence.length();
  int utf8Length = utf16Length;
  int i = 0;

  // This loop optimizes for pure ASCII.
  while (i < utf16Length && sequence.charAt(i) < 0x80) {
    i++;
  }

  // This loop optimizes for chars less than 0x800.
  for (; i < utf16Length; i++) {
    char c = sequence.charAt(i);
    if (c < 0x800) {
      utf8Length += ((0x7f - c) >>> 31); // branch free!
    } else {
      utf8Length += encodedLengthGeneral(sequence, i);
      break;
    }
  }

  if (utf8Length < utf16Length) {
    // Necessary and sufficient condition for overflow because of maximum 3x expansion
    throw new IllegalArgumentException("UTF-8 length does not fit in int: "
        + (utf8Length + (1L << 32)));
  }
  return utf8Length;
}

private static int encodedLengthGeneral(CharSequence sequence, int start) {
  int utf16Length = sequence.length();
  int utf8Length = 0;
  for (int i = start; i < utf16Length; i++) {
    char c = sequence.charAt(i);
    if (c < 0x800) {
      utf8Length += (0x7f - c) >>> 31; // branch free!
    } else {
      utf8Length += 2;
      // jdk7+: if (Character.isSurrogate(c)) {
      if (Character.MIN_SURROGATE <= c && c <= Character.MAX_SURROGATE) {
        // Check that we have a well-formed surrogate pair.
        int cp = Character.codePointAt(sequence, i);
        if (cp < Character.MIN_SUPPLEMENTARY_CODE_POINT) {
          throw new UnpairedSurrogateException(i);
        }
        i++;
      }
    }
  }
  return utf8Length;
}

static int encode(CharSequence sequence, byte[] bytes, int offset, int length) {
  int utf16Length = sequence.length();
  int j = offset;
  int i = 0;
  int limit = offset + length;
  // Designed to take advantage of
  // https://wikis.oracle.com/display/HotSpotInternals/RangeCheckElimination
  for (char c; i < utf16Length && i + j < limit && (c = sequence.charAt(i)) < 0x80; i++) {
    bytes[j + i] = (byte) c;
  }
  if (i == utf16Length) {
    return j + utf16Length;
  }
  j += i;
  for (char c; i < utf16Length; i++) {
    c = sequence.charAt(i);
    if (c < 0x80 && j < limit) {
      bytes[j++] = (byte) c;
    } else if (c < 0x800 && j <= limit - 2) { // 11 bits, two UTF-8 bytes
      bytes[j++] = (byte) ((0xF << 6) | (c >>> 6));
      bytes[j++] = (byte) (0x80 | (0x3F & c));
    } else if ((c < Character.MIN_SURROGATE || Character.MAX_SURROGATE < c) && j <= limit - 3) {
      // Maximum single-char code point is 0xFFFF, 16 bits, three UTF-8 bytes
      bytes[j++] = (byte) ((0xF << 5) | (c >>> 12));
      bytes[j++] = (byte) (0x80 | (0x3F & (c >>> 6)));
      bytes[j++] = (byte) (0x80 | (0x3F & c));
    } else if (j <= limit - 4) {
      // Minimum code point represented by a surrogate pair is 0x10000, 17 bits, four UTF-8 bytes
      final char low;
      if (i + 1 == sequence.length()
          || !Character.isSurrogatePair(c, (low = sequence.charAt(++i)))) {
        throw new UnpairedSurrogateException((i - 1));
      }
      int codePoint = Character.toCodePoint(c, low);
      bytes[j++] = (byte) ((0xF << 4) | (codePoint >>> 18));
      bytes[j++] = (byte) (0x80 | (0x3F & (codePoint >>> 12)));
      bytes[j++] = (byte) (0x80 | (0x3F & (codePoint >>> 6)));
      bytes[j++] = (byte) (0x80 | (0x3F & codePoint));
    } else {
      // If the char is an unpaired surrogate, always throw an IllegalArgumentException
      // instead of an ArrayIndexOutOfBoundsException.
      if ((Character.MIN_SURROGATE <= c && c <= Character.MAX_SURROGATE)
          && (i + 1 == sequence.length()
              || !Character.isSurrogatePair(c, sequence.charAt(i + 1)))) {
        throw new UnpairedSurrogateException(i);
      }
      throw new ArrayIndexOutOfBoundsException("Failed writing " + c + " at index " + j);
    }
  }
  return j;
}
// End Guava UTF-8 methods.
}
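The branch-free increment in encodedLength() works because (0x7f - c) goes negative exactly when c >= 0x80, so shifting the sign bit down with >>> 31 yields 1 for chars that need a second UTF-8 byte and 0 for ASCII. A minimal standalone sketch of that equivalence (illustrative only, not part of this patch; the class and method names below are made up):

public class BranchFreeLengthSketch {
  public static void main(String[] args) {
    // Compare the branch-free form against the obvious conditional for every char below 0x800.
    for (char c = 0; c < 0x800; c++) {
      int branchFree = (0x7f - c) >>> 31;   // 1 extra byte when c >= 0x80
      int conditional = (c < 0x80) ? 0 : 1; // same thing, spelled out
      if (branchFree != conditional) {
        throw new AssertionError("mismatch at char " + (int) c);
      }
    }
    System.out.println("branch-free increment matches the conditional for all chars < 0x800");
  }
}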
@ -58,7 +58,7 @@ public final class WireFormat {
  static final int TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1;

  /** Given a tag value, determines the wire type (the lower 3 bits). */
  static int getTagWireType(final int tag) {
  public static int getTagWireType(final int tag) {
    return tag & TAG_TYPE_MASK;
  }
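For context on the getTagWireType() hunk above: a tag packs the field number into the high bits and the wire type into the low TAG_TYPE_BITS (3) bits, so masking with TAG_TYPE_MASK recovers the wire type. A small sketch of that layout (the TagSketch class, its makeTag() helper, and field number 5 are illustrative assumptions, not taken from this diff):

// Illustrative sketch of the tag layout WireFormat works with; not part of this diff.
class TagSketch {
  static final int TAG_TYPE_BITS = 3;
  static final int TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1; // 0b111

  // Packs a field number and wire type the way protobuf tags are laid out.
  static int makeTag(int fieldNumber, int wireType) {
    return (fieldNumber << TAG_TYPE_BITS) | wireType;
  }

  public static void main(String[] args) {
    int tag = makeTag(5, 2); // field 5, wire type 2 (length-delimited) -> 42
    System.out.println(tag & TAG_TYPE_MASK); // prints 2, same as getTagWireType(tag)
  }
}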
92 java/src/test/java/com/google/protobuf/AnyTest.java Normal file
@ -0,0 +1,92 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package com.google.protobuf;
|
||||
|
||||
import any_test.AnyTestProto.TestAny;
|
||||
import protobuf_unittest.UnittestProto.TestAllTypes;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
/**
|
||||
* Unit tests for Any message.
|
||||
*/
|
||||
public class AnyTest extends TestCase {
|
||||
public void testAnyGeneratedApi() throws Exception {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
TestUtil.setAllFields(builder);
|
||||
TestAllTypes message = builder.build();
|
||||
|
||||
TestAny container = TestAny.newBuilder()
|
||||
.setValue(Any.pack(message)).build();
|
||||
|
||||
assertTrue(container.getValue().is(TestAllTypes.class));
|
||||
assertFalse(container.getValue().is(TestAny.class));
|
||||
|
||||
TestAllTypes result = container.getValue().unpack(TestAllTypes.class);
|
||||
TestUtil.assertAllFieldsSet(result);
|
||||
|
||||
|
||||
// Unpacking to a wrong type will throw an exception.
|
||||
try {
|
||||
TestAny wrongMessage = container.getValue().unpack(TestAny.class);
|
||||
fail("Exception is expected.");
|
||||
} catch (InvalidProtocolBufferException e) {
|
||||
// expected.
|
||||
}
|
||||
|
||||
// Test that unpacking throws an exception if parsing fails.
|
||||
TestAny.Builder containerBuilder = container.toBuilder();
|
||||
containerBuilder.getValueBuilder().setValue(
|
||||
ByteString.copyFrom(new byte[]{0x11}));
|
||||
container = containerBuilder.build();
|
||||
try {
|
||||
TestAllTypes parsingFailed = container.getValue().unpack(TestAllTypes.class);
|
||||
fail("Exception is expected.");
|
||||
} catch (InvalidProtocolBufferException e) {
|
||||
// expected.
|
||||
}
|
||||
}
|
||||
|
||||
public void testCachedUnpackResult() throws Exception {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
TestUtil.setAllFields(builder);
|
||||
TestAllTypes message = builder.build();
|
||||
|
||||
TestAny container = TestAny.newBuilder()
|
||||
.setValue(Any.pack(message)).build();
|
||||
|
||||
assertTrue(container.getValue().is(TestAllTypes.class));
|
||||
|
||||
TestAllTypes result1 = container.getValue().unpack(TestAllTypes.class);
|
||||
TestAllTypes result2 = container.getValue().unpack(TestAllTypes.class);
|
||||
assertTrue(result1 == result2);
|
||||
}
|
||||
}
|
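The new AnyTest above exercises the generated Any API: pack() wraps a message, is() checks the packed type, and unpack() either returns the message or throws InvalidProtocolBufferException on a type mismatch or parse failure. A condensed usage sketch, assuming some generated message type MyMessage (hypothetical name, stands in for any generated class):

// Sketch only; MyMessage is a placeholder for a generated protobuf message type.
static void packAndUnpack(MyMessage message) throws InvalidProtocolBufferException {
  Any any = Any.pack(message);
  if (any.is(MyMessage.class)) {
    MyMessage unpacked = any.unpack(MyMessage.class);
    // unpacked equals message; unpack() would throw for a mismatched class.
  }
}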
@ -85,6 +85,7 @@ public class BoundedByteStringTest extends LiteralByteStringTest {
|
||||
testString.substring(2, testString.length() - 6), roundTripString);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testJavaSerialization() throws Exception {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
ObjectOutputStream oos = new ObjectOutputStream(out);
|
||||
|
@ -58,8 +58,7 @@ public class CheckUtf8Test extends TestCase {
|
||||
public void testParseRequiredStringWithGoodUtf8() throws Exception {
|
||||
ByteString serialized =
|
||||
BytesWrapper.newBuilder().setReq(UTF8_BYTE_STRING).build().toByteString();
|
||||
assertEquals(UTF8_BYTE_STRING_TEXT,
|
||||
StringWrapper.PARSER.parseFrom(serialized).getReq());
|
||||
assertEquals(UTF8_BYTE_STRING_TEXT, StringWrapper.parser().parseFrom(serialized).getReq());
|
||||
}
|
||||
|
||||
public void testBuildRequiredStringWithBadUtf8() throws Exception {
|
||||
@ -93,7 +92,7 @@ public class CheckUtf8Test extends TestCase {
|
||||
ByteString serialized =
|
||||
BytesWrapper.newBuilder().setReq(NON_UTF8_BYTE_STRING).build().toByteString();
|
||||
try {
|
||||
StringWrapper.PARSER.parseFrom(serialized);
|
||||
StringWrapper.parser().parseFrom(serialized);
|
||||
fail("Expected InvalidProtocolBufferException for non UTF-8 byte string.");
|
||||
} catch (InvalidProtocolBufferException exception) {
|
||||
assertEquals("Protocol message had invalid UTF-8.", exception.getMessage());
|
||||
@ -131,7 +130,7 @@ public class CheckUtf8Test extends TestCase {
|
||||
ByteString serialized =
|
||||
BytesWrapperSize.newBuilder().setReq(NON_UTF8_BYTE_STRING).build().toByteString();
|
||||
try {
|
||||
StringWrapperSize.PARSER.parseFrom(serialized);
|
||||
StringWrapperSize.parser().parseFrom(serialized);
|
||||
fail("Expected InvalidProtocolBufferException for non UTF-8 byte string.");
|
||||
} catch (InvalidProtocolBufferException exception) {
|
||||
assertEquals("Protocol message had invalid UTF-8.", exception.getMessage());
|
||||
|
@ -40,6 +40,7 @@ import junit.framework.TestCase;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
@ -325,10 +326,41 @@ public class CodedOutputStreamTest extends TestCase {
|
||||
for (int i = 0; i < 1024; ++i) {
|
||||
codedStream.writeRawBytes(value, 0, value.length);
|
||||
}
|
||||
String string =
|
||||
"abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz";
|
||||
// Ensure we take the slower fast path.
|
||||
assertTrue(CodedOutputStream.computeRawVarint32Size(string.length())
|
||||
!= CodedOutputStream.computeRawVarint32Size(string.length() * Utf8.MAX_BYTES_PER_CHAR));
|
||||
|
||||
codedStream.writeStringNoTag(string);
|
||||
int stringSize = CodedOutputStream.computeStringSizeNoTag(string);
|
||||
|
||||
// Make sure we have written more bytes than the buffer could hold. This is
|
||||
// to make the test complete.
|
||||
assertTrue(codedStream.getTotalBytesWritten() > BUFFER_SIZE);
|
||||
assertEquals(value.length * 1024, codedStream.getTotalBytesWritten());
|
||||
|
||||
// Verify that the total bytes written is correct
|
||||
assertEquals((value.length * 1024) + stringSize, codedStream.getTotalBytesWritten());
|
||||
}
|
||||
|
||||
// TODO(dweis): Write a comprehensive test suite for CodedOutputStream that covers more than just
|
||||
// this case.
|
||||
public void testWriteStringNoTag_fastpath() throws Exception {
|
||||
int bufferSize = 153;
|
||||
String threeBytesPer = "\u0981";
|
||||
String string = threeBytesPer;
|
||||
for (int i = 0; i < 50; i++) {
|
||||
string += threeBytesPer;
|
||||
}
|
||||
// These checks ensure we will tickle the slower fast path.
|
||||
assertEquals(1, CodedOutputStream.computeRawVarint32Size(string.length()));
|
||||
assertEquals(
|
||||
2, CodedOutputStream.computeRawVarint32Size(string.length() * Utf8.MAX_BYTES_PER_CHAR));
|
||||
assertEquals(bufferSize, string.length() * Utf8.MAX_BYTES_PER_CHAR);
|
||||
|
||||
CodedOutputStream output =
|
||||
CodedOutputStream.newInstance(ByteBuffer.allocate(bufferSize), bufferSize);
|
||||
output.writeStringNoTag(string);
|
||||
}
|
||||
|
||||
public void testWriteToByteBuffer() throws Exception {
|
||||
@ -398,4 +430,80 @@ public class CodedOutputStreamTest extends TestCase {
|
||||
assertEqualBytes(bytes(0x02, 0x33, 0x44, 0x00), destination);
|
||||
assertEquals(3, codedStream.getTotalBytesWritten());
|
||||
}
|
||||
|
||||
public void testSerializeInvalidUtf8() throws Exception {
|
||||
String[] invalidStrings = new String[] {
|
||||
newString(Character.MIN_HIGH_SURROGATE),
|
||||
"foobar" + newString(Character.MIN_HIGH_SURROGATE),
|
||||
newString(Character.MIN_LOW_SURROGATE),
|
||||
"foobar" + newString(Character.MIN_LOW_SURROGATE),
|
||||
newString(Character.MIN_HIGH_SURROGATE, Character.MIN_HIGH_SURROGATE)
|
||||
};
|
||||
|
||||
CodedOutputStream outputWithStream = CodedOutputStream.newInstance(new ByteArrayOutputStream());
|
||||
CodedOutputStream outputWithArray = CodedOutputStream.newInstance(new byte[10000]);
|
||||
for (String s : invalidStrings) {
|
||||
// TODO(dweis): These should all fail; instead they are corrupting data.
|
||||
CodedOutputStream.computeStringSizeNoTag(s);
|
||||
outputWithStream.writeStringNoTag(s);
|
||||
outputWithArray.writeStringNoTag(s);
|
||||
}
|
||||
}
|
||||
|
||||
private static String newString(char... chars) {
|
||||
return new String(chars);
|
||||
}
|
||||
|
||||
/** Regression test for https://github.com/google/protobuf/issues/292 */
|
||||
public void testCorrectExceptionThrowWhenEncodingStringsWithoutEnoughSpace() throws Exception {
|
||||
String testCase = "Foooooooo";
|
||||
assertEquals(CodedOutputStream.computeRawVarint32Size(testCase.length()),
|
||||
CodedOutputStream.computeRawVarint32Size(testCase.length() * 3));
|
||||
assertEquals(11, CodedOutputStream.computeStringSize(1, testCase));
|
||||
// Tag is one byte, varint describing string length is 1 byte, string length is 9 bytes.
|
||||
// An array of size 1 will cause a failure when trying to write the varint.
|
||||
for (int i = 0; i < 11; i++) {
|
||||
CodedOutputStream output = CodedOutputStream.newInstance(new byte[i]);
|
||||
try {
|
||||
output.writeString(1, testCase);
|
||||
fail("Should have thrown an out of space exception");
|
||||
} catch (CodedOutputStream.OutOfSpaceException expected) {}
|
||||
}
|
||||
}
|
||||
|
||||
public void testDifferentStringLengths() throws Exception {
|
||||
// Test string serialization roundtrip using strings of the following lengths,
|
||||
// with ASCII and Unicode characters requiring different UTF-8 byte counts per
|
||||
// char, hence causing the length delimiter varint to sometimes require more
|
||||
// bytes for the Unicode strings than the ASCII string of the same length.
|
||||
int[] lengths = new int[] {
|
||||
0,
|
||||
1,
|
||||
(1 << 4) - 1, // 1 byte for ASCII and Unicode
|
||||
(1 << 7) - 1, // 1 byte for ASCII, 2 bytes for Unicode
|
||||
(1 << 11) - 1, // 2 bytes for ASCII and Unicode
|
||||
(1 << 14) - 1, // 2 bytes for ASCII, 3 bytes for Unicode
|
||||
(1 << 17) - 1, // 3 bytes for ASCII and Unicode
|
||||
};
|
||||
for (int i : lengths) {
|
||||
testEncodingOfString('q', i); // 1 byte per char
|
||||
testEncodingOfString('\u07FF', i); // 2 bytes per char
|
||||
testEncodingOfString('\u0981', i); // 3 bytes per char
|
||||
}
|
||||
}
|
||||
|
||||
private void testEncodingOfString(char c, int length) throws Exception {
|
||||
String fullString = fullString(c, length);
|
||||
TestAllTypes testAllTypes = TestAllTypes.newBuilder()
|
||||
.setOptionalString(fullString)
|
||||
.build();
|
||||
assertEquals(
|
||||
fullString, TestAllTypes.parseFrom(testAllTypes.toByteArray()).getOptionalString());
|
||||
}
|
||||
|
||||
private String fullString(char c, int length) {
|
||||
char[] result = new char[length];
|
||||
Arrays.fill(result, c);
|
||||
return new String(result);
|
||||
}
|
||||
}
|
||||
|
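The string-length tests above revolve around how many bytes the length-delimiter varint takes: each varint byte carries 7 payload bits, so lengths up to 127 fit in one byte, up to 16383 in two, and so on. That is why a Unicode string can need a larger delimiter than an ASCII string with the same char count once the worst-case byte estimate crosses a 7-bit boundary. A rough standalone sketch of the size computation (illustrative, not the library's implementation):

// Rough sketch, not the library's implementation: size of a varint32 length prefix.
class VarintSizeSketch {
  static int varint32Size(int value) {
    int size = 1;
    while ((value & ~0x7F) != 0) { // more than 7 bits of payload remain
      size++;
      value >>>= 7;
    }
    return size;
  }

  public static void main(String[] args) {
    // 127 -> 1 byte, 128 -> 2 bytes, 16384 -> 3 bytes.
    System.out.println(varint32Size(127) + " " + varint32Size(128) + " " + varint32Size(16384));
  }
}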
@ -142,6 +142,16 @@ public class FieldPresenceTest extends TestCase {
|
||||
"OneofNestedMessage"));
|
||||
}
|
||||
|
||||
public void testOneofEquals() throws Exception {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
TestAllTypes message1 = builder.build();
|
||||
// Set message2's oneof_uint32 field to its default value. The two
// messages should still compare unequal because the oneof case differs.
|
||||
builder.setOneofUint32(0);
|
||||
TestAllTypes message2 = builder.build();
|
||||
assertFalse(message1.equals(message2));
|
||||
}
|
||||
|
||||
public void testFieldPresence() {
|
||||
// Optional non-message fields set to their default value are treated the
|
||||
// same way as not set.
|
||||
|
@ -187,8 +187,7 @@ public class GeneratedMessageTest extends TestCase {
|
||||
}
|
||||
|
||||
public void testParsedMessagesAreImmutable() throws Exception {
|
||||
TestAllTypes value = TestAllTypes.PARSER.parseFrom(
|
||||
TestUtil.getAllSet().toByteString());
|
||||
TestAllTypes value = TestAllTypes.parser().parseFrom(TestUtil.getAllSet().toByteString());
|
||||
assertIsUnmodifiable(value.getRepeatedInt32List());
|
||||
assertIsUnmodifiable(value.getRepeatedInt64List());
|
||||
assertIsUnmodifiable(value.getRepeatedUint32List());
|
||||
|
@ -89,7 +89,7 @@ public class LazyStringEndToEndTest extends TestCase {
|
||||
TEST_ALL_TYPES_SERIALIZED_WITH_ILLEGAL_UTF8,
|
||||
ByteString.copyFrom(sink));
|
||||
}
|
||||
|
||||
|
||||
public void testCaching() {
|
||||
String a = "a";
|
||||
String b = "b";
|
||||
@ -106,24 +106,13 @@ public class LazyStringEndToEndTest extends TestCase {
|
||||
assertSame(c, proto.getRepeatedString(1));
|
||||
|
||||
|
||||
// There's no way to directly observe that the ByteString is cached
|
||||
// correctly on serialization, but we can observe that it had to recompute
|
||||
// the string after serialization.
|
||||
// Ensure serialization keeps strings cached.
|
||||
proto.toByteString();
|
||||
String aPrime = proto.getOptionalString();
|
||||
assertNotSame(a, aPrime);
|
||||
assertEquals(a, aPrime);
|
||||
String bPrime = proto.getRepeatedString(0);
|
||||
assertNotSame(b, bPrime);
|
||||
assertEquals(b, bPrime);
|
||||
String cPrime = proto.getRepeatedString(1);
|
||||
assertNotSame(c, cPrime);
|
||||
assertEquals(c, cPrime);
|
||||
|
||||
// And now the string should stay cached.
|
||||
assertSame(aPrime, proto.getOptionalString());
|
||||
assertSame(bPrime, proto.getRepeatedString(0));
|
||||
assertSame(cPrime, proto.getRepeatedString(1));
|
||||
assertSame(a, proto.getOptionalString());
|
||||
assertSame(b, proto.getRepeatedString(0));
|
||||
assertSame(c, proto.getRepeatedString(1));
|
||||
}
|
||||
|
||||
public void testNoStringCachingIfOnlyBytesAccessed() throws Exception {
|
||||
|
@ -42,6 +42,7 @@ import com.google.protobuf.UnittestLite.TestAllTypesLite.NestedMessage;
|
||||
import com.google.protobuf.UnittestLite.TestAllTypesLite.OneofFieldCase;
|
||||
import com.google.protobuf.UnittestLite.TestAllTypesLite.OptionalGroup;
|
||||
import com.google.protobuf.UnittestLite.TestAllTypesLite.RepeatedGroup;
|
||||
import com.google.protobuf.UnittestLite.TestAllTypesLiteOrBuilder;
|
||||
import com.google.protobuf.UnittestLite.TestNestedExtensionLite;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
@ -1400,6 +1401,8 @@ public class LiteTest extends TestCase {
|
||||
assertEquals("hi", messageAfterBuild.getOneofString());
|
||||
assertEquals(OneofFieldCase.ONEOF_UINT32, builder.getOneofFieldCase());
|
||||
assertEquals(1, builder.getOneofUint32());
|
||||
TestAllTypesLiteOrBuilder messageOrBuilder = builder;
|
||||
assertEquals(OneofFieldCase.ONEOF_UINT32, messageOrBuilder.getOneofFieldCase());
|
||||
|
||||
TestAllExtensionsLite.Builder extendableMessageBuilder =
|
||||
TestAllExtensionsLite.newBuilder();
|
||||
|
@ -34,6 +34,7 @@ import junit.framework.TestCase;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.ObjectInputStream;
|
||||
@ -209,6 +210,62 @@ public class LiteralByteStringTest extends TestCase {
|
||||
Arrays.equals(referenceBytes, myBuffer.array()));
|
||||
}
|
||||
|
||||
public void testMarkSupported() {
|
||||
InputStream stream = stringUnderTest.newInput();
|
||||
assertTrue(classUnderTest + ".newInput() must support marking", stream.markSupported());
|
||||
}
|
||||
|
||||
public void testMarkAndReset() throws IOException {
|
||||
int fraction = stringUnderTest.size() / 3;
|
||||
|
||||
InputStream stream = stringUnderTest.newInput();
|
||||
stream.mark(stringUnderTest.size()); // First, mark() the end.
|
||||
|
||||
skipFully(stream, fraction); // Skip a large fraction, but not all.
|
||||
int available = stream.available();
|
||||
assertTrue(
|
||||
classUnderTest + ": after skipping to the 'middle', half the bytes are available",
|
||||
(stringUnderTest.size() - fraction) == available);
|
||||
stream.reset();
|
||||
|
||||
skipFully(stream, stringUnderTest.size()); // Skip to the end.
|
||||
available = stream.available();
|
||||
assertTrue(
|
||||
classUnderTest + ": after skipping to the end, no more bytes are available",
|
||||
0 == available);
|
||||
}
|
||||
|
||||
/**
|
||||
* Discards {@code n} bytes of data from the input stream. This method
|
||||
* will block until the full amount has been skipped. Does not close the
|
||||
* stream.
|
||||
* <p>Copied from com.google.common.io.ByteStreams to avoid adding dependency.
|
||||
*
|
||||
* @param in the input stream to read from
|
||||
* @param n the number of bytes to skip
|
||||
* @throws EOFException if this stream reaches the end before skipping all
|
||||
* the bytes
|
||||
* @throws IOException if an I/O error occurs, or the stream does not
|
||||
* support skipping
|
||||
*/
|
||||
static void skipFully(InputStream in, long n) throws IOException {
|
||||
long toSkip = n;
|
||||
while (n > 0) {
|
||||
long amt = in.skip(n);
|
||||
if (amt == 0) {
|
||||
// Force a blocking read to avoid infinite loop
|
||||
if (in.read() == -1) {
|
||||
long skipped = toSkip - n;
|
||||
throw new EOFException("reached end of stream after skipping "
|
||||
+ skipped + " bytes; " + toSkip + " bytes expected");
|
||||
}
|
||||
n--;
|
||||
} else {
|
||||
n -= amt;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testAsReadOnlyByteBuffer() {
|
||||
ByteBuffer byteBuffer = stringUnderTest.asReadOnlyByteBuffer();
|
||||
byte[] roundTripBytes = new byte[referenceBytes.length];
|
||||
@ -305,13 +362,13 @@ public class LiteralByteStringTest extends TestCase {
|
||||
assertEquals(classUnderTest + " unicode must match", testString, roundTripString);
|
||||
}
|
||||
|
||||
public void testToString_returnsCanonicalEmptyString() throws UnsupportedEncodingException{
|
||||
public void testToString_returnsCanonicalEmptyString() {
|
||||
assertSame(classUnderTest + " must be the same string references",
|
||||
ByteString.EMPTY.toString(Internal.UTF_8),
|
||||
new LiteralByteString(new byte[]{}).toString(Internal.UTF_8));
|
||||
}
|
||||
|
||||
public void testToString_raisesException() throws UnsupportedEncodingException{
|
||||
public void testToString_raisesException() {
|
||||
try {
|
||||
ByteString.EMPTY.toString("invalid");
|
||||
fail("Should have thrown an exception.");
|
||||
|
@ -74,6 +74,16 @@ public class MapForProto2LiteTest extends TestCase {
|
||||
builder.getMutableStringToInt32Field().put("3", 33);
|
||||
}
|
||||
|
||||
private void copyMapValues(TestMap source, TestMap.Builder destination) {
|
||||
destination
|
||||
.putAllInt32ToInt32Field(source.getInt32ToInt32Field())
|
||||
.putAllInt32ToStringField(source.getInt32ToStringField())
|
||||
.putAllInt32ToBytesField(source.getInt32ToBytesField())
|
||||
.putAllInt32ToEnumField(source.getInt32ToEnumField())
|
||||
.putAllInt32ToMessageField(source.getInt32ToMessageField())
|
||||
.putAllStringToInt32Field(source.getStringToInt32Field());
|
||||
}
|
||||
|
||||
private void assertMapValuesSet(TestMap message) {
|
||||
assertEquals(3, message.getInt32ToInt32Field().size());
|
||||
assertEquals(11, message.getInt32ToInt32Field().get(1).intValue());
|
||||
@ -330,26 +340,36 @@ public class MapForProto2LiteTest extends TestCase {
|
||||
assertMapValuesCleared(message);
|
||||
}
|
||||
|
||||
public void testPutAll() throws Exception {
|
||||
TestMap.Builder sourceBuilder = TestMap.newBuilder();
|
||||
setMapValues(sourceBuilder);
|
||||
TestMap source = sourceBuilder.build();
|
||||
|
||||
TestMap.Builder destination = TestMap.newBuilder();
|
||||
copyMapValues(source, destination);
|
||||
assertMapValuesSet(destination.build());
|
||||
}
|
||||
|
||||
public void testSerializeAndParse() throws Exception {
|
||||
TestMap.Builder builder = TestMap.newBuilder();
|
||||
setMapValues(builder);
|
||||
TestMap message = builder.build();
|
||||
assertEquals(message.getSerializedSize(), message.toByteString().size());
|
||||
message = TestMap.PARSER.parseFrom(message.toByteString());
|
||||
message = TestMap.parser().parseFrom(message.toByteString());
|
||||
assertMapValuesSet(message);
|
||||
|
||||
builder = message.toBuilder();
|
||||
updateMapValues(builder);
|
||||
message = builder.build();
|
||||
assertEquals(message.getSerializedSize(), message.toByteString().size());
|
||||
message = TestMap.PARSER.parseFrom(message.toByteString());
|
||||
message = TestMap.parser().parseFrom(message.toByteString());
|
||||
assertMapValuesUpdated(message);
|
||||
|
||||
builder = message.toBuilder();
|
||||
builder.clear();
|
||||
message = builder.build();
|
||||
assertEquals(message.getSerializedSize(), message.toByteString().size());
|
||||
message = TestMap.PARSER.parseFrom(message.toByteString());
|
||||
message = TestMap.parser().parseFrom(message.toByteString());
|
||||
assertMapValuesCleared(message);
|
||||
}
|
||||
|
||||
|
@ -78,6 +78,16 @@ public class MapForProto2Test extends TestCase {
|
||||
builder.getMutableStringToInt32Field().put("3", 33);
|
||||
}
|
||||
|
||||
private void copyMapValues(TestMap source, TestMap.Builder destination) {
|
||||
destination
|
||||
.putAllInt32ToInt32Field(source.getInt32ToInt32Field())
|
||||
.putAllInt32ToStringField(source.getInt32ToStringField())
|
||||
.putAllInt32ToBytesField(source.getInt32ToBytesField())
|
||||
.putAllInt32ToEnumField(source.getInt32ToEnumField())
|
||||
.putAllInt32ToMessageField(source.getInt32ToMessageField())
|
||||
.putAllStringToInt32Field(source.getStringToInt32Field());
|
||||
}
|
||||
|
||||
private void assertMapValuesSet(TestMap message) {
|
||||
assertEquals(3, message.getInt32ToInt32Field().size());
|
||||
assertEquals(11, message.getInt32ToInt32Field().get(1).intValue());
|
||||
@ -310,26 +320,36 @@ public class MapForProto2Test extends TestCase {
|
||||
assertMapValuesCleared(message);
|
||||
}
|
||||
|
||||
public void testPutAll() throws Exception {
|
||||
TestMap.Builder sourceBuilder = TestMap.newBuilder();
|
||||
setMapValues(sourceBuilder);
|
||||
TestMap source = sourceBuilder.build();
|
||||
|
||||
TestMap.Builder destination = TestMap.newBuilder();
|
||||
copyMapValues(source, destination);
|
||||
assertMapValuesSet(destination.build());
|
||||
}
|
||||
|
||||
public void testSerializeAndParse() throws Exception {
|
||||
TestMap.Builder builder = TestMap.newBuilder();
|
||||
setMapValues(builder);
|
||||
TestMap message = builder.build();
|
||||
assertEquals(message.getSerializedSize(), message.toByteString().size());
|
||||
message = TestMap.PARSER.parseFrom(message.toByteString());
|
||||
message = TestMap.parser().parseFrom(message.toByteString());
|
||||
assertMapValuesSet(message);
|
||||
|
||||
builder = message.toBuilder();
|
||||
updateMapValues(builder);
|
||||
message = builder.build();
|
||||
assertEquals(message.getSerializedSize(), message.toByteString().size());
|
||||
message = TestMap.PARSER.parseFrom(message.toByteString());
|
||||
message = TestMap.parser().parseFrom(message.toByteString());
|
||||
assertMapValuesUpdated(message);
|
||||
|
||||
builder = message.toBuilder();
|
||||
builder.clear();
|
||||
message = builder.build();
|
||||
assertEquals(message.getSerializedSize(), message.toByteString().size());
|
||||
message = TestMap.PARSER.parseFrom(message.toByteString());
|
||||
message = TestMap.parser().parseFrom(message.toByteString());
|
||||
assertMapValuesCleared(message);
|
||||
}
|
||||
|
||||
|
@ -79,6 +79,16 @@ public class MapTest extends TestCase {
|
||||
builder.getMutableStringToInt32Field().put("3", 33);
|
||||
}
|
||||
|
||||
private void copyMapValues(TestMap source, TestMap.Builder destination) {
|
||||
destination
|
||||
.putAllInt32ToInt32Field(source.getInt32ToInt32Field())
|
||||
.putAllInt32ToStringField(source.getInt32ToStringField())
|
||||
.putAllInt32ToBytesField(source.getInt32ToBytesField())
|
||||
.putAllInt32ToEnumField(source.getInt32ToEnumField())
|
||||
.putAllInt32ToMessageField(source.getInt32ToMessageField())
|
||||
.putAllStringToInt32Field(source.getStringToInt32Field());
|
||||
}
|
||||
|
||||
private void assertMapValuesSet(TestMap message) {
|
||||
assertEquals(3, message.getInt32ToInt32Field().size());
|
||||
assertEquals(11, message.getInt32ToInt32Field().get(1).intValue());
|
||||
@ -311,26 +321,52 @@ public class MapTest extends TestCase {
|
||||
assertMapValuesCleared(message);
|
||||
}
|
||||
|
||||
public void testPutAll() throws Exception {
|
||||
TestMap.Builder sourceBuilder = TestMap.newBuilder();
|
||||
setMapValues(sourceBuilder);
|
||||
TestMap source = sourceBuilder.build();
|
||||
|
||||
TestMap.Builder destination = TestMap.newBuilder();
|
||||
copyMapValues(source, destination);
|
||||
assertMapValuesSet(destination.build());
|
||||
}
|
||||
|
||||
public void testPutAllForUnknownEnumValues() throws Exception {
|
||||
TestMap.Builder sourceBuilder = TestMap.newBuilder();
|
||||
sourceBuilder.getMutableInt32ToEnumFieldValue().put(0, 0);
|
||||
sourceBuilder.getMutableInt32ToEnumFieldValue().put(1, 1);
|
||||
sourceBuilder.getMutableInt32ToEnumFieldValue().put(2, 1000); // unknown value.
|
||||
TestMap source = sourceBuilder.build();
|
||||
|
||||
TestMap.Builder destinationBuilder = TestMap.newBuilder();
|
||||
destinationBuilder.putAllInt32ToEnumFieldValue(source.getInt32ToEnumFieldValue());
|
||||
TestMap destination = destinationBuilder.build();
|
||||
|
||||
assertEquals(0, destination.getInt32ToEnumFieldValue().get(0).intValue());
|
||||
assertEquals(1, destination.getInt32ToEnumFieldValue().get(1).intValue());
|
||||
assertEquals(1000, destination.getInt32ToEnumFieldValue().get(2).intValue());
|
||||
}
|
||||
|
||||
public void testSerializeAndParse() throws Exception {
|
||||
TestMap.Builder builder = TestMap.newBuilder();
|
||||
setMapValues(builder);
|
||||
TestMap message = builder.build();
|
||||
assertEquals(message.getSerializedSize(), message.toByteString().size());
|
||||
message = TestMap.PARSER.parseFrom(message.toByteString());
|
||||
message = TestMap.parser().parseFrom(message.toByteString());
|
||||
assertMapValuesSet(message);
|
||||
|
||||
builder = message.toBuilder();
|
||||
updateMapValues(builder);
|
||||
message = builder.build();
|
||||
assertEquals(message.getSerializedSize(), message.toByteString().size());
|
||||
message = TestMap.PARSER.parseFrom(message.toByteString());
|
||||
message = TestMap.parser().parseFrom(message.toByteString());
|
||||
assertMapValuesUpdated(message);
|
||||
|
||||
builder = message.toBuilder();
|
||||
builder.clear();
|
||||
message = builder.build();
|
||||
assertEquals(message.getSerializedSize(), message.toByteString().size());
|
||||
message = TestMap.PARSER.parseFrom(message.toByteString());
|
||||
message = TestMap.parser().parseFrom(message.toByteString());
|
||||
assertMapValuesCleared(message);
|
||||
}
|
||||
|
||||
|
@ -58,8 +58,7 @@ import java.io.InputStream;
|
||||
public class ParserTest extends TestCase {
|
||||
public void testGeneratedMessageParserSingleton() throws Exception {
|
||||
for (int i = 0; i < 10; i++) {
|
||||
assertEquals(TestAllTypes.PARSER,
|
||||
TestUtil.getAllSet().getParserForType());
|
||||
assertEquals(TestAllTypes.parser(), TestUtil.getAllSet().getParserForType());
|
||||
}
|
||||
}
|
||||
|
||||
@ -125,8 +124,7 @@ public class ParserTest extends TestCase {
|
||||
|
||||
|
||||
public void testParsePartial() throws Exception {
|
||||
assertParsePartial(TestRequired.PARSER,
|
||||
TestRequired.newBuilder().setA(1).buildPartial());
|
||||
assertParsePartial(TestRequired.parser(), TestRequired.newBuilder().setA(1).buildPartial());
|
||||
}
|
||||
|
||||
private <T extends MessageLite> void assertParsePartial(
|
||||
@ -216,8 +214,8 @@ public class ParserTest extends TestCase {
|
||||
|
||||
public void testParseUnknownFields() throws Exception {
|
||||
// All fields will be treated as unknown fields in emptyMessage.
|
||||
TestEmptyMessage emptyMessage = TestEmptyMessage.PARSER.parseFrom(
|
||||
TestUtil.getAllSet().toByteString());
|
||||
TestEmptyMessage emptyMessage =
|
||||
TestEmptyMessage.parser().parseFrom(TestUtil.getAllSet().toByteString());
|
||||
assertEquals(
|
||||
TestUtil.getAllSet().toByteString(),
|
||||
emptyMessage.toByteString());
|
||||
@ -298,8 +296,7 @@ public class ParserTest extends TestCase {
|
||||
// Parse TestParsingMerge.
|
||||
ExtensionRegistry registry = ExtensionRegistry.newInstance();
|
||||
UnittestProto.registerAllExtensions(registry);
|
||||
TestParsingMerge parsingMerge =
|
||||
TestParsingMerge.PARSER.parseFrom(data, registry);
|
||||
TestParsingMerge parsingMerge = TestParsingMerge.parser().parseFrom(data, registry);
|
||||
|
||||
// Required and optional fields should be merged.
|
||||
assertMessageMerged(parsingMerge.getRequiredAllTypes());
|
||||
@ -361,8 +358,7 @@ public class ParserTest extends TestCase {
|
||||
// Parse TestParsingMergeLite.
|
||||
ExtensionRegistry registry = ExtensionRegistry.newInstance();
|
||||
UnittestLite.registerAllExtensions(registry);
|
||||
TestParsingMergeLite parsingMerge =
|
||||
TestParsingMergeLite.PARSER.parseFrom(data, registry);
|
||||
TestParsingMergeLite parsingMerge = TestParsingMergeLite.parser().parseFrom(data, registry);
|
||||
|
||||
// Required and optional fields should be merged.
|
||||
assertMessageMerged(parsingMerge.getRequiredAllTypes());
|
||||
|
@ -119,7 +119,7 @@ public class RopeByteStringTest extends LiteralByteStringTest {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testCharsetToString() throws UnsupportedEncodingException {
|
||||
public void testCharsetToString() {
|
||||
String sourceString = "I love unicode \u1234\u5678 characters";
|
||||
ByteString sourceByteString = ByteString.copyFromUtf8(sourceString);
|
||||
int copies = 250;
|
||||
@ -145,14 +145,15 @@ public class RopeByteStringTest extends LiteralByteStringTest {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testToString_returnsCanonicalEmptyString() throws UnsupportedEncodingException {
|
||||
public void testToString_returnsCanonicalEmptyString() {
|
||||
RopeByteString ropeByteString =
|
||||
RopeByteString.newInstanceForTest(ByteString.EMPTY, ByteString.EMPTY);
|
||||
assertSame(classUnderTest + " must be the same string references",
|
||||
ByteString.EMPTY.toString(Internal.UTF_8), ropeByteString.toString(Internal.UTF_8));
|
||||
}
|
||||
|
||||
public void testToString_raisesException() throws UnsupportedEncodingException{
|
||||
@Override
|
||||
public void testToString_raisesException() {
|
||||
try {
|
||||
ByteString byteString =
|
||||
RopeByteString.newInstanceForTest(ByteString.EMPTY, ByteString.EMPTY);
|
||||
@ -172,6 +173,7 @@ public class RopeByteStringTest extends LiteralByteStringTest {
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testJavaSerialization() throws Exception {
|
||||
ByteArrayOutputStream out = new ByteArrayOutputStream();
|
||||
ObjectOutputStream oos = new ObjectOutputStream(out);
|
||||
|
@ -732,6 +732,7 @@ public final class TestUtil {
|
||||
Assert.assertEquals("424", message.getDefaultStringPiece());
|
||||
Assert.assertEquals("425", message.getDefaultCord());
|
||||
|
||||
Assert.assertEquals(TestAllTypes.OneofFieldCase.ONEOF_BYTES, message.getOneofFieldCase());
|
||||
Assert.assertFalse(message.hasOneofUint32());
|
||||
Assert.assertFalse(message.hasOneofNestedMessage());
|
||||
Assert.assertFalse(message.hasOneofString());
|
||||
|
@ -32,7 +32,6 @@ package com.google.protobuf;
|
||||
|
||||
import com.google.protobuf.Descriptors.FieldDescriptor;
|
||||
import com.google.protobuf.TextFormat.Parser.SingularOverwritePolicy;
|
||||
import protobuf_unittest.UnittestMset.TestMessageSet;
|
||||
import protobuf_unittest.UnittestMset.TestMessageSetExtension1;
|
||||
import protobuf_unittest.UnittestMset.TestMessageSetExtension2;
|
||||
import protobuf_unittest.UnittestProto.OneString;
|
||||
@ -41,6 +40,7 @@ import protobuf_unittest.UnittestProto.TestAllTypes;
|
||||
import protobuf_unittest.UnittestProto.TestAllTypes.NestedMessage;
|
||||
import protobuf_unittest.UnittestProto.TestEmptyMessage;
|
||||
import protobuf_unittest.UnittestProto.TestOneof2;
|
||||
import proto2_wireformat_unittest.UnittestMsetWireFormat.TestMessageSet;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
|
@ -461,7 +461,7 @@ public class UnknownFieldSetTest extends TestCase {
|
||||
TestAllExtensions allExtensions = TestUtil.getAllExtensionsSet();
|
||||
ByteString allExtensionsData = allExtensions.toByteString();
|
||||
UnittestLite.TestEmptyMessageLite emptyMessageLite =
|
||||
UnittestLite.TestEmptyMessageLite.PARSER.parseFrom(allExtensionsData);
|
||||
UnittestLite.TestEmptyMessageLite.parser().parseFrom(allExtensionsData);
|
||||
ByteString data = emptyMessageLite.toByteString();
|
||||
TestAllExtensions message =
|
||||
TestAllExtensions.parseFrom(data, TestUtil.getExtensionRegistry());
|
||||
|
@ -44,10 +44,10 @@ import protobuf_unittest.UnittestProto.TestOneof2;
|
||||
import protobuf_unittest.UnittestProto.TestOneofBackwardsCompatible;
|
||||
import protobuf_unittest.UnittestProto.TestPackedExtensions;
|
||||
import protobuf_unittest.UnittestProto.TestPackedTypes;
|
||||
import protobuf_unittest.UnittestMset.TestMessageSet;
|
||||
import protobuf_unittest.UnittestMset.RawMessageSet;
|
||||
import protobuf_unittest.UnittestMset.TestMessageSetExtension1;
|
||||
import protobuf_unittest.UnittestMset.TestMessageSetExtension2;
|
||||
import proto2_wireformat_unittest.UnittestMsetWireFormat.TestMessageSet;
|
||||
import com.google.protobuf.UnittestLite.TestAllExtensionsLite;
|
||||
import com.google.protobuf.UnittestLite.TestPackedExtensionsLite;
|
||||
|
||||
|
42 java/src/test/java/com/google/protobuf/any_test.proto Normal file
@ -0,0 +1,42 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package any_test;
|
||||
|
||||
option java_package = "any_test";
|
||||
option java_outer_classname = "AnyTestProto";
|
||||
|
||||
import "google/protobuf/any.proto";
|
||||
|
||||
message TestAny {
|
||||
google.protobuf.Any value = 1;
|
||||
}
|
202 java/util/pom.xml Normal file
@ -0,0 +1,202 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>com.google</groupId>
|
||||
<artifactId>google</artifactId>
|
||||
<version>1</version>
|
||||
</parent>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java-util</artifactId>
|
||||
<version>3.0.0-alpha-4-pre</version>
|
||||
<packaging>bundle</packaging>
|
||||
<name>Protocol Buffer Java API</name>
|
||||
<description>
|
||||
Protocol Buffers are a way of encoding structured data in an efficient yet
|
||||
extensible format.
|
||||
</description>
|
||||
<inceptionYear>2008</inceptionYear>
|
||||
<url>https://developers.google.com/protocol-buffers/</url>
|
||||
<licenses>
|
||||
<license>
|
||||
<name>New BSD license</name>
|
||||
<url>http://www.opensource.org/licenses/bsd-license.php</url>
|
||||
<distribution>repo</distribution>
|
||||
</license>
|
||||
</licenses>
|
||||
<scm>
|
||||
<url>https://github.com/google/protobuf</url>
|
||||
<connection>
|
||||
scm:git:https://github.com/google/protobuf.git
|
||||
</connection>
|
||||
</scm>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
<version>3.0.0-alpha-4-pre</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
<version>18.0</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.code.gson</groupId>
|
||||
<artifactId>gson</artifactId>
|
||||
<version>2.3</version>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<version>4.4</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.easymock</groupId>
|
||||
<artifactId>easymock</artifactId>
|
||||
<version>2.2</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.easymock</groupId>
|
||||
<artifactId>easymockclassextension</artifactId>
|
||||
<version>2.2.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<configuration>
|
||||
<source>1.5</source>
|
||||
<target>1.5</target>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
<configuration>
|
||||
<includes>
|
||||
<include>**/*Test.java</include>
|
||||
<include>../src/main/java/com/google/protobuf/TestUtil.java</include>
|
||||
</includes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<artifactId>maven-antrun-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>generate-test-sources</id>
|
||||
<phase>generate-test-sources</phase>
|
||||
<configuration>
|
||||
<tasks>
|
||||
<mkdir dir="target/generated-test-sources" />
|
||||
<exec executable="../../src/protoc">
|
||||
<arg value="--java_out=target/generated-test-sources" />
|
||||
<arg value="--proto_path=../../src" />
|
||||
<arg value="--proto_path=src/test/java" />
|
||||
<arg value="../../src/google/protobuf/unittest.proto" />
|
||||
<arg value="../../src/google/protobuf/unittest_import.proto" />
|
||||
<arg value="../../src/google/protobuf/unittest_import_public.proto" />
|
||||
<arg value="src/test/java/com/google/protobuf/util/json_test.proto" />
|
||||
</exec>
|
||||
</tasks>
|
||||
<testSourceRoot>target/generated-test-sources</testSourceRoot>
|
||||
</configuration>
|
||||
<goals>
|
||||
<goal>run</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.felix</groupId>
|
||||
<artifactId>maven-bundle-plugin</artifactId>
|
||||
<extensions>true</extensions>
|
||||
<configuration>
|
||||
<instructions>
|
||||
<Bundle-DocURL>https://developers.google.com/protocol-buffers/</Bundle-DocURL>
|
||||
<Bundle-SymbolicName>com.google.protobuf.util</Bundle-SymbolicName>
|
||||
<Export-Package>com.google.protobuf.util;version=3.0.0-alpha-3</Export-Package>
|
||||
</instructions>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
<profiles>
|
||||
<profile>
|
||||
<id>release</id>
|
||||
<distributionManagement>
|
||||
<snapshotRepository>
|
||||
<id>sonatype-nexus-staging</id>
|
||||
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
|
||||
</snapshotRepository>
|
||||
<repository>
|
||||
<id>sonatype-nexus-staging</id>
|
||||
<url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>
|
||||
</repository>
|
||||
</distributionManagement>
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-source-plugin</artifactId>
|
||||
<version>2.2.1</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>attach-sources</id>
|
||||
<goals>
|
||||
<goal>jar-no-fork</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-javadoc-plugin</artifactId>
|
||||
<version>2.9.1</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>attach-javadocs</id>
|
||||
<goals>
|
||||
<goal>jar</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-gpg-plugin</artifactId>
|
||||
<version>1.5</version>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>sign-artifacts</id>
|
||||
<phase>verify</phase>
|
||||
<goals>
|
||||
<goal>sign</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.sonatype.plugins</groupId>
|
||||
<artifactId>nexus-staging-maven-plugin</artifactId>
|
||||
<version>1.6.3</version>
|
||||
<extensions>true</extensions>
|
||||
<configuration>
|
||||
<serverId>sonatype-nexus-staging</serverId>
|
||||
<nexusUrl>https://oss.sonatype.org/</nexusUrl>
|
||||
<autoReleaseAfterClose>false</autoReleaseAfterClose>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</profile>
|
||||
</profiles>
|
||||
</project>
|
@ -0,0 +1,259 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package com.google.protobuf.util;
|
||||
|
||||
import com.google.protobuf.Descriptors.Descriptor;
|
||||
import com.google.protobuf.Descriptors.FieldDescriptor;
|
||||
import com.google.protobuf.FieldMask;
|
||||
import com.google.protobuf.Message;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.TreeMap;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
/**
|
||||
* A tree representation of a FieldMask. Each leaf node in this tree represents
* a field path in the FieldMask.
|
||||
*
|
||||
* <p>For example, FieldMask "foo.bar,foo.baz,bar.baz" as a tree will be:
|
||||
* <pre>
|
||||
* [root] -+- foo -+- bar
|
||||
* | |
|
||||
* | +- baz
|
||||
* |
|
||||
* +- bar --- baz
|
||||
* </pre>
|
||||
*
|
||||
* <p>By representing FieldMasks with this tree structure we can easily convert
* a FieldMask to a canonical form, merge two FieldMasks, calculate the
* intersection of two FieldMasks, and traverse all fields specified by the
* FieldMask in a message tree.
|
||||
*/
|
||||
class FieldMaskTree {
|
||||
private static final Logger logger =
|
||||
Logger.getLogger(FieldMaskTree.class.getName());
|
||||
|
||||
private static final String FIELD_PATH_SEPARATOR_REGEX = "\\.";
|
||||
|
||||
private static class Node {
|
||||
public TreeMap<String, Node> children = new TreeMap<String, Node>();
|
||||
}
|
||||
|
||||
private final Node root = new Node();
|
||||
|
||||
/** Creates an empty FieldMaskTree. */
|
||||
public FieldMaskTree() {}
|
||||
|
||||
/** Creates a FieldMaskTree for a given FieldMask. */
|
||||
public FieldMaskTree(FieldMask mask) {
|
||||
mergeFromFieldMask(mask);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return FieldMaskUtil.toString(toFieldMask());
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a field path to the tree. In a FieldMask, every field path matches the
|
||||
* specified field as well as all its sub-fields. For example, a field path
|
||||
* "foo.bar" matches field "foo.bar" and also "foo.bar.baz", etc. When adding
|
||||
* a field path to the tree, redundant sub-paths will be removed. That is,
|
||||
* after adding "foo.bar" to the tree, "foo.bar.baz" will be removed if it
|
||||
* exists, which will turn the tree node for "foo.bar" to a leaf node.
|
||||
* Likewise, if the field path to add is a sub-path of an existing leaf node,
|
||||
* nothing will be changed in the tree.
|
||||
*/
|
||||
public FieldMaskTree addFieldPath(String path) {
|
||||
String[] parts = path.split(FIELD_PATH_SEPARATOR_REGEX);
|
||||
if (parts.length == 0) {
|
||||
return this;
|
||||
}
|
||||
Node node = root;
|
||||
boolean createNewBranch = false;
|
||||
// Find the matching node in the tree.
|
||||
for (String part : parts) {
|
||||
// Check whether the path matches an existing leaf node.
|
||||
if (!createNewBranch && node != root && node.children.isEmpty()) {
|
||||
// The path to add is a sub-path of an existing leaf node.
|
||||
return this;
|
||||
}
|
||||
if (node.children.containsKey(part)) {
|
||||
node = node.children.get(part);
|
||||
} else {
|
||||
createNewBranch = true;
|
||||
Node tmp = new Node();
|
||||
node.children.put(part, tmp);
|
||||
node = tmp;
|
||||
}
|
||||
}
|
||||
// Turn the matching node into a leaf node (i.e., remove sub-paths).
|
||||
node.children.clear();
|
||||
return this;
|
||||
}
|
||||
|
||||
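To make the pruning rule in addFieldPath() concrete: once "foo.bar" is in the tree, adding "foo.bar.baz" is a no-op, and adding "foo.bar" after "foo.bar.baz" collapses the deeper path back to a leaf. A small illustrative sketch (FieldMaskTree is package-private, so this only compiles inside com.google.protobuf.util; the expected output is inferred from the methods above):

static void addFieldPathSketch() {
  FieldMaskTree tree = new FieldMaskTree();
  tree.addFieldPath("foo.bar");
  tree.addFieldPath("foo.bar.baz"); // ignored: already covered by the "foo.bar" leaf
  tree.addFieldPath("bar.baz");
  // tree.toString() -> "bar.baz,foo.bar" (leaf paths, children kept in sorted TreeMap order)
}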
/**
|
||||
* Merges all field paths in a FieldMask into this tree.
|
||||
*/
|
||||
public FieldMaskTree mergeFromFieldMask(FieldMask mask) {
|
||||
for (String path : mask.getPathsList()) {
|
||||
addFieldPath(path);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/** Converts this tree to a FieldMask. */
|
||||
public FieldMask toFieldMask() {
|
||||
if (root.children.isEmpty()) {
|
||||
return FieldMask.getDefaultInstance();
|
||||
}
|
||||
List<String> paths = new ArrayList<String>();
|
||||
getFieldPaths(root, "", paths);
|
||||
return FieldMask.newBuilder().addAllPaths(paths).build();
|
||||
}
|
||||
|
||||
/** Gathers all field paths in a sub-tree. */
|
||||
private void getFieldPaths(Node node, String path, List<String> paths) {
|
||||
if (node.children.isEmpty()) {
|
||||
paths.add(path);
|
||||
return;
|
||||
}
|
||||
for (Entry<String, Node> entry : node.children.entrySet()) {
|
||||
String childPath = path.isEmpty()
|
||||
? entry.getKey() : path + "." + entry.getKey();
|
||||
getFieldPaths(entry.getValue(), childPath, paths);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the intersection of this tree with the given {@code path} to
|
||||
* {@code output}.
|
||||
*/
|
||||
public void intersectFieldPath(String path, FieldMaskTree output) {
|
||||
if (root.children.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
String[] parts = path.split(FIELD_PATH_SEPARATOR_REGEX);
|
||||
if (parts.length == 0) {
|
||||
return;
|
||||
}
|
||||
Node node = root;
|
||||
for (String part : parts) {
|
||||
if (node != root && node.children.isEmpty()) {
|
||||
// The given path is a sub-path of an existing leaf node in the tree.
|
||||
output.addFieldPath(path);
|
||||
return;
|
||||
}
|
||||
if (node.children.containsKey(part)) {
|
||||
node = node.children.get(part);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
// We found a matching node for the path. All leaf children of this
// matching node are in the intersection.
|
||||
List<String> paths = new ArrayList<String>();
|
||||
getFieldPaths(node, path, paths);
|
||||
for (String value : paths) {
|
||||
output.addFieldPath(value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges all fields specified by this FieldMaskTree from {@code source} to
|
||||
* {@code destination}.
|
||||
*/
|
||||
public void merge(Message source, Message.Builder destination,
|
||||
FieldMaskUtil.MergeOptions options) {
|
||||
if (source.getDescriptorForType() != destination.getDescriptorForType()) {
|
||||
throw new IllegalArgumentException(
|
||||
"Cannot merge messages of different types.");
|
||||
}
|
||||
if (root.children.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
merge(root, "", source, destination, options);
|
||||
}
|
||||
|
||||
/** Merges all fields specified by a sub-tree from {@code source} to
|
||||
* {@code destination}.
|
||||
*/
|
||||
private void merge(Node node, String path, Message source,
|
||||
Message.Builder destination, FieldMaskUtil.MergeOptions options) {
|
||||
assert source.getDescriptorForType() == destination.getDescriptorForType();
|
||||
|
||||
Descriptor descriptor = source.getDescriptorForType();
|
||||
for (Entry<String, Node> entry : node.children.entrySet()) {
|
||||
FieldDescriptor field =
|
||||
descriptor.findFieldByName(entry.getKey());
|
||||
if (field == null) {
|
||||
logger.warning("Cannot find field \"" + entry.getKey()
|
||||
+ "\" in message type " + descriptor.getFullName());
|
||||
continue;
|
||||
}
|
||||
if (!entry.getValue().children.isEmpty()) {
|
||||
if (field.isRepeated()
|
||||
|| field.getJavaType() != FieldDescriptor.JavaType.MESSAGE) {
|
||||
logger.warning("Field \"" + field.getFullName() + "\" is not a "
|
||||
+ "singluar message field and cannot have sub-fields.");
|
||||
continue;
|
||||
}
|
||||
String childPath = path.isEmpty()
|
||||
? entry.getKey() : path + "." + entry.getKey();
|
||||
merge(entry.getValue(), childPath, (Message) source.getField(field),
|
||||
destination.getFieldBuilder(field), options);
|
||||
continue;
|
||||
}
|
||||
if (field.isRepeated()) {
|
||||
if (options.replaceRepeatedFields()) {
|
||||
destination.setField(field, source.getField(field));
|
||||
} else {
|
||||
for (Object element : (List) source.getField(field)) {
|
||||
destination.addRepeatedField(field, element);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
|
||||
if (options.replaceMessageFields()) {
|
||||
destination.setField(field, source.getField(field));
|
||||
} else {
|
||||
destination.getFieldBuilder(field).mergeFrom(
|
||||
(Message) source.getField(field));
|
||||
}
|
||||
} else {
|
||||
destination.setField(field, source.getField(field));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,222 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package com.google.protobuf.util;
|
||||
|
||||
import com.google.protobuf.Descriptors.Descriptor;
|
||||
import com.google.protobuf.Descriptors.FieldDescriptor;
|
||||
import com.google.protobuf.FieldMask;
|
||||
import com.google.protobuf.Internal;
|
||||
import com.google.protobuf.Message;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Utility functions for working with {@link com.google.protobuf.FieldMask}.
|
||||
*/
|
||||
public class FieldMaskUtil {
|
||||
private static final String FIELD_PATH_SEPARATOR = ",";
|
||||
private static final String FIELD_PATH_SEPARATOR_REGEX = ",";
|
||||
private static final String FIELD_SEPARATOR_REGEX = "\\.";
|
||||
|
||||
private FieldMaskUtil() {}
|
||||
|
||||
/**
|
||||
* Converts a FieldMask to a string.
|
||||
*/
|
||||
public static String toString(FieldMask fieldMask) {
|
||||
StringBuilder result = new StringBuilder();
|
||||
boolean first = true;
|
||||
for (String value : fieldMask.getPathsList()) {
|
||||
if (value.isEmpty()) {
|
||||
// Ignore empty paths.
|
||||
continue;
|
||||
}
|
||||
if (first) {
|
||||
first = false;
|
||||
} else {
|
||||
result.append(FIELD_PATH_SEPARATOR);
|
||||
}
|
||||
result.append(value);
|
||||
}
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses from a string to a FieldMask.
|
||||
*/
|
||||
public static FieldMask fromString(String value) {
|
||||
return fromStringList(
|
||||
null, Arrays.asList(value.split(FIELD_PATH_SEPARATOR_REGEX)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses from a string to a FieldMask and validates all field paths.
|
||||
*
|
||||
* @throws IllegalArgumentException if any of the field paths is invalid.
|
||||
*/
|
||||
public static FieldMask fromString(Class<? extends Message> type, String value)
|
||||
throws IllegalArgumentException {
|
||||
return fromStringList(
|
||||
type, Arrays.asList(value.split(FIELD_PATH_SEPARATOR_REGEX)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a FieldMask for a list of field paths in a certain type.
|
||||
*
|
||||
* @throws IllegalArgumentException if any of the field paths is not valid.
|
||||
*/
|
||||
public static FieldMask fromStringList(
|
||||
Class<? extends Message> type, List<String> paths)
|
||||
throws IllegalArgumentException {
|
||||
FieldMask.Builder builder = FieldMask.newBuilder();
|
||||
for (String path : paths) {
|
||||
if (path.isEmpty()) {
|
||||
// Ignore empty field paths.
|
||||
continue;
|
||||
}
|
||||
if (type != null && !isValid(type, path)) {
|
||||
throw new IllegalArgumentException(
|
||||
path + " is not a valid path for " + type);
|
||||
}
|
||||
builder.addPaths(path);
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether a given field path is valid.
|
||||
*/
|
||||
public static boolean isValid(Class<? extends Message> type, String path) {
|
||||
String[] parts = path.split(FIELD_SEPARATOR_REGEX);
|
||||
if (parts.length == 0) {
|
||||
return false;
|
||||
}
|
||||
Descriptor descriptor =
|
||||
Internal.getDefaultInstance(type).getDescriptorForType();
|
||||
for (String name : parts) {
|
||||
if (descriptor == null) {
|
||||
return false;
|
||||
}
|
||||
FieldDescriptor field = descriptor.findFieldByName(name);
|
||||
if (field == null) {
|
||||
return false;
|
||||
}
|
||||
if (!field.isRepeated()
|
||||
&& field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
|
||||
descriptor = field.getMessageType();
|
||||
} else {
|
||||
descriptor = null;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a FieldMask to its canonical form. In the canonical form of a
|
||||
* FieldMask, all field paths are sorted alphabetically and redundant field
|
||||
* paths are removed.
|
||||
*/
|
||||
public static FieldMask normalize(FieldMask mask) {
|
||||
return new FieldMaskTree(mask).toFieldMask();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a union of two FieldMasks.
|
||||
*/
|
||||
public static FieldMask union(FieldMask mask1, FieldMask mask2) {
|
||||
return new FieldMaskTree(mask1).mergeFromFieldMask(mask2).toFieldMask();
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the intersection of two FieldMasks.
|
||||
*/
|
||||
public static FieldMask intersection(FieldMask mask1, FieldMask mask2) {
|
||||
FieldMaskTree tree = new FieldMaskTree(mask1);
|
||||
FieldMaskTree result = new FieldMaskTree();
|
||||
for (String path : mask2.getPathsList()) {
|
||||
tree.intersectFieldPath(path, result);
|
||||
}
|
||||
return result.toFieldMask();
|
||||
}
|
||||
|
||||
/**
|
||||
* Options to customize merging behavior.
|
||||
*/
|
||||
public static class MergeOptions {
|
||||
private boolean replaceMessageFields = false;
|
||||
private boolean replaceRepeatedFields = false;
|
||||
|
||||
/**
|
||||
* Whether to replace message fields (i.e., discard existing content in
|
||||
* destination message fields) when merging.
|
||||
* Default behavior is to merge the source message field into the
|
||||
* destination message field.
|
||||
*/
|
||||
public boolean replaceMessageFields() {
|
||||
return replaceMessageFields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether to replace repeated fields (i.e., discard existing content in
|
||||
* destination repeated fields) when merging.
|
||||
* Default behavior is to append elements from source repeated field to the
|
||||
* destination repeated field.
|
||||
*/
|
||||
public boolean replaceRepeatedFields() {
|
||||
return replaceRepeatedFields;
|
||||
}
|
||||
|
||||
public void setReplaceMessageFields(boolean value) {
|
||||
replaceMessageFields = value;
|
||||
}
|
||||
|
||||
public void setReplaceRepeatedFields(boolean value) {
|
||||
replaceRepeatedFields = value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges fields specified by a FieldMask from one message to another.
|
||||
*/
|
||||
public static void merge(FieldMask mask, Message source,
|
||||
Message.Builder destination, MergeOptions options) {
|
||||
new FieldMaskTree(mask).merge(source, destination, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges fields specified by a FieldMask from one message to another.
|
||||
*/
|
||||
public static void merge(FieldMask mask, Message source,
|
||||
Message.Builder destination) {
|
||||
merge(mask, source, destination, new MergeOptions());
|
||||
}
|
||||
}
|
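As a hedged usage sketch of the FieldMaskUtil API defined above (illustrative only, not part of this change), the mask-manipulation helpers compose as follows; the expected outputs follow from the union/intersection semantics exercised in the tests below:

    import com.google.protobuf.FieldMask;
    import com.google.protobuf.util.FieldMaskUtil;

    public class FieldMaskUtilExample {
      public static void main(String[] args) {
        FieldMask mask1 = FieldMaskUtil.fromString("foo,bar.baz");
        FieldMask mask2 = FieldMaskUtil.fromString("bar,foo.qux");
        // Union collapses "bar.baz" into "bar" and "foo.qux" into "foo".
        System.out.println(FieldMaskUtil.toString(FieldMaskUtil.union(mask1, mask2)));        // bar,foo
        // Intersection keeps only the parts covered by both masks.
        System.out.println(FieldMaskUtil.toString(FieldMaskUtil.intersection(mask1, mask2))); // bar.baz,foo.qux
        // Normalization sorts paths alphabetically and drops redundant ones.
        System.out.println(FieldMaskUtil.toString(FieldMaskUtil.normalize(
            FieldMask.newBuilder().addPaths("foo.bar").addPaths("foo").build())));            // foo
      }
    }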
1571  java/util/src/main/java/com/google/protobuf/util/JsonFormat.java  (new file; diff suppressed because it is too large)
545  java/util/src/main/java/com/google/protobuf/util/TimeUtil.java  (new file)
@ -0,0 +1,545 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package com.google.protobuf.util;
|
||||
|
||||
import com.google.protobuf.Duration;
|
||||
import com.google.protobuf.Timestamp;
|
||||
|
||||
import java.math.BigInteger;
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Date;
|
||||
import java.util.GregorianCalendar;
|
||||
import java.util.TimeZone;
|
||||
|
||||
/**
|
||||
* Utilities to help create/manipulate Timestamp/Duration.
|
||||
*/
|
||||
public class TimeUtil {
|
||||
// Timestamp for "0001-01-01T00:00:00Z"
|
||||
public static final long TIMESTAMP_SECONDS_MIN = -62135596800L;
|
||||
|
||||
// Timestamp for "9999-12-31T23:59:59Z"
|
||||
public static final long TIMESTAMP_SECONDS_MAX = 253402300799L;
|
||||
public static final long DURATION_SECONDS_MIN = -315576000000L;
|
||||
public static final long DURATION_SECONDS_MAX = 315576000000L;
|
||||
|
||||
private static final long NANOS_PER_SECOND = 1000000000;
|
||||
private static final long NANOS_PER_MILLISECOND = 1000000;
|
||||
private static final long NANOS_PER_MICROSECOND = 1000;
|
||||
private static final long MILLIS_PER_SECOND = 1000;
|
||||
private static final long MICROS_PER_SECOND = 1000000;
|
||||
|
||||
private static final SimpleDateFormat timestampFormat =
|
||||
createTimestampFormat();
|
||||
|
||||
private static SimpleDateFormat createTimestampFormat() {
|
||||
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
|
||||
GregorianCalendar calendar =
|
||||
new GregorianCalendar(TimeZone.getTimeZone("UTC"));
|
||||
// We use Proleptic Gregorian Calendar (i.e., Gregorian calendar extends
|
||||
// backwards to year one) for timestamp formatting.
|
||||
calendar.setGregorianChange(new Date(Long.MIN_VALUE));
|
||||
sdf.setCalendar(calendar);
|
||||
return sdf;
|
||||
}
|
||||
|
||||
private TimeUtil() {}
|
||||
|
||||
/**
|
||||
* Convert Timestamp to RFC 3339 date string format. The output will always
|
||||
* be Z-normalized and uses 3, 6 or 9 fractional digits as required to
|
||||
* represent the exact value. Note that Timestamp can only represent time
|
||||
* from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. See
|
||||
* https://www.ietf.org/rfc/rfc3339.txt
|
||||
*
|
||||
* <p>Example of generated format: "1972-01-01T10:00:20.021Z"
|
||||
*
|
||||
* @return The string representation of the given timestamp.
|
||||
* @throws IllegalArgumentException if the given timestamp is not in the
|
||||
* valid range.
|
||||
*/
|
||||
public static String toString(Timestamp timestamp)
|
||||
throws IllegalArgumentException {
|
||||
StringBuilder result = new StringBuilder();
|
||||
// Format the seconds part.
|
||||
if (timestamp.getSeconds() < TIMESTAMP_SECONDS_MIN
|
||||
|| timestamp.getSeconds() > TIMESTAMP_SECONDS_MAX) {
|
||||
throw new IllegalArgumentException("Timestamp is out of range.");
|
||||
}
|
||||
Date date = new Date(timestamp.getSeconds() * MILLIS_PER_SECOND);
|
||||
result.append(timestampFormat.format(date));
|
||||
// Format the nanos part.
|
||||
if (timestamp.getNanos() < 0 || timestamp.getNanos() >= NANOS_PER_SECOND) {
|
||||
throw new IllegalArgumentException("Timestamp has invalid nanos value.");
|
||||
}
|
||||
if (timestamp.getNanos() != 0) {
|
||||
result.append(".");
|
||||
result.append(formatNanos(timestamp.getNanos()));
|
||||
}
|
||||
result.append("Z");
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse from RFC 3339 date string to Timestamp. This method accepts all
|
||||
* outputs of {@link #toString(Timestamp)} and it also accepts any fractional
|
||||
* digits (or none) and any offset as long as they fit into nanosecond
|
||||
* precision.
|
||||
*
|
||||
* <p>Example of accepted format: "1972-01-01T10:00:20.021-05:00"
|
||||
*
|
||||
* @return A Timestamp parsed from the string.
|
||||
* @throws ParseException if parsing fails.
|
||||
*/
|
||||
|
||||
public static Timestamp parseTimestamp(String value) throws ParseException {
|
||||
int dayOffset = value.indexOf('T');
|
||||
if (dayOffset == -1) {
|
||||
throw new ParseException(
|
||||
"Failed to parse timestamp: invalid timestamp \"" + value + "\"", 0);
|
||||
}
|
||||
int timezoneOffsetPosition = value.indexOf('Z', dayOffset);
|
||||
if (timezoneOffsetPosition == -1) {
|
||||
timezoneOffsetPosition = value.indexOf('+', dayOffset);
|
||||
}
|
||||
if (timezoneOffsetPosition == -1) {
|
||||
timezoneOffsetPosition = value.indexOf('-', dayOffset);
|
||||
}
|
||||
if (timezoneOffsetPosition == -1) {
|
||||
throw new ParseException(
|
||||
"Failed to parse timestamp: missing valid timezone offset.", 0);
|
||||
}
|
||||
// Parse seconds and nanos.
|
||||
String timeValue = value.substring(0, timezoneOffsetPosition);
|
||||
String secondValue = timeValue;
|
||||
String nanoValue = "";
|
||||
int pointPosition = timeValue.indexOf('.');
|
||||
if (pointPosition != -1) {
|
||||
secondValue = timeValue.substring(0, pointPosition);
|
||||
nanoValue = timeValue.substring(pointPosition + 1);
|
||||
}
|
||||
Date date = timestampFormat.parse(secondValue);
|
||||
long seconds = date.getTime() / MILLIS_PER_SECOND;
|
||||
int nanos = nanoValue.isEmpty() ? 0 : parseNanos(nanoValue);
|
||||
// Parse timezone offsets.
|
||||
if (value.charAt(timezoneOffsetPosition) == 'Z') {
|
||||
if (value.length() != timezoneOffsetPosition + 1) {
|
||||
throw new ParseException(
|
||||
"Failed to parse timestamp: invalid trailing data \""
|
||||
+ value.substring(timezoneOffsetPosition) + "\"", 0);
|
||||
}
|
||||
} else {
|
||||
String offsetValue = value.substring(timezoneOffsetPosition + 1);
|
||||
long offset = parseTimezoneOffset(offsetValue);
|
||||
if (value.charAt(timezoneOffsetPosition) == '+') {
|
||||
seconds -= offset;
|
||||
} else {
|
||||
seconds += offset;
|
||||
}
|
||||
}
|
||||
try {
|
||||
return normalizedTimestamp(seconds, nanos);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new ParseException(
|
||||
"Failed to parse timestmap: timestamp is out of range.", 0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Duration to string format. The string format will contain 3, 6,
|
||||
* or 9 fractional digits depending on the precision required to represent
|
||||
* the exact Duration value. For example: "1s", "1.010s", "1.000000100s",
|
||||
* "-3.100s" The range that can be represented by Duration is from
|
||||
* -315,576,000,000 to +315,576,000,000 inclusive (in seconds).
|
||||
*
|
||||
* @return The string representation of the given duration.
|
||||
* @throws IllegalArgumentException if the given duration is not in the valid
|
||||
* range.
|
||||
*/
|
||||
public static String toString(Duration duration)
|
||||
throws IllegalArgumentException {
|
||||
if (duration.getSeconds() < DURATION_SECONDS_MIN
|
||||
|| duration.getSeconds() > DURATION_SECONDS_MAX) {
|
||||
throw new IllegalArgumentException("Duration is out of valid range.");
|
||||
}
|
||||
StringBuilder result = new StringBuilder();
|
||||
long seconds = duration.getSeconds();
|
||||
int nanos = duration.getNanos();
|
||||
if (seconds < 0 || nanos < 0) {
|
||||
if (seconds > 0 || nanos > 0) {
|
||||
throw new IllegalArgumentException(
|
||||
"Invalid duration: seconds value and nanos value must have the same"
|
||||
+ "sign.");
|
||||
}
|
||||
result.append("-");
|
||||
seconds = -seconds;
|
||||
nanos = -nanos;
|
||||
}
|
||||
result.append(seconds);
|
||||
if (nanos != 0) {
|
||||
result.append(".");
|
||||
result.append(formatNanos(nanos));
|
||||
}
|
||||
result.append("s");
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse from a string to produce a duration.
|
||||
*
|
||||
* @return A Duration parsed from the string.
|
||||
* @throws ParseException if parsing fails.
|
||||
*/
|
||||
public static Duration parseDuration(String value) throws ParseException {
|
||||
// Must end with "s".
|
||||
if (value.isEmpty() || value.charAt(value.length() - 1) != 's') {
|
||||
throw new ParseException("Invalid duration string: " + value, 0);
|
||||
}
|
||||
boolean negative = false;
|
||||
if (value.charAt(0) == '-') {
|
||||
negative = true;
|
||||
value = value.substring(1);
|
||||
}
|
||||
String secondValue = value.substring(0, value.length() - 1);
|
||||
String nanoValue = "";
|
||||
int pointPosition = secondValue.indexOf('.');
|
||||
if (pointPosition != -1) {
|
||||
nanoValue = secondValue.substring(pointPosition + 1);
|
||||
secondValue = secondValue.substring(0, pointPosition);
|
||||
}
|
||||
long seconds = Long.parseLong(secondValue);
|
||||
int nanos = nanoValue.isEmpty() ? 0 : parseNanos(nanoValue);
|
||||
if (seconds < 0) {
|
||||
throw new ParseException("Invalid duration string: " + value, 0);
|
||||
}
|
||||
if (negative) {
|
||||
seconds = -seconds;
|
||||
nanos = -nanos;
|
||||
}
|
||||
try {
|
||||
return normalizedDuration(seconds, nanos);
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new ParseException("Duration value is out of range.", 0);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a Timestamp from the number of milliseconds elapsed from the epoch.
|
||||
*/
|
||||
public static Timestamp createTimestampFromMillis(long milliseconds) {
|
||||
return normalizedTimestamp(milliseconds / MILLIS_PER_SECOND,
|
||||
(int) (milliseconds % MILLIS_PER_SECOND * NANOS_PER_MILLISECOND));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a Duration from the number of milliseconds.
|
||||
*/
|
||||
public static Duration createDurationFromMillis(long milliseconds) {
|
||||
return normalizedDuration(milliseconds / MILLIS_PER_SECOND,
|
||||
(int) (milliseconds % MILLIS_PER_SECOND * NANOS_PER_MILLISECOND));
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a Timestamp to the number of milliseconds elapsed from the epoch.
|
||||
*
|
||||
* <p>The result will be rounded down to the nearest millisecond. E.g., if the
|
||||
* timestamp represents "1969-12-31T23:59:59.999999999Z", it will be rounded
|
||||
* to -1 millisecond.
|
||||
*/
|
||||
public static long toMillis(Timestamp timestamp) {
|
||||
return timestamp.getSeconds() * MILLIS_PER_SECOND + timestamp.getNanos()
|
||||
/ NANOS_PER_MILLISECOND;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a Duration to the number of milliseconds. The result will be
|
||||
* rounded towards 0 to the nearest millisecond. E.g., if the duration
|
||||
* represents -1 nanosecond, it will be rounded to 0.
|
||||
*/
|
||||
public static long toMillis(Duration duration) {
|
||||
return duration.getSeconds() * MILLIS_PER_SECOND + duration.getNanos()
|
||||
/ NANOS_PER_MILLISECOND;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a Timestamp from the number of microseconds elapsed from the epoch.
|
||||
*/
|
||||
public static Timestamp createTimestampFromMicros(long microseconds) {
|
||||
return normalizedTimestamp(microseconds / MICROS_PER_SECOND,
|
||||
(int) (microseconds % MICROS_PER_SECOND * NANOS_PER_MICROSECOND));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a Duration from the number of microseconds.
|
||||
*/
|
||||
public static Duration createDurationFromMicros(long microseconds) {
|
||||
return normalizedDuration(microseconds / MICROS_PER_SECOND,
|
||||
(int) (microseconds % MICROS_PER_SECOND * NANOS_PER_MICROSECOND));
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a Timestamp to the number of microseconds elapsed from the epoch.
|
||||
*
|
||||
* <p>The result will be rounded down to the nearest microsecond. E.g., if the
|
||||
* timestamp represents "1969-12-31T23:59:59.999999999Z", it will be rounded
|
||||
* to -1 microsecond.
|
||||
*/
|
||||
public static long toMicros(Timestamp timestamp) {
|
||||
return timestamp.getSeconds() * MICROS_PER_SECOND + timestamp.getNanos()
|
||||
/ NANOS_PER_MICROSECOND;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a Duration to the number of microseconds. The result will be
|
||||
* rounded towards 0 to the nearest microsecond. E.g., if the duration
|
||||
* represents -1 nanosecond, it will be rounded to 0.
|
||||
*/
|
||||
public static long toMicros(Duration duration) {
|
||||
return duration.getSeconds() * MICROS_PER_SECOND + duration.getNanos()
|
||||
/ NANOS_PER_MICROSECOND;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a Timestamp from the number of nanoseconds elapsed from the epoch.
|
||||
*/
|
||||
public static Timestamp createTimestampFromNanos(long nanoseconds) {
|
||||
return normalizedTimestamp(nanoseconds / NANOS_PER_SECOND,
|
||||
(int) (nanoseconds % NANOS_PER_SECOND));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a Duration from the number of nanoseconds.
|
||||
*/
|
||||
public static Duration createDurationFromNanos(long nanoseconds) {
|
||||
return normalizedDuration(nanoseconds / NANOS_PER_SECOND,
|
||||
(int) (nanoseconds % NANOS_PER_SECOND));
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a Timestamp to the number of nanoseconds elapsed from the epoch.
|
||||
*/
|
||||
public static long toNanos(Timestamp timestamp) {
|
||||
return timestamp.getSeconds() * NANOS_PER_SECOND + timestamp.getNanos();
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a Duration to the number of nanoseconds.
|
||||
*/
|
||||
public static long toNanos(Duration duration) {
|
||||
return duration.getSeconds() * NANOS_PER_SECOND + duration.getNanos();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current time.
|
||||
*/
|
||||
public static Timestamp getCurrentTime() {
|
||||
return createTimestampFromMillis(System.currentTimeMillis());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the epoch.
|
||||
*/
|
||||
public static Timestamp getEpoch() {
|
||||
return Timestamp.getDefaultInstance();
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate the difference between two timestamps.
|
||||
*/
|
||||
public static Duration distance(Timestamp from, Timestamp to) {
|
||||
return normalizedDuration(to.getSeconds() - from.getSeconds(),
|
||||
to.getNanos() - from.getNanos());
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a duration to a timestamp.
|
||||
*/
|
||||
public static Timestamp add(Timestamp start, Duration length) {
|
||||
return normalizedTimestamp(start.getSeconds() + length.getSeconds(),
|
||||
start.getNanos() + length.getNanos());
|
||||
}
|
||||
|
||||
/**
|
||||
* Subtract a duration from a timestamp.
|
||||
*/
|
||||
public static Timestamp subtract(Timestamp start, Duration length) {
|
||||
return normalizedTimestamp(start.getSeconds() - length.getSeconds(),
|
||||
start.getNanos() - length.getNanos());
|
||||
}
|
||||
|
||||
/**
|
||||
* Add two durations.
|
||||
*/
|
||||
public static Duration add(Duration d1, Duration d2) {
|
||||
return normalizedDuration(d1.getSeconds() + d2.getSeconds(),
|
||||
d1.getNanos() + d2.getNanos());
|
||||
}
|
||||
|
||||
/**
|
||||
* Subtract a duration from another.
|
||||
*/
|
||||
public static Duration subtract(Duration d1, Duration d2) {
|
||||
return normalizedDuration(d1.getSeconds() - d2.getSeconds(),
|
||||
d1.getNanos() - d2.getNanos());
|
||||
}
|
||||
|
||||
// Multiplications and divisions.
|
||||
|
||||
public static Duration multiply(Duration duration, double times) {
|
||||
double result = duration.getSeconds() * times + duration.getNanos() * times
|
||||
/ 1000000000.0;
|
||||
if (result < Long.MIN_VALUE || result > Long.MAX_VALUE) {
|
||||
throw new IllegalArgumentException("Result is out of valid range.");
|
||||
}
|
||||
long seconds = (long) result;
|
||||
int nanos = (int) ((result - seconds) * 1000000000);
|
||||
return normalizedDuration(seconds, nanos);
|
||||
}
|
||||
|
||||
public static Duration divide(Duration duration, double value) {
|
||||
return multiply(duration, 1.0 / value);
|
||||
}
|
||||
|
||||
public static Duration multiply(Duration duration, long times) {
|
||||
return createDurationFromBigInteger(
|
||||
toBigInteger(duration).multiply(toBigInteger(times)));
|
||||
}
|
||||
|
||||
public static Duration divide(Duration duration, long times) {
|
||||
return createDurationFromBigInteger(
|
||||
toBigInteger(duration).divide(toBigInteger(times)));
|
||||
}
|
||||
|
||||
public static long divide(Duration d1, Duration d2) {
|
||||
return toBigInteger(d1).divide(toBigInteger(d2)).longValue();
|
||||
}
|
||||
|
||||
public static Duration remainder(Duration d1, Duration d2) {
|
||||
return createDurationFromBigInteger(
|
||||
toBigInteger(d1).remainder(toBigInteger(d2)));
|
||||
}
|
||||
|
||||
private static final BigInteger NANOS_PER_SECOND_BIG_INTEGER =
|
||||
new BigInteger(String.valueOf(NANOS_PER_SECOND));
|
||||
|
||||
private static BigInteger toBigInteger(Duration duration) {
|
||||
return toBigInteger(duration.getSeconds())
|
||||
.multiply(NANOS_PER_SECOND_BIG_INTEGER)
|
||||
.add(toBigInteger(duration.getNanos()));
|
||||
}
|
||||
|
||||
private static BigInteger toBigInteger(long value) {
|
||||
return new BigInteger(String.valueOf(value));
|
||||
}
|
||||
|
||||
private static Duration createDurationFromBigInteger(BigInteger value) {
|
||||
long seconds = value.divide(
|
||||
new BigInteger(String.valueOf(NANOS_PER_SECOND))).longValue();
|
||||
int nanos = value.remainder(
|
||||
new BigInteger(String.valueOf(NANOS_PER_SECOND))).intValue();
|
||||
return normalizedDuration(seconds, nanos);
|
||||
|
||||
}
|
||||
|
||||
private static Duration normalizedDuration(long seconds, int nanos) {
|
||||
if (nanos <= -NANOS_PER_SECOND || nanos >= NANOS_PER_SECOND) {
|
||||
seconds += nanos / NANOS_PER_SECOND;
|
||||
nanos %= NANOS_PER_SECOND;
|
||||
}
|
||||
if (seconds > 0 && nanos < 0) {
|
||||
nanos += NANOS_PER_SECOND;
|
||||
seconds -= 1;
|
||||
}
|
||||
if (seconds < 0 && nanos > 0) {
|
||||
nanos -= NANOS_PER_SECOND;
|
||||
seconds += 1;
|
||||
}
|
||||
if (seconds < DURATION_SECONDS_MIN || seconds > DURATION_SECONDS_MAX) {
|
||||
throw new IllegalArgumentException("Duration is out of valid range.");
|
||||
}
|
||||
return Duration.newBuilder().setSeconds(seconds).setNanos(nanos).build();
|
||||
}
|
||||
|
||||
private static Timestamp normalizedTimestamp(long seconds, int nanos) {
|
||||
if (nanos <= -NANOS_PER_SECOND || nanos >= NANOS_PER_SECOND) {
|
||||
seconds += nanos / NANOS_PER_SECOND;
|
||||
nanos %= NANOS_PER_SECOND;
|
||||
}
|
||||
if (nanos < 0) {
|
||||
nanos += NANOS_PER_SECOND;
|
||||
seconds -= 1;
|
||||
}
|
||||
if (seconds < TIMESTAMP_SECONDS_MIN || seconds > TIMESTAMP_SECONDS_MAX) {
|
||||
throw new IllegalArgumentException("Timestamp is out of valid range.");
|
||||
}
|
||||
return Timestamp.newBuilder().setSeconds(seconds).setNanos(nanos).build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Format the nano part of a timestamp or a duration.
|
||||
*/
|
||||
private static String formatNanos(int nanos) {
|
||||
assert nanos >= 1 && nanos <= 999999999;
|
||||
// Determine whether to use 3, 6, or 9 digits for the nano part.
|
||||
if (nanos % NANOS_PER_MILLISECOND == 0) {
|
||||
return String.format("%1$03d", nanos / NANOS_PER_MILLISECOND);
|
||||
} else if (nanos % NANOS_PER_MICROSECOND == 0) {
|
||||
return String.format("%1$06d", nanos / NANOS_PER_MICROSECOND);
|
||||
} else {
|
||||
return String.format("%1$09d", nanos);
|
||||
}
|
||||
}
|
||||
|
||||
private static int parseNanos(String value) throws ParseException {
|
||||
int result = 0;
|
||||
for (int i = 0; i < 9; ++i) {
|
||||
result = result * 10;
|
||||
if (i < value.length()) {
|
||||
if (value.charAt(i) < '0' || value.charAt(i) > '9') {
|
||||
throw new ParseException("Invalid nanosecnds.", 0);
|
||||
}
|
||||
result += value.charAt(i) - '0';
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private static long parseTimezoneOffset(String value) throws ParseException {
|
||||
int pos = value.indexOf(':');
|
||||
if (pos == -1) {
|
||||
throw new ParseException("Invalid offset value: " + value, 0);
|
||||
}
|
||||
String hours = value.substring(0, pos);
|
||||
String minutes = value.substring(pos + 1);
|
||||
return (Long.parseLong(hours) * 60 + Long.parseLong(minutes)) * 60;
|
||||
}
|
||||
}
|
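A minimal usage sketch of the TimeUtil helpers above (illustrative only, not part of this change); parseTimestamp and parseDuration throw ParseException, so the example declares it:

    import com.google.protobuf.Duration;
    import com.google.protobuf.Timestamp;
    import com.google.protobuf.util.TimeUtil;
    import java.text.ParseException;

    public class TimeUtilExample {
      public static void main(String[] args) throws ParseException {
        // Offsets are accepted on input; output is always Z-normalized.
        Timestamp parsed = TimeUtil.parseTimestamp("1972-01-01T10:00:20.021-05:00");
        System.out.println(TimeUtil.toString(parsed));   // 1972-01-01T15:00:20.021Z
        Duration tenSeconds = TimeUtil.createDurationFromMillis(10000);
        Timestamp later = TimeUtil.add(parsed, tenSeconds);
        // distance() yields a Duration; toMillis() truncates to milliseconds.
        System.out.println(TimeUtil.toMillis(TimeUtil.distance(parsed, later)));  // 10000
      }
    }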
@ -0,0 +1,229 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package com.google.protobuf.util;
|
||||
|
||||
import protobuf_unittest.UnittestProto.NestedTestAllTypes;
|
||||
import protobuf_unittest.UnittestProto.TestAllTypes;
|
||||
import protobuf_unittest.UnittestProto.TestAllTypes.NestedMessage;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
public class FieldMaskTreeTest extends TestCase {
|
||||
public void testAddFieldPath() throws Exception {
|
||||
FieldMaskTree tree = new FieldMaskTree();
|
||||
assertEquals("", tree.toString());
|
||||
tree.addFieldPath("");
|
||||
assertEquals("", tree.toString());
|
||||
// New branch.
|
||||
tree.addFieldPath("foo");
|
||||
assertEquals("foo", tree.toString());
|
||||
// Redundant path.
|
||||
tree.addFieldPath("foo");
|
||||
assertEquals("foo", tree.toString());
|
||||
// New branch.
|
||||
tree.addFieldPath("bar.baz");
|
||||
assertEquals("bar.baz,foo", tree.toString());
|
||||
// Redundant sub-path.
|
||||
tree.addFieldPath("foo.bar");
|
||||
assertEquals("bar.baz,foo", tree.toString());
|
||||
// New branch from a non-root node.
|
||||
tree.addFieldPath("bar.quz");
|
||||
assertEquals("bar.baz,bar.quz,foo", tree.toString());
|
||||
// A path that matches several existing sub-paths.
|
||||
tree.addFieldPath("bar");
|
||||
assertEquals("bar,foo", tree.toString());
|
||||
}
|
||||
|
||||
public void testMergeFromFieldMask() throws Exception {
|
||||
FieldMaskTree tree = new FieldMaskTree(
|
||||
FieldMaskUtil.fromString("foo,bar.baz,bar.quz"));
|
||||
assertEquals("bar.baz,bar.quz,foo", tree.toString());
|
||||
tree.mergeFromFieldMask(
|
||||
FieldMaskUtil.fromString("foo.bar,bar"));
|
||||
assertEquals("bar,foo", tree.toString());
|
||||
}
|
||||
|
||||
public void testIntersectFieldPath() throws Exception {
|
||||
FieldMaskTree tree = new FieldMaskTree(
|
||||
FieldMaskUtil.fromString("foo,bar.baz,bar.quz"));
|
||||
FieldMaskTree result = new FieldMaskTree();
|
||||
// Empty path.
|
||||
tree.intersectFieldPath("", result);
|
||||
assertEquals("", result.toString());
|
||||
// Nonexistent path.
|
||||
tree.intersectFieldPath("quz", result);
|
||||
assertEquals("", result.toString());
|
||||
// Sub-path of an existing leaf.
|
||||
tree.intersectFieldPath("foo.bar", result);
|
||||
assertEquals("foo.bar", result.toString());
|
||||
// Match an existing leaf node.
|
||||
tree.intersectFieldPath("foo", result);
|
||||
assertEquals("foo", result.toString());
|
||||
// Nonexistent path.
|
||||
tree.intersectFieldPath("bar.foo", result);
|
||||
assertEquals("foo", result.toString());
|
||||
// Match a non-leaf node.
|
||||
tree.intersectFieldPath("bar", result);
|
||||
assertEquals("bar.baz,bar.quz,foo", result.toString());
|
||||
}
|
||||
|
||||
public void testMerge() throws Exception {
|
||||
TestAllTypes value = TestAllTypes.newBuilder()
|
||||
.setOptionalInt32(1234)
|
||||
.setOptionalNestedMessage(NestedMessage.newBuilder().setBb(5678))
|
||||
.addRepeatedInt32(4321)
|
||||
.addRepeatedNestedMessage(NestedMessage.newBuilder().setBb(8765))
|
||||
.build();
|
||||
NestedTestAllTypes source = NestedTestAllTypes.newBuilder()
|
||||
.setPayload(value)
|
||||
.setChild(NestedTestAllTypes.newBuilder().setPayload(value))
|
||||
.build();
|
||||
// Now we have a message source with the following structure:
|
||||
// [root] -+- payload -+- optional_int32
|
||||
// | +- optional_nested_message
|
||||
// | +- repeated_int32
|
||||
// | +- repeated_nested_message
|
||||
// |
|
||||
// +- child --- payload -+- optional_int32
|
||||
// +- optional_nested_message
|
||||
// +- repeated_int32
|
||||
// +- repeated_nested_message
|
||||
|
||||
FieldMaskUtil.MergeOptions options = new FieldMaskUtil.MergeOptions();
|
||||
|
||||
// Test merging each individual field.
|
||||
NestedTestAllTypes.Builder builder = NestedTestAllTypes.newBuilder();
|
||||
new FieldMaskTree().addFieldPath("payload.optional_int32")
|
||||
.merge(source, builder, options);
|
||||
NestedTestAllTypes.Builder expected = NestedTestAllTypes.newBuilder();
|
||||
expected.getPayloadBuilder().setOptionalInt32(1234);
|
||||
assertEquals(expected.build(), builder.build());
|
||||
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
new FieldMaskTree().addFieldPath("payload.optional_nested_message")
|
||||
.merge(source, builder, options);
|
||||
expected = NestedTestAllTypes.newBuilder();
|
||||
expected.getPayloadBuilder().setOptionalNestedMessage(
|
||||
NestedMessage.newBuilder().setBb(5678));
|
||||
assertEquals(expected.build(), builder.build());
|
||||
|
||||
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
new FieldMaskTree().addFieldPath("payload.repeated_int32")
|
||||
.merge(source, builder, options);
|
||||
expected = NestedTestAllTypes.newBuilder();
|
||||
expected.getPayloadBuilder().addRepeatedInt32(4321);
|
||||
assertEquals(expected.build(), builder.build());
|
||||
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
new FieldMaskTree().addFieldPath("payload.repeated_nested_message")
|
||||
.merge(source, builder, options);
|
||||
expected = NestedTestAllTypes.newBuilder();
|
||||
expected.getPayloadBuilder().addRepeatedNestedMessage(
|
||||
NestedMessage.newBuilder().setBb(8765));
|
||||
assertEquals(expected.build(), builder.build());
|
||||
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
new FieldMaskTree().addFieldPath("child.payload.optional_int32")
|
||||
.merge(source, builder, options);
|
||||
expected = NestedTestAllTypes.newBuilder();
|
||||
expected.getChildBuilder().getPayloadBuilder().setOptionalInt32(1234);
|
||||
assertEquals(expected.build(), builder.build());
|
||||
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
new FieldMaskTree().addFieldPath("child.payload.optional_nested_message")
|
||||
.merge(source, builder, options);
|
||||
expected = NestedTestAllTypes.newBuilder();
|
||||
expected.getChildBuilder().getPayloadBuilder().setOptionalNestedMessage(
|
||||
NestedMessage.newBuilder().setBb(5678));
|
||||
assertEquals(expected.build(), builder.build());
|
||||
|
||||
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
new FieldMaskTree().addFieldPath("child.payload.repeated_int32")
|
||||
.merge(source, builder, options);
|
||||
expected = NestedTestAllTypes.newBuilder();
|
||||
expected.getChildBuilder().getPayloadBuilder().addRepeatedInt32(4321);
|
||||
assertEquals(expected.build(), builder.build());
|
||||
|
||||
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
new FieldMaskTree().addFieldPath("child.payload.repeated_nested_message")
|
||||
.merge(source, builder, options);
|
||||
expected = NestedTestAllTypes.newBuilder();
|
||||
expected.getChildBuilder().getPayloadBuilder().addRepeatedNestedMessage(
|
||||
NestedMessage.newBuilder().setBb(8765));
|
||||
assertEquals(expected.build(), builder.build());
|
||||
|
||||
// Test merging all fields.
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
new FieldMaskTree().addFieldPath("child").addFieldPath("payload")
|
||||
.merge(source, builder, options);
|
||||
assertEquals(source, builder.build());
|
||||
|
||||
// Test repeated options.
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
builder.getPayloadBuilder().addRepeatedInt32(1000);
|
||||
new FieldMaskTree().addFieldPath("payload.repeated_int32")
|
||||
.merge(source, builder, options);
|
||||
// Default behavior is to append repeated fields.
|
||||
assertEquals(2, builder.getPayload().getRepeatedInt32Count());
|
||||
assertEquals(1000, builder.getPayload().getRepeatedInt32(0));
|
||||
assertEquals(4321, builder.getPayload().getRepeatedInt32(1));
|
||||
// Change to replace repeated fields.
|
||||
options.setReplaceRepeatedFields(true);
|
||||
new FieldMaskTree().addFieldPath("payload.repeated_int32")
|
||||
.merge(source, builder, options);
|
||||
assertEquals(1, builder.getPayload().getRepeatedInt32Count());
|
||||
assertEquals(4321, builder.getPayload().getRepeatedInt32(0));
|
||||
|
||||
// Test message options.
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
builder.getPayloadBuilder().setOptionalInt32(1000);
|
||||
builder.getPayloadBuilder().setOptionalUint32(2000);
|
||||
new FieldMaskTree().addFieldPath("payload")
|
||||
.merge(source, builder, options);
|
||||
// Default behavior is to merge message fields.
|
||||
assertEquals(1234, builder.getPayload().getOptionalInt32());
|
||||
assertEquals(2000, builder.getPayload().getOptionalUint32());
|
||||
|
||||
// Change to replace message fields.
|
||||
options.setReplaceMessageFields(true);
|
||||
builder = NestedTestAllTypes.newBuilder();
|
||||
builder.getPayloadBuilder().setOptionalInt32(1000);
|
||||
builder.getPayloadBuilder().setOptionalUint32(2000);
|
||||
new FieldMaskTree().addFieldPath("payload")
|
||||
.merge(source, builder, options);
|
||||
assertEquals(1234, builder.getPayload().getOptionalInt32());
|
||||
assertEquals(0, builder.getPayload().getOptionalUint32());
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,135 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package com.google.protobuf.util;
|
||||
|
||||
import com.google.protobuf.FieldMask;
|
||||
import protobuf_unittest.UnittestProto.NestedTestAllTypes;
|
||||
import protobuf_unittest.UnittestProto.TestAllTypes;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
/** Unit tests for {@link FieldMaskUtil}. */
|
||||
public class FieldMaskUtilTest extends TestCase {
|
||||
public void testIsValid() throws Exception {
|
||||
assertTrue(FieldMaskUtil.isValid(NestedTestAllTypes.class, "payload"));
|
||||
assertFalse(FieldMaskUtil.isValid(NestedTestAllTypes.class, "nonexist"));
|
||||
assertTrue(FieldMaskUtil.isValid(
|
||||
NestedTestAllTypes.class, "payload.optional_int32"));
|
||||
assertTrue(FieldMaskUtil.isValid(
|
||||
NestedTestAllTypes.class, "payload.repeated_int32"));
|
||||
assertTrue(FieldMaskUtil.isValid(
|
||||
NestedTestAllTypes.class, "payload.optional_nested_message"));
|
||||
assertTrue(FieldMaskUtil.isValid(
|
||||
NestedTestAllTypes.class, "payload.repeated_nested_message"));
|
||||
assertFalse(FieldMaskUtil.isValid(
|
||||
NestedTestAllTypes.class, "payload.nonexist"));
|
||||
|
||||
assertTrue(FieldMaskUtil.isValid(
|
||||
NestedTestAllTypes.class, "payload.optional_nested_message.bb"));
|
||||
// Repeated fields cannot have sub-paths.
|
||||
assertFalse(FieldMaskUtil.isValid(
|
||||
NestedTestAllTypes.class, "payload.repeated_nested_message.bb"));
|
||||
// Non-message fields cannot have sub-paths.
|
||||
assertFalse(FieldMaskUtil.isValid(
|
||||
NestedTestAllTypes.class, "payload.optional_int32.bb"));
|
||||
}
|
||||
|
||||
public void testToString() throws Exception {
|
||||
assertEquals("", FieldMaskUtil.toString(FieldMask.getDefaultInstance()));
|
||||
FieldMask mask = FieldMask.newBuilder().addPaths("foo").build();
|
||||
assertEquals("foo", FieldMaskUtil.toString(mask));
|
||||
mask = FieldMask.newBuilder().addPaths("foo").addPaths("bar").build();
|
||||
assertEquals("foo,bar", FieldMaskUtil.toString(mask));
|
||||
|
||||
// Empty field paths are ignored.
|
||||
mask = FieldMask.newBuilder().addPaths("").addPaths("foo").addPaths("").
|
||||
addPaths("bar").addPaths("").build();
|
||||
assertEquals("foo,bar", FieldMaskUtil.toString(mask));
|
||||
}
|
||||
|
||||
public void testFromString() throws Exception {
|
||||
FieldMask mask = FieldMaskUtil.fromString("");
|
||||
assertEquals(0, mask.getPathsCount());
|
||||
mask = FieldMaskUtil.fromString("foo");
|
||||
assertEquals(1, mask.getPathsCount());
|
||||
assertEquals("foo", mask.getPaths(0));
|
||||
mask = FieldMaskUtil.fromString("foo,bar.baz");
|
||||
assertEquals(2, mask.getPathsCount());
|
||||
assertEquals("foo", mask.getPaths(0));
|
||||
assertEquals("bar.baz", mask.getPaths(1));
|
||||
|
||||
// Empty field paths are ignored.
|
||||
mask = FieldMaskUtil.fromString(",foo,,bar,");
|
||||
assertEquals(2, mask.getPathsCount());
|
||||
assertEquals("foo", mask.getPaths(0));
|
||||
assertEquals("bar", mask.getPaths(1));
|
||||
|
||||
// Check whether the field paths are valid if a class parameter is provided.
|
||||
mask = FieldMaskUtil.fromString(NestedTestAllTypes.class, ",payload");
|
||||
|
||||
try {
|
||||
mask = FieldMaskUtil.fromString(
|
||||
NestedTestAllTypes.class, "payload,nonexist");
|
||||
fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
}
|
||||
|
||||
public void testUnion() throws Exception {
|
||||
// Only test a simple case here and expect
|
||||
// {@link FieldMaskTreeTest#testAddFieldPath} to cover all scenarios.
|
||||
FieldMask mask1 = FieldMaskUtil.fromString("foo,bar.baz,bar.quz");
|
||||
FieldMask mask2 = FieldMaskUtil.fromString("foo.bar,bar");
|
||||
FieldMask result = FieldMaskUtil.union(mask1, mask2);
|
||||
assertEquals("bar,foo", FieldMaskUtil.toString(result));
|
||||
}
|
||||
|
||||
public void testIntersection() throws Exception {
|
||||
// Only test a simple case here and expect
|
||||
// {@link FieldMaskTreeTest#testIntersectFieldPath} to cover all scenarios.
|
||||
FieldMask mask1 = FieldMaskUtil.fromString("foo,bar.baz,bar.quz");
|
||||
FieldMask mask2 = FieldMaskUtil.fromString("foo.bar,bar");
|
||||
FieldMask result = FieldMaskUtil.intersection(mask1, mask2);
|
||||
assertEquals("bar.baz,bar.quz,foo.bar", FieldMaskUtil.toString(result));
|
||||
}
|
||||
|
||||
public void testMerge() throws Exception {
|
||||
// Only test a simple case here and expect
|
||||
// {@link FieldMaskTreeTest#testMerge} to cover all scenarios.
|
||||
NestedTestAllTypes source = NestedTestAllTypes.newBuilder()
|
||||
.setPayload(TestAllTypes.newBuilder().setOptionalInt32(1234))
|
||||
.build();
|
||||
NestedTestAllTypes.Builder builder = NestedTestAllTypes.newBuilder();
|
||||
FieldMaskUtil.merge(FieldMaskUtil.fromString("payload"), source, builder);
|
||||
assertEquals(1234, builder.getPayload().getOptionalInt32());
|
||||
}
|
||||
}
|
@ -0,0 +1,976 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package com.google.protobuf.util;
|
||||
|
||||
import com.google.protobuf.Any;
|
||||
import com.google.protobuf.BoolValue;
|
||||
import com.google.protobuf.ByteString;
|
||||
import com.google.protobuf.BytesValue;
|
||||
import com.google.protobuf.DoubleValue;
|
||||
import com.google.protobuf.FloatValue;
|
||||
import com.google.protobuf.Int32Value;
|
||||
import com.google.protobuf.Int64Value;
|
||||
import com.google.protobuf.InvalidProtocolBufferException;
|
||||
import com.google.protobuf.ListValue;
|
||||
import com.google.protobuf.Message;
|
||||
import com.google.protobuf.StringValue;
|
||||
import com.google.protobuf.Struct;
|
||||
import com.google.protobuf.UInt32Value;
|
||||
import com.google.protobuf.UInt64Value;
|
||||
import com.google.protobuf.Value;
|
||||
import com.google.protobuf.util.JsonFormat.TypeRegistry;
|
||||
import com.google.protobuf.util.JsonTestProto.TestAllTypes;
|
||||
import com.google.protobuf.util.JsonTestProto.TestAllTypes.NestedEnum;
|
||||
import com.google.protobuf.util.JsonTestProto.TestAllTypes.NestedMessage;
|
||||
import com.google.protobuf.util.JsonTestProto.TestAny;
|
||||
import com.google.protobuf.util.JsonTestProto.TestDuration;
|
||||
import com.google.protobuf.util.JsonTestProto.TestFieldMask;
|
||||
import com.google.protobuf.util.JsonTestProto.TestMap;
|
||||
import com.google.protobuf.util.JsonTestProto.TestStruct;
|
||||
import com.google.protobuf.util.JsonTestProto.TestTimestamp;
|
||||
import com.google.protobuf.util.JsonTestProto.TestWrappers;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
|
||||
public class JsonFormatTest extends TestCase {
|
||||
private void setAllFields(TestAllTypes.Builder builder) {
|
||||
builder.setOptionalInt32(1234);
|
||||
builder.setOptionalInt64(1234567890123456789L);
|
||||
builder.setOptionalUint32(5678);
|
||||
builder.setOptionalUint64(2345678901234567890L);
|
||||
builder.setOptionalSint32(9012);
|
||||
builder.setOptionalSint64(3456789012345678901L);
|
||||
builder.setOptionalFixed32(3456);
|
||||
builder.setOptionalFixed64(4567890123456789012L);
|
||||
builder.setOptionalSfixed32(7890);
|
||||
builder.setOptionalSfixed64(5678901234567890123L);
|
||||
builder.setOptionalFloat(1.5f);
|
||||
builder.setOptionalDouble(1.25);
|
||||
builder.setOptionalBool(true);
|
||||
builder.setOptionalString("Hello world!");
|
||||
builder.setOptionalBytes(ByteString.copyFrom(new byte[]{0, 1, 2}));
|
||||
builder.setOptionalNestedEnum(NestedEnum.BAR);
|
||||
builder.getOptionalNestedMessageBuilder().setValue(100);
|
||||
|
||||
builder.addRepeatedInt32(1234);
|
||||
builder.addRepeatedInt64(1234567890123456789L);
|
||||
builder.addRepeatedUint32(5678);
|
||||
builder.addRepeatedUint64(2345678901234567890L);
|
||||
builder.addRepeatedSint32(9012);
|
||||
builder.addRepeatedSint64(3456789012345678901L);
|
||||
builder.addRepeatedFixed32(3456);
|
||||
builder.addRepeatedFixed64(4567890123456789012L);
|
||||
builder.addRepeatedSfixed32(7890);
|
||||
builder.addRepeatedSfixed64(5678901234567890123L);
|
||||
builder.addRepeatedFloat(1.5f);
|
||||
builder.addRepeatedDouble(1.25);
|
||||
builder.addRepeatedBool(true);
|
||||
builder.addRepeatedString("Hello world!");
|
||||
builder.addRepeatedBytes(ByteString.copyFrom(new byte[]{0, 1, 2}));
|
||||
builder.addRepeatedNestedEnum(NestedEnum.BAR);
|
||||
builder.addRepeatedNestedMessageBuilder().setValue(100);
|
||||
|
||||
builder.addRepeatedInt32(234);
|
||||
builder.addRepeatedInt64(234567890123456789L);
|
||||
builder.addRepeatedUint32(678);
|
||||
builder.addRepeatedUint64(345678901234567890L);
|
||||
builder.addRepeatedSint32(012);
|
||||
builder.addRepeatedSint64(456789012345678901L);
|
||||
builder.addRepeatedFixed32(456);
|
||||
builder.addRepeatedFixed64(567890123456789012L);
|
||||
builder.addRepeatedSfixed32(890);
|
||||
builder.addRepeatedSfixed64(678901234567890123L);
|
||||
builder.addRepeatedFloat(11.5f);
|
||||
builder.addRepeatedDouble(11.25);
|
||||
builder.addRepeatedBool(true);
|
||||
builder.addRepeatedString("ello world!");
|
||||
builder.addRepeatedBytes(ByteString.copyFrom(new byte[]{1, 2}));
|
||||
builder.addRepeatedNestedEnum(NestedEnum.BAZ);
|
||||
builder.addRepeatedNestedMessageBuilder().setValue(200);
|
||||
}
|
||||
|
||||
private void assertRoundTripEquals(Message message) throws Exception {
|
||||
assertRoundTripEquals(message, TypeRegistry.getEmptyTypeRegistry());
|
||||
}
|
||||
|
||||
private void assertRoundTripEquals(Message message, TypeRegistry registry) throws Exception {
|
||||
JsonFormat.Printer printer = JsonFormat.printer().usingTypeRegistry(registry);
|
||||
JsonFormat.Parser parser = JsonFormat.parser().usingTypeRegistry(registry);
|
||||
Message.Builder builder = message.newBuilderForType();
|
||||
parser.merge(printer.print(message), builder);
|
||||
Message parsedMessage = builder.build();
|
||||
assertEquals(message.toString(), parsedMessage.toString());
|
||||
}
|
||||
|
||||
private String toJsonString(Message message) throws IOException {
|
||||
return JsonFormat.printer().print(message);
|
||||
}
|
||||
|
||||
private void mergeFromJson(String json, Message.Builder builder) throws IOException {
|
||||
JsonFormat.parser().merge(json, builder);
|
||||
}
|
||||
|
||||
public void testAllFields() throws Exception {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
setAllFields(builder);
|
||||
TestAllTypes message = builder.build();
|
||||
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"optionalInt32\": 1234,\n"
|
||||
+ " \"optionalInt64\": \"1234567890123456789\",\n"
|
||||
+ " \"optionalUint32\": 5678,\n"
|
||||
+ " \"optionalUint64\": \"2345678901234567890\",\n"
|
||||
+ " \"optionalSint32\": 9012,\n"
|
||||
+ " \"optionalSint64\": \"3456789012345678901\",\n"
|
||||
+ " \"optionalFixed32\": 3456,\n"
|
||||
+ " \"optionalFixed64\": \"4567890123456789012\",\n"
|
||||
+ " \"optionalSfixed32\": 7890,\n"
|
||||
+ " \"optionalSfixed64\": \"5678901234567890123\",\n"
|
||||
+ " \"optionalFloat\": 1.5,\n"
|
||||
+ " \"optionalDouble\": 1.25,\n"
|
||||
+ " \"optionalBool\": true,\n"
|
||||
+ " \"optionalString\": \"Hello world!\",\n"
|
||||
+ " \"optionalBytes\": \"AAEC\",\n"
|
||||
+ " \"optionalNestedMessage\": {\n"
|
||||
+ " \"value\": 100\n"
|
||||
+ " },\n"
|
||||
+ " \"optionalNestedEnum\": \"BAR\",\n"
|
||||
+ " \"repeatedInt32\": [1234, 234],\n"
|
||||
+ " \"repeatedInt64\": [\"1234567890123456789\", \"234567890123456789\"],\n"
|
||||
+ " \"repeatedUint32\": [5678, 678],\n"
|
||||
+ " \"repeatedUint64\": [\"2345678901234567890\", \"345678901234567890\"],\n"
|
||||
+ " \"repeatedSint32\": [9012, 10],\n"
|
||||
+ " \"repeatedSint64\": [\"3456789012345678901\", \"456789012345678901\"],\n"
|
||||
+ " \"repeatedFixed32\": [3456, 456],\n"
|
||||
+ " \"repeatedFixed64\": [\"4567890123456789012\", \"567890123456789012\"],\n"
|
||||
+ " \"repeatedSfixed32\": [7890, 890],\n"
|
||||
+ " \"repeatedSfixed64\": [\"5678901234567890123\", \"678901234567890123\"],\n"
|
||||
+ " \"repeatedFloat\": [1.5, 11.5],\n"
|
||||
+ " \"repeatedDouble\": [1.25, 11.25],\n"
|
||||
+ " \"repeatedBool\": [true, true],\n"
|
||||
+ " \"repeatedString\": [\"Hello world!\", \"ello world!\"],\n"
|
||||
+ " \"repeatedBytes\": [\"AAEC\", \"AQI=\"],\n"
|
||||
+ " \"repeatedNestedMessage\": [{\n"
|
||||
+ " \"value\": 100\n"
|
||||
+ " }, {\n"
|
||||
+ " \"value\": 200\n"
|
||||
+ " }],\n"
|
||||
+ " \"repeatedNestedEnum\": [\"BAR\", \"BAZ\"]\n"
|
||||
+ "}",
|
||||
toJsonString(message));
|
||||
|
||||
assertRoundTripEquals(message);
|
||||
}
|
||||
|
||||
public void testUnknownEnumValues() throws Exception {
|
||||
// Unknown enum values will be dropped.
|
||||
// TODO(xiaofeng): We may want to revisit this (whether we should omit
|
||||
// unknown enum values).
|
||||
TestAllTypes message = TestAllTypes.newBuilder()
|
||||
.setOptionalNestedEnumValue(12345)
|
||||
.addRepeatedNestedEnumValue(12345)
|
||||
.addRepeatedNestedEnumValue(0)
|
||||
.build();
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"repeatedNestedEnum\": [\"FOO\"]\n"
|
||||
+ "}", toJsonString(message));
|
||||
|
||||
TestMap.Builder mapBuilder = TestMap.newBuilder();
|
||||
mapBuilder.getMutableInt32ToEnumMapValue().put(1, 0);
|
||||
mapBuilder.getMutableInt32ToEnumMapValue().put(2, 12345);
|
||||
TestMap mapMessage = mapBuilder.build();
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"int32ToEnumMap\": {\n"
|
||||
+ " \"1\": \"FOO\"\n"
|
||||
+ " }\n"
|
||||
+ "}", toJsonString(mapMessage));
|
||||
}
|
||||
|
||||
public void testSpecialFloatValues() throws Exception {
|
||||
TestAllTypes message = TestAllTypes.newBuilder()
|
||||
.addRepeatedFloat(Float.NaN)
|
||||
.addRepeatedFloat(Float.POSITIVE_INFINITY)
|
||||
.addRepeatedFloat(Float.NEGATIVE_INFINITY)
|
||||
.addRepeatedDouble(Double.NaN)
|
||||
.addRepeatedDouble(Double.POSITIVE_INFINITY)
|
||||
.addRepeatedDouble(Double.NEGATIVE_INFINITY)
|
||||
.build();
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"repeatedFloat\": [\"NaN\", \"Infinity\", \"-Infinity\"],\n"
|
||||
+ " \"repeatedDouble\": [\"NaN\", \"Infinity\", \"-Infinity\"]\n"
|
||||
+ "}", toJsonString(message));
|
||||
|
||||
assertRoundTripEquals(message);
|
||||
}
|
||||
|
||||
public void testParserAcceptStringForNumericField() throws Exception {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " \"optionalInt32\": \"1234\",\n"
|
||||
+ " \"optionalUint32\": \"5678\",\n"
|
||||
+ " \"optionalSint32\": \"9012\",\n"
|
||||
+ " \"optionalFixed32\": \"3456\",\n"
|
||||
+ " \"optionalSfixed32\": \"7890\",\n"
|
||||
+ " \"optionalFloat\": \"1.5\",\n"
|
||||
+ " \"optionalDouble\": \"1.25\",\n"
|
||||
+ " \"optionalBool\": \"true\"\n"
|
||||
+ "}", builder);
|
||||
TestAllTypes message = builder.build();
|
||||
assertEquals(1234, message.getOptionalInt32());
|
||||
assertEquals(5678, message.getOptionalUint32());
|
||||
assertEquals(9012, message.getOptionalSint32());
|
||||
assertEquals(3456, message.getOptionalFixed32());
|
||||
assertEquals(7890, message.getOptionalSfixed32());
|
||||
assertEquals(1.5f, message.getOptionalFloat());
|
||||
assertEquals(1.25, message.getOptionalDouble());
|
||||
assertEquals(true, message.getOptionalBool());
|
||||
}
|
||||
|
||||
private void assertRejects(String name, String value) {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
try {
|
||||
// Numeric form is rejected.
|
||||
mergeFromJson("{\"" + name + "\":" + value + "}", builder);
|
||||
fail("Exception is expected.");
|
||||
} catch (IOException e) {
|
||||
// Expected.
|
||||
}
|
||||
try {
|
||||
// String form is also rejected.
|
||||
mergeFromJson("{\"" + name + "\":\"" + value + "\"}", builder);
|
||||
fail("Exception is expected.");
|
||||
} catch (IOException e) {
|
||||
// Expected.
|
||||
}
|
||||
}
|
||||
|
||||
private void assertAccepts(String name, String value) throws IOException {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
// Both numeric form and string form are accepted.
|
||||
mergeFromJson("{\"" + name + "\":" + value + "}", builder);
|
||||
mergeFromJson("{\"" + name + "\":\"" + value + "\"}", builder);
|
||||
}
|
||||
|
||||
public void testParserRejectOutOfRangeNumericValues() throws Exception {
|
||||
assertAccepts("optionalInt32", String.valueOf(Integer.MAX_VALUE));
|
||||
assertAccepts("optionalInt32", String.valueOf(Integer.MIN_VALUE));
|
||||
assertRejects("optionalInt32", String.valueOf(Integer.MAX_VALUE + 1L));
|
||||
assertRejects("optionalInt32", String.valueOf(Integer.MIN_VALUE - 1L));
|
||||
|
||||
assertAccepts("optionalUint32", String.valueOf(Integer.MAX_VALUE + 1L));
|
||||
assertRejects("optionalUint32", "123456789012345");
|
||||
assertRejects("optionalUint32", "-1");
|
||||
|
||||
BigInteger one = new BigInteger("1");
|
||||
BigInteger maxLong = new BigInteger(String.valueOf(Long.MAX_VALUE));
|
||||
BigInteger minLong = new BigInteger(String.valueOf(Long.MIN_VALUE));
|
||||
assertAccepts("optionalInt64", maxLong.toString());
|
||||
assertAccepts("optionalInt64", minLong.toString());
|
||||
assertRejects("optionalInt64", maxLong.add(one).toString());
|
||||
assertRejects("optionalInt64", minLong.subtract(one).toString());
|
||||
|
||||
assertAccepts("optionalUint64", maxLong.add(one).toString());
|
||||
assertRejects("optionalUint64", "1234567890123456789012345");
|
||||
assertRejects("optionalUint64", "-1");
|
||||
|
||||
assertAccepts("optionalBool", "true");
|
||||
assertRejects("optionalBool", "1");
|
||||
assertRejects("optionalBool", "0");
|
||||
|
||||
assertAccepts("optionalFloat", String.valueOf(Float.MAX_VALUE));
|
||||
assertAccepts("optionalFloat", String.valueOf(-Float.MAX_VALUE));
|
||||
assertRejects("optionalFloat", String.valueOf(Double.MAX_VALUE));
|
||||
assertRejects("optionalFloat", String.valueOf(-Double.MAX_VALUE));
|
||||
|
||||
BigDecimal moreThanOne = new BigDecimal("1.000001");
|
||||
BigDecimal maxDouble = new BigDecimal(Double.MAX_VALUE);
|
||||
BigDecimal minDouble = new BigDecimal(-Double.MAX_VALUE);
|
||||
assertAccepts("optionalDouble", maxDouble.toString());
|
||||
assertAccepts("optionalDouble", minDouble.toString());
|
||||
assertRejects("optionalDouble", maxDouble.multiply(moreThanOne).toString());
|
||||
assertRejects("optionalDouble", minDouble.multiply(moreThanOne).toString());
|
||||
}
|
||||
|
||||
public void testParserAcceptNull() throws Exception {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " \"optionalInt32\": null,\n"
|
||||
+ " \"optionalInt64\": null,\n"
|
||||
+ " \"optionalUint32\": null,\n"
|
||||
+ " \"optionalUint64\": null,\n"
|
||||
+ " \"optionalSint32\": null,\n"
|
||||
+ " \"optionalSint64\": null,\n"
|
||||
+ " \"optionalFixed32\": null,\n"
|
||||
+ " \"optionalFixed64\": null,\n"
|
||||
+ " \"optionalSfixed32\": null,\n"
|
||||
+ " \"optionalSfixed64\": null,\n"
|
||||
+ " \"optionalFloat\": null,\n"
|
||||
+ " \"optionalDouble\": null,\n"
|
||||
+ " \"optionalBool\": null,\n"
|
||||
+ " \"optionalString\": null,\n"
|
||||
+ " \"optionalBytes\": null,\n"
|
||||
+ " \"optionalNestedMessage\": null,\n"
|
||||
+ " \"optionalNestedEnum\": null,\n"
|
||||
+ " \"repeatedInt32\": null,\n"
|
||||
+ " \"repeatedInt64\": null,\n"
|
||||
+ " \"repeatedUint32\": null,\n"
|
||||
+ " \"repeatedUint64\": null,\n"
|
||||
+ " \"repeatedSint32\": null,\n"
|
||||
+ " \"repeatedSint64\": null,\n"
|
||||
+ " \"repeatedFixed32\": null,\n"
|
||||
+ " \"repeatedFixed64\": null,\n"
|
||||
+ " \"repeatedSfixed32\": null,\n"
|
||||
+ " \"repeatedSfixed64\": null,\n"
|
||||
+ " \"repeatedFloat\": null,\n"
|
||||
+ " \"repeatedDouble\": null,\n"
|
||||
+ " \"repeatedBool\": null,\n"
|
||||
+ " \"repeatedString\": null,\n"
|
||||
+ " \"repeatedBytes\": null,\n"
|
||||
+ " \"repeatedNestedMessage\": null,\n"
|
||||
+ " \"repeatedNestedEnum\": null\n"
|
||||
+ "}", builder);
|
||||
TestAllTypes message = builder.build();
|
||||
assertEquals(TestAllTypes.getDefaultInstance(), message);
|
||||
|
||||
// Repeated field elements can also be null.
|
||||
builder = TestAllTypes.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " \"repeatedInt32\": [null, null],\n"
|
||||
+ " \"repeatedInt64\": [null, null],\n"
|
||||
+ " \"repeatedUint32\": [null, null],\n"
|
||||
+ " \"repeatedUint64\": [null, null],\n"
|
||||
+ " \"repeatedSint32\": [null, null],\n"
|
||||
+ " \"repeatedSint64\": [null, null],\n"
|
||||
+ " \"repeatedFixed32\": [null, null],\n"
|
||||
+ " \"repeatedFixed64\": [null, null],\n"
|
||||
+ " \"repeatedSfixed32\": [null, null],\n"
|
||||
+ " \"repeatedSfixed64\": [null, null],\n"
|
||||
+ " \"repeatedFloat\": [null, null],\n"
|
||||
+ " \"repeatedDouble\": [null, null],\n"
|
||||
+ " \"repeatedBool\": [null, null],\n"
|
||||
+ " \"repeatedString\": [null, null],\n"
|
||||
+ " \"repeatedBytes\": [null, null],\n"
|
||||
+ " \"repeatedNestedMessage\": [null, null],\n"
|
||||
+ " \"repeatedNestedEnum\": [null, null]\n"
|
||||
+ "}", builder);
|
||||
message = builder.build();
|
||||
// "null" elements will be parsed to default values.
|
||||
assertEquals(2, message.getRepeatedInt32Count());
|
||||
assertEquals(0, message.getRepeatedInt32(0));
|
||||
assertEquals(0, message.getRepeatedInt32(1));
|
||||
assertEquals(2, message.getRepeatedInt64Count());
|
||||
assertEquals(0, message.getRepeatedInt64(0));
|
||||
assertEquals(0, message.getRepeatedInt64(1));
|
||||
assertEquals(2, message.getRepeatedUint32Count());
|
||||
assertEquals(0, message.getRepeatedUint32(0));
|
||||
assertEquals(0, message.getRepeatedUint32(1));
|
||||
assertEquals(2, message.getRepeatedUint64Count());
|
||||
assertEquals(0, message.getRepeatedUint64(0));
|
||||
assertEquals(0, message.getRepeatedUint64(1));
|
||||
assertEquals(2, message.getRepeatedSint32Count());
|
||||
assertEquals(0, message.getRepeatedSint32(0));
|
||||
assertEquals(0, message.getRepeatedSint32(1));
|
||||
assertEquals(2, message.getRepeatedSint64Count());
|
||||
assertEquals(0, message.getRepeatedSint64(0));
|
||||
assertEquals(0, message.getRepeatedSint64(1));
|
||||
assertEquals(2, message.getRepeatedFixed32Count());
|
||||
assertEquals(0, message.getRepeatedFixed32(0));
|
||||
assertEquals(0, message.getRepeatedFixed32(1));
|
||||
assertEquals(2, message.getRepeatedFixed64Count());
|
||||
assertEquals(0, message.getRepeatedFixed64(0));
|
||||
assertEquals(0, message.getRepeatedFixed64(1));
|
||||
assertEquals(2, message.getRepeatedSfixed32Count());
|
||||
assertEquals(0, message.getRepeatedSfixed32(0));
|
||||
assertEquals(0, message.getRepeatedSfixed32(1));
|
||||
assertEquals(2, message.getRepeatedSfixed64Count());
|
||||
assertEquals(0, message.getRepeatedSfixed64(0));
|
||||
assertEquals(0, message.getRepeatedSfixed64(1));
|
||||
assertEquals(2, message.getRepeatedFloatCount());
|
||||
assertEquals(0f, message.getRepeatedFloat(0));
|
||||
assertEquals(0f, message.getRepeatedFloat(1));
|
||||
assertEquals(2, message.getRepeatedDoubleCount());
|
||||
assertEquals(0.0, message.getRepeatedDouble(0));
|
||||
assertEquals(0.0, message.getRepeatedDouble(1));
|
||||
assertEquals(2, message.getRepeatedBoolCount());
|
||||
assertFalse(message.getRepeatedBool(0));
|
||||
assertFalse(message.getRepeatedBool(1));
|
||||
assertEquals(2, message.getRepeatedStringCount());
|
||||
assertTrue(message.getRepeatedString(0).isEmpty());
|
||||
assertTrue(message.getRepeatedString(1).isEmpty());
|
||||
assertEquals(2, message.getRepeatedBytesCount());
|
||||
assertTrue(message.getRepeatedBytes(0).isEmpty());
|
||||
assertTrue(message.getRepeatedBytes(1).isEmpty());
|
||||
assertEquals(2, message.getRepeatedNestedMessageCount());
|
||||
assertEquals(NestedMessage.getDefaultInstance(), message.getRepeatedNestedMessage(0));
|
||||
assertEquals(NestedMessage.getDefaultInstance(), message.getRepeatedNestedMessage(1));
|
||||
assertEquals(2, message.getRepeatedNestedEnumCount());
|
||||
assertEquals(0, message.getRepeatedNestedEnumValue(0));
|
||||
assertEquals(0, message.getRepeatedNestedEnumValue(1));
|
||||
}
|
||||
|
||||
public void testMapFields() throws Exception {
|
||||
TestMap.Builder builder = TestMap.newBuilder();
|
||||
builder.getMutableInt32ToInt32Map().put(1, 10);
|
||||
builder.getMutableInt64ToInt32Map().put(1234567890123456789L, 10);
|
||||
builder.getMutableUint32ToInt32Map().put(2, 20);
|
||||
builder.getMutableUint64ToInt32Map().put(2234567890123456789L, 20);
|
||||
builder.getMutableSint32ToInt32Map().put(3, 30);
|
||||
builder.getMutableSint64ToInt32Map().put(3234567890123456789L, 30);
|
||||
builder.getMutableFixed32ToInt32Map().put(4, 40);
|
||||
builder.getMutableFixed64ToInt32Map().put(4234567890123456789L, 40);
|
||||
builder.getMutableSfixed32ToInt32Map().put(5, 50);
|
||||
builder.getMutableSfixed64ToInt32Map().put(5234567890123456789L, 50);
|
||||
builder.getMutableBoolToInt32Map().put(false, 6);
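// Map keys are always written as JSON strings, so this bool key appears as "false" below.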
|
||||
builder.getMutableStringToInt32Map().put("Hello", 10);
|
||||
|
||||
builder.getMutableInt32ToInt64Map().put(1, 1234567890123456789L);
|
||||
builder.getMutableInt32ToUint32Map().put(2, 20);
|
||||
builder.getMutableInt32ToUint64Map().put(2, 2234567890123456789L);
|
||||
builder.getMutableInt32ToSint32Map().put(3, 30);
|
||||
builder.getMutableInt32ToSint64Map().put(3, 3234567890123456789L);
|
||||
builder.getMutableInt32ToFixed32Map().put(4, 40);
|
||||
builder.getMutableInt32ToFixed64Map().put(4, 4234567890123456789L);
|
||||
builder.getMutableInt32ToSfixed32Map().put(5, 50);
|
||||
builder.getMutableInt32ToSfixed64Map().put(5, 5234567890123456789L);
|
||||
builder.getMutableInt32ToFloatMap().put(6, 1.5f);
|
||||
builder.getMutableInt32ToDoubleMap().put(6, 1.25);
|
||||
builder.getMutableInt32ToBoolMap().put(7, false);
|
||||
builder.getMutableInt32ToStringMap().put(7, "World");
|
||||
builder.getMutableInt32ToBytesMap().put(
|
||||
8, ByteString.copyFrom(new byte[]{1, 2, 3}));
|
||||
builder.getMutableInt32ToMessageMap().put(
|
||||
8, NestedMessage.newBuilder().setValue(1234).build());
|
||||
builder.getMutableInt32ToEnumMap().put(9, NestedEnum.BAR);
|
||||
TestMap message = builder.build();
|
||||
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"int32ToInt32Map\": {\n"
|
||||
+ " \"1\": 10\n"
|
||||
+ " },\n"
|
||||
+ " \"int64ToInt32Map\": {\n"
|
||||
+ " \"1234567890123456789\": 10\n"
|
||||
+ " },\n"
|
||||
+ " \"uint32ToInt32Map\": {\n"
|
||||
+ " \"2\": 20\n"
|
||||
+ " },\n"
|
||||
+ " \"uint64ToInt32Map\": {\n"
|
||||
+ " \"2234567890123456789\": 20\n"
|
||||
+ " },\n"
|
||||
+ " \"sint32ToInt32Map\": {\n"
|
||||
+ " \"3\": 30\n"
|
||||
+ " },\n"
|
||||
+ " \"sint64ToInt32Map\": {\n"
|
||||
+ " \"3234567890123456789\": 30\n"
|
||||
+ " },\n"
|
||||
+ " \"fixed32ToInt32Map\": {\n"
|
||||
+ " \"4\": 40\n"
|
||||
+ " },\n"
|
||||
+ " \"fixed64ToInt32Map\": {\n"
|
||||
+ " \"4234567890123456789\": 40\n"
|
||||
+ " },\n"
|
||||
+ " \"sfixed32ToInt32Map\": {\n"
|
||||
+ " \"5\": 50\n"
|
||||
+ " },\n"
|
||||
+ " \"sfixed64ToInt32Map\": {\n"
|
||||
+ " \"5234567890123456789\": 50\n"
|
||||
+ " },\n"
|
||||
+ " \"boolToInt32Map\": {\n"
|
||||
+ " \"false\": 6\n"
|
||||
+ " },\n"
|
||||
+ " \"stringToInt32Map\": {\n"
|
||||
+ " \"Hello\": 10\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToInt64Map\": {\n"
|
||||
+ " \"1\": \"1234567890123456789\"\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToUint32Map\": {\n"
|
||||
+ " \"2\": 20\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToUint64Map\": {\n"
|
||||
+ " \"2\": \"2234567890123456789\"\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToSint32Map\": {\n"
|
||||
+ " \"3\": 30\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToSint64Map\": {\n"
|
||||
+ " \"3\": \"3234567890123456789\"\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToFixed32Map\": {\n"
|
||||
+ " \"4\": 40\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToFixed64Map\": {\n"
|
||||
+ " \"4\": \"4234567890123456789\"\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToSfixed32Map\": {\n"
|
||||
+ " \"5\": 50\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToSfixed64Map\": {\n"
|
||||
+ " \"5\": \"5234567890123456789\"\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToFloatMap\": {\n"
|
||||
+ " \"6\": 1.5\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToDoubleMap\": {\n"
|
||||
+ " \"6\": 1.25\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToBoolMap\": {\n"
|
||||
+ " \"7\": false\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToStringMap\": {\n"
|
||||
+ " \"7\": \"World\"\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToBytesMap\": {\n"
|
||||
+ " \"8\": \"AQID\"\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToMessageMap\": {\n"
|
||||
+ " \"8\": {\n"
|
||||
+ " \"value\": 1234\n"
|
||||
+ " }\n"
|
||||
+ " },\n"
|
||||
+ " \"int32ToEnumMap\": {\n"
|
||||
+ " \"9\": \"BAR\"\n"
|
||||
+ " }\n"
|
||||
+ "}", toJsonString(message));
|
||||
assertRoundTripEquals(message);
|
||||
|
||||
// Test multiple entries.
|
||||
builder = TestMap.newBuilder();
|
||||
builder.getMutableInt32ToInt32Map().put(1, 2);
|
||||
builder.getMutableInt32ToInt32Map().put(3, 4);
|
||||
message = builder.build();
|
||||
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"int32ToInt32Map\": {\n"
|
||||
+ " \"1\": 2,\n"
|
||||
+ " \"3\": 4\n"
|
||||
+ " }\n"
|
||||
+ "}", toJsonString(message));
|
||||
assertRoundTripEquals(message);
|
||||
}
|
||||
|
||||
public void testMapNullValueIsDefault() throws Exception {
|
||||
TestMap.Builder builder = TestMap.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " \"int32ToInt32Map\": {\"1\": null},\n"
|
||||
+ " \"int32ToMessageMap\": {\"2\": null}\n"
|
||||
+ "}", builder);
|
||||
TestMap message = builder.build();
|
||||
assertTrue(message.getInt32ToInt32Map().containsKey(1));
|
||||
assertEquals(0, message.getInt32ToInt32Map().get(1).intValue());
|
||||
assertTrue(message.getInt32ToMessageMap().containsKey(2));
|
||||
assertEquals(0, message.getInt32ToMessageMap().get(2).getValue());
|
||||
}
|
||||
|
||||
public void testParserAcceptNonQuotedObjectKey() throws Exception {
|
||||
TestMap.Builder builder = TestMap.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " int32ToInt32Map: {1: 2},\n"
|
||||
+ " stringToInt32Map: {hello: 3}\n"
|
||||
+ "}", builder);
|
||||
TestMap message = builder.build();
|
||||
assertEquals(2, message.getInt32ToInt32Map().get(1).intValue());
|
||||
assertEquals(3, message.getStringToInt32Map().get("hello").intValue());
|
||||
}
|
||||
|
||||
public void testWrappers() throws Exception {
|
||||
TestWrappers.Builder builder = TestWrappers.newBuilder();
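// All wrapper fields are set to their default values; because wrappers are message-typed,
// the fields still count as present and show up in the JSON output below.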
|
||||
builder.getBoolValueBuilder().setValue(false);
|
||||
builder.getInt32ValueBuilder().setValue(0);
|
||||
builder.getInt64ValueBuilder().setValue(0);
|
||||
builder.getUint32ValueBuilder().setValue(0);
|
||||
builder.getUint64ValueBuilder().setValue(0);
|
||||
builder.getFloatValueBuilder().setValue(0.0f);
|
||||
builder.getDoubleValueBuilder().setValue(0.0);
|
||||
builder.getStringValueBuilder().setValue("");
|
||||
builder.getBytesValueBuilder().setValue(ByteString.EMPTY);
|
||||
TestWrappers message = builder.build();
|
||||
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"int32Value\": 0,\n"
|
||||
+ " \"uint32Value\": 0,\n"
|
||||
+ " \"int64Value\": \"0\",\n"
|
||||
+ " \"uint64Value\": \"0\",\n"
|
||||
+ " \"floatValue\": 0.0,\n"
|
||||
+ " \"doubleValue\": 0.0,\n"
|
||||
+ " \"boolValue\": false,\n"
|
||||
+ " \"stringValue\": \"\",\n"
|
||||
+ " \"bytesValue\": \"\"\n"
|
||||
+ "}", toJsonString(message));
|
||||
assertRoundTripEquals(message);
|
||||
|
||||
builder = TestWrappers.newBuilder();
|
||||
builder.getBoolValueBuilder().setValue(true);
|
||||
builder.getInt32ValueBuilder().setValue(1);
|
||||
builder.getInt64ValueBuilder().setValue(2);
|
||||
builder.getUint32ValueBuilder().setValue(3);
|
||||
builder.getUint64ValueBuilder().setValue(4);
|
||||
builder.getFloatValueBuilder().setValue(5.0f);
|
||||
builder.getDoubleValueBuilder().setValue(6.0);
|
||||
builder.getStringValueBuilder().setValue("7");
|
||||
builder.getBytesValueBuilder().setValue(ByteString.copyFrom(new byte[]{8}));
|
||||
message = builder.build();
|
||||
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"int32Value\": 1,\n"
|
||||
+ " \"uint32Value\": 3,\n"
|
||||
+ " \"int64Value\": \"2\",\n"
|
||||
+ " \"uint64Value\": \"4\",\n"
|
||||
+ " \"floatValue\": 5.0,\n"
|
||||
+ " \"doubleValue\": 6.0,\n"
|
||||
+ " \"boolValue\": true,\n"
|
||||
+ " \"stringValue\": \"7\",\n"
|
||||
+ " \"bytesValue\": \"CA==\"\n"
|
||||
+ "}", toJsonString(message));
|
||||
assertRoundTripEquals(message);
|
||||
}
|
||||
|
||||
public void testTimestamp() throws Exception {
|
||||
TestTimestamp message = TestTimestamp.newBuilder()
|
||||
.setTimestampValue(TimeUtil.parseTimestamp("1970-01-01T00:00:00Z"))
|
||||
.build();
|
||||
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"timestampValue\": \"1970-01-01T00:00:00Z\"\n"
|
||||
+ "}", toJsonString(message));
|
||||
assertRoundTripEquals(message);
|
||||
}
|
||||
|
||||
public void testDuration() throws Exception {
|
||||
TestDuration message = TestDuration.newBuilder()
|
||||
.setDurationValue(TimeUtil.parseDuration("12345s"))
|
||||
.build();
|
||||
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"durationValue\": \"12345s\"\n"
|
||||
+ "}", toJsonString(message));
|
||||
assertRoundTripEquals(message);
|
||||
}
|
||||
|
||||
public void testFieldMask() throws Exception {
|
||||
TestFieldMask message = TestFieldMask.newBuilder()
|
||||
.setFieldMaskValue(FieldMaskUtil.fromString("foo.bar,baz"))
|
||||
.build();
|
||||
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"fieldMaskValue\": \"foo.bar,baz\"\n"
|
||||
+ "}", toJsonString(message));
|
||||
assertRoundTripEquals(message);
|
||||
}
|
||||
|
||||
public void testStruct() throws Exception {
|
||||
// Build a struct with all possible values.
|
||||
TestStruct.Builder builder = TestStruct.newBuilder();
|
||||
Struct.Builder structBuilder = builder.getStructValueBuilder();
|
||||
structBuilder.getMutableFields().put(
|
||||
"null_value", Value.newBuilder().setNullValueValue(0).build());
|
||||
structBuilder.getMutableFields().put(
|
||||
"number_value", Value.newBuilder().setNumberValue(1.25).build());
|
||||
structBuilder.getMutableFields().put(
|
||||
"string_value", Value.newBuilder().setStringValue("hello").build());
|
||||
Struct.Builder subStructBuilder = Struct.newBuilder();
|
||||
subStructBuilder.getMutableFields().put(
|
||||
"number_value", Value.newBuilder().setNumberValue(1234).build());
|
||||
structBuilder.getMutableFields().put(
|
||||
"struct_value", Value.newBuilder().setStructValue(subStructBuilder.build()).build());
|
||||
ListValue.Builder listBuilder = ListValue.newBuilder();
|
||||
listBuilder.addValues(Value.newBuilder().setNumberValue(1.125).build());
|
||||
listBuilder.addValues(Value.newBuilder().setNullValueValue(0).build());
|
||||
structBuilder.getMutableFields().put(
|
||||
"list_value", Value.newBuilder().setListValue(listBuilder.build()).build());
|
||||
TestStruct message = builder.build();
|
||||
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"structValue\": {\n"
|
||||
+ " \"null_value\": null,\n"
|
||||
+ " \"number_value\": 1.25,\n"
|
||||
+ " \"string_value\": \"hello\",\n"
|
||||
+ " \"struct_value\": {\n"
|
||||
+ " \"number_value\": 1234.0\n"
|
||||
+ " },\n"
|
||||
+ " \"list_value\": [1.125, null]\n"
|
||||
+ " }\n"
|
||||
+ "}", toJsonString(message));
|
||||
assertRoundTripEquals(message);
|
||||
}
|
||||
|
||||
public void testAnyFields() throws Exception {
|
||||
TestAllTypes content = TestAllTypes.newBuilder().setOptionalInt32(1234).build();
|
||||
TestAny message = TestAny.newBuilder().setAnyValue(Any.pack(content)).build();
|
||||
|
||||
// A TypeRegistry must be provided in order to convert Any types.
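// Without a registry the printer cannot resolve the descriptor of the packed type,
// so printing fails below.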
|
||||
try {
|
||||
toJsonString(message);
|
||||
fail("Exception is expected.");
|
||||
} catch (IOException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
JsonFormat.TypeRegistry registry = JsonFormat.TypeRegistry.newBuilder()
|
||||
.add(TestAllTypes.getDescriptor()).build();
|
||||
JsonFormat.Printer printer = JsonFormat.printer().usingTypeRegistry(registry);
|
||||
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"anyValue\": {\n"
|
||||
+ " \"@type\": \"type.googleapis.com/json_test.TestAllTypes\",\n"
|
||||
+ " \"optionalInt32\": 1234\n"
|
||||
+ " }\n"
|
||||
+ "}" , printer.print(message));
|
||||
assertRoundTripEquals(message, registry);
|
||||
|
||||
|
||||
// Well-known types have a special formatting when embedded in Any.
|
||||
//
|
||||
// 1. Any in Any.
|
||||
Any anyMessage = Any.pack(Any.pack(content));
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.Any\",\n"
|
||||
+ " \"value\": {\n"
|
||||
+ " \"@type\": \"type.googleapis.com/json_test.TestAllTypes\",\n"
|
||||
+ " \"optionalInt32\": 1234\n"
|
||||
+ " }\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
|
||||
// 2. Wrappers in Any.
|
||||
anyMessage = Any.pack(Int32Value.newBuilder().setValue(12345).build());
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.Int32Value\",\n"
|
||||
+ " \"value\": 12345\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(UInt32Value.newBuilder().setValue(12345).build());
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.UInt32Value\",\n"
|
||||
+ " \"value\": 12345\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(Int64Value.newBuilder().setValue(12345).build());
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.Int64Value\",\n"
|
||||
+ " \"value\": \"12345\"\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(UInt64Value.newBuilder().setValue(12345).build());
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.UInt64Value\",\n"
|
||||
+ " \"value\": \"12345\"\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(FloatValue.newBuilder().setValue(12345).build());
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.FloatValue\",\n"
|
||||
+ " \"value\": 12345.0\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(DoubleValue.newBuilder().setValue(12345).build());
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.DoubleValue\",\n"
|
||||
+ " \"value\": 12345.0\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(BoolValue.newBuilder().setValue(true).build());
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.BoolValue\",\n"
|
||||
+ " \"value\": true\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(StringValue.newBuilder().setValue("Hello").build());
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.StringValue\",\n"
|
||||
+ " \"value\": \"Hello\"\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
anyMessage = Any.pack(BytesValue.newBuilder().setValue(
|
||||
ByteString.copyFrom(new byte[]{1, 2})).build());
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.BytesValue\",\n"
|
||||
+ " \"value\": \"AQI=\"\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
|
||||
// 3. Timestamp in Any.
|
||||
anyMessage = Any.pack(TimeUtil.parseTimestamp("1969-12-31T23:59:59Z"));
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.Timestamp\",\n"
|
||||
+ " \"value\": \"1969-12-31T23:59:59Z\"\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
|
||||
// 4. Duration in Any
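// (Printed durations use 3, 6, or 9 fractional digits, so "12345.10s" comes back as "12345.100s".)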
|
||||
anyMessage = Any.pack(TimeUtil.parseDuration("12345.10s"));
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n"
|
||||
+ " \"value\": \"12345.100s\"\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
|
||||
// 5. FieldMask in Any
|
||||
anyMessage = Any.pack(FieldMaskUtil.fromString("foo.bar,baz"));
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.FieldMask\",\n"
|
||||
+ " \"value\": \"foo.bar,baz\"\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
|
||||
// 6. Struct in Any
|
||||
Struct.Builder structBuilder = Struct.newBuilder();
|
||||
structBuilder.getMutableFields().put(
|
||||
"number", Value.newBuilder().setNumberValue(1.125).build());
|
||||
anyMessage = Any.pack(structBuilder.build());
|
||||
assertEquals(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/google.protobuf.Struct\",\n"
|
||||
+ " \"value\": {\n"
|
||||
+ " \"number\": 1.125\n"
|
||||
+ " }\n"
|
||||
+ "}", printer.print(anyMessage));
|
||||
assertRoundTripEquals(anyMessage, registry);
|
||||
}
|
||||
|
||||
public void testParserMissingTypeUrl() throws Exception {
|
||||
try {
|
||||
Any.Builder builder = Any.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " \"optionalInt32\": 1234\n"
|
||||
+ "}", builder);
|
||||
fail("Exception is expected.");
|
||||
} catch (IOException e) {
|
||||
// Expected.
|
||||
}
|
||||
}
|
||||
|
||||
public void testParserUnexpectedTypeUrl() throws Exception {
|
||||
try {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " \"@type\": \"type.googleapis.com/json_test.TestAllTypes\",\n"
|
||||
+ " \"optionalInt32\": 12345\n"
|
||||
+ "}", builder);
|
||||
fail("Exception is expected.");
|
||||
} catch (IOException e) {
|
||||
// Expected.
|
||||
}
|
||||
}
|
||||
|
||||
public void testParserRejectTrailingComma() throws Exception {
|
||||
try {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " \"optionalInt32\": 12345,\n"
|
||||
+ "}", builder);
|
||||
fail("Exception is expected.");
|
||||
} catch (IOException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
// TODO(xiaofeng): GSON allows trailing comma in arrays even after I set
|
||||
// the JsonReader to non-lenient mode. If we want to enforce strict JSON
|
||||
// compliance, we might want to switch to a different JSON parser or
|
||||
// implement one ourselves.
|
||||
// try {
|
||||
// TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
// JsonFormat.merge(
|
||||
// "{\n"
|
||||
// + " \"repeatedInt32\": [12345,]\n"
|
||||
// + "}", builder);
|
||||
// fail("Exception is expected.");
|
||||
// } catch (IOException e) {
|
||||
// // Expected.
|
||||
// }
|
||||
}
|
||||
|
||||
public void testParserRejectInvalidBase64() throws Exception {
|
||||
try {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " \"optionalBytes\": \"!@#$\"\n"
|
||||
+ "}", builder);
|
||||
fail("Exception is expected.");
|
||||
} catch (InvalidProtocolBufferException e) {
|
||||
// Expected.
|
||||
}
|
||||
}
|
||||
|
||||
public void testParserRejectInvalidEnumValue() throws Exception {
|
||||
try {
|
||||
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
|
||||
mergeFromJson(
|
||||
"{\n"
|
||||
+ " \"optionalNestedEnum\": \"XXX\"\n"
|
||||
+ "}", builder);
|
||||
fail("Exception is expected.");
|
||||
} catch (InvalidProtocolBufferException e) {
|
||||
// Expected.
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,439 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package com.google.protobuf.util;
|
||||
|
||||
import com.google.protobuf.Duration;
|
||||
import com.google.protobuf.Timestamp;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.junit.Assert;
|
||||
|
||||
import java.text.ParseException;
|
||||
|
||||
/** Unit tests for {@link TimeUtil}. */
|
||||
public class TimeUtilTest extends TestCase {
|
||||
public void testTimestampStringFormat() throws Exception {
|
||||
Timestamp start = TimeUtil.parseTimestamp("0001-01-01T00:00:00Z");
|
||||
Timestamp end = TimeUtil.parseTimestamp("9999-12-31T23:59:59.999999999Z");
|
||||
assertEquals(TimeUtil.TIMESTAMP_SECONDS_MIN, start.getSeconds());
|
||||
assertEquals(0, start.getNanos());
|
||||
assertEquals(TimeUtil.TIMESTAMP_SECONDS_MAX, end.getSeconds());
|
||||
assertEquals(999999999, end.getNanos());
|
||||
assertEquals("0001-01-01T00:00:00Z", TimeUtil.toString(start));
|
||||
assertEquals("9999-12-31T23:59:59.999999999Z", TimeUtil.toString(end));
|
||||
|
||||
Timestamp value = TimeUtil.parseTimestamp("1970-01-01T00:00:00Z");
|
||||
assertEquals(0, value.getSeconds());
|
||||
assertEquals(0, value.getNanos());
|
||||
|
||||
// Test negative timestamps.
|
||||
value = TimeUtil.parseTimestamp("1969-12-31T23:59:59.999Z");
|
||||
assertEquals(-1, value.getSeconds());
|
||||
// Nano part is in the range of [0, 999999999] for Timestamp.
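// e.g. -0.001s is represented as seconds = -1, nanos = 999000000.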
|
||||
assertEquals(999000000, value.getNanos());
|
||||
|
||||
// Test that 3, 6, or 9 digits are used for the fractional part.
|
||||
value = Timestamp.newBuilder().setNanos(10).build();
|
||||
assertEquals("1970-01-01T00:00:00.000000010Z", TimeUtil.toString(value));
|
||||
value = Timestamp.newBuilder().setNanos(10000).build();
|
||||
assertEquals("1970-01-01T00:00:00.000010Z", TimeUtil.toString(value));
|
||||
value = Timestamp.newBuilder().setNanos(10000000).build();
|
||||
assertEquals("1970-01-01T00:00:00.010Z", TimeUtil.toString(value));
|
||||
|
||||
// Test that parsing accepts timezone offsets.
|
||||
value = TimeUtil.parseTimestamp("1970-01-01T00:00:00.010+08:00");
|
||||
assertEquals("1969-12-31T16:00:00.010Z", TimeUtil.toString(value));
|
||||
value = TimeUtil.parseTimestamp("1970-01-01T00:00:00.010-08:00");
|
||||
assertEquals("1970-01-01T08:00:00.010Z", TimeUtil.toString(value));
|
||||
}
|
||||
|
||||
public void testTimestampInvalidFormat() throws Exception {
|
||||
try {
|
||||
// Value too small.
|
||||
Timestamp value = Timestamp.newBuilder()
|
||||
.setSeconds(TimeUtil.TIMESTAMP_SECONDS_MIN - 1).build();
|
||||
TimeUtil.toString(value);
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Value too large.
|
||||
Timestamp value = Timestamp.newBuilder()
|
||||
.setSeconds(TimeUtil.TIMESTAMP_SECONDS_MAX + 1).build();
|
||||
TimeUtil.toString(value);
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Invalid nanos value.
|
||||
Timestamp value = Timestamp.newBuilder().setNanos(-1).build();
|
||||
TimeUtil.toString(value);
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Invalid nanos value.
|
||||
Timestamp value = Timestamp.newBuilder().setNanos(1000000000).build();
|
||||
TimeUtil.toString(value);
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Value too small.
|
||||
TimeUtil.parseTimestamp("0000-01-01T00:00:00Z");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Value too large.
|
||||
TimeUtil.parseTimestamp("10000-01-01T00:00:00Z");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Missing 'T'.
|
||||
TimeUtil.parseTimestamp("1970-01-01 00:00:00Z");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Missing 'Z'.
|
||||
TimeUtil.parseTimestamp("1970-01-01T00:00:00");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Invalid offset.
|
||||
TimeUtil.parseTimestamp("1970-01-01T00:00:00+0000");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Trailing text.
|
||||
TimeUtil.parseTimestamp("1970-01-01T00:00:00Z0");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Invalid nanosecond value.
|
||||
TimeUtil.parseTimestamp("1970-01-01T00:00:00.ABCZ");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
}
|
||||
|
||||
public void testDurationStringFormat() throws Exception {
|
||||
Timestamp start = TimeUtil.parseTimestamp("0001-01-01T00:00:00Z");
|
||||
Timestamp end = TimeUtil.parseTimestamp("9999-12-31T23:59:59.999999999Z");
|
||||
Duration duration = TimeUtil.distance(start, end);
|
||||
assertEquals("315537897599.999999999s", TimeUtil.toString(duration));
|
||||
duration = TimeUtil.distance(end, start);
|
||||
assertEquals("-315537897599.999999999s", TimeUtil.toString(duration));
|
||||
|
||||
// Generated output should contain 3, 6, or 9 fractional digits.
|
||||
duration = Duration.newBuilder().setSeconds(1).build();
|
||||
assertEquals("1s", TimeUtil.toString(duration));
|
||||
duration = Duration.newBuilder().setNanos(10000000).build();
|
||||
assertEquals("0.010s", TimeUtil.toString(duration));
|
||||
duration = Duration.newBuilder().setNanos(10000).build();
|
||||
assertEquals("0.000010s", TimeUtil.toString(duration));
|
||||
duration = Duration.newBuilder().setNanos(10).build();
|
||||
assertEquals("0.000000010s", TimeUtil.toString(duration));
|
||||
|
||||
// Parsing accepts any number of fractional digits as long as they fit into nano
|
||||
// precision.
|
||||
duration = TimeUtil.parseDuration("0.1s");
|
||||
assertEquals(100000000, duration.getNanos());
|
||||
duration = TimeUtil.parseDuration("0.0001s");
|
||||
assertEquals(100000, duration.getNanos());
|
||||
duration = TimeUtil.parseDuration("0.0000001s");
|
||||
assertEquals(100, duration.getNanos());
|
||||
|
||||
// Duration must support the range from -315,576,000,000s to +315,576,000,000s,
|
||||
// which includes negative values.
|
||||
duration = TimeUtil.parseDuration("315576000000.999999999s");
|
||||
assertEquals(315576000000L, duration.getSeconds());
|
||||
assertEquals(999999999, duration.getNanos());
|
||||
duration = TimeUtil.parseDuration("-315576000000.999999999s");
|
||||
assertEquals(-315576000000L, duration.getSeconds());
|
||||
assertEquals(-999999999, duration.getNanos());
|
||||
}
|
||||
|
||||
public void testDurationInvalidFormat() throws Exception {
|
||||
try {
|
||||
// Value too small.
|
||||
Duration value = Duration.newBuilder()
|
||||
.setSeconds(TimeUtil.DURATION_SECONDS_MIN - 1).build();
|
||||
TimeUtil.toString(value);
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Value too large.
|
||||
Duration value = Duration.newBuilder()
|
||||
.setSeconds(TimeUtil.DURATION_SECONDS_MAX + 1).build();
|
||||
TimeUtil.toString(value);
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Invalid nanos value.
|
||||
Duration value = Duration.newBuilder().setSeconds(1).setNanos(-1)
|
||||
.build();
|
||||
TimeUtil.toString(value);
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Invalid nanos value.
|
||||
Duration value = Duration.newBuilder().setSeconds(-1).setNanos(1)
|
||||
.build();
|
||||
TimeUtil.toString(value);
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Value too small.
|
||||
TimeUtil.parseDuration("-315576000001s");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Value too large.
|
||||
TimeUtil.parseDuration("315576000001s");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Empty.
|
||||
TimeUtil.parseDuration("");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Missing "s".
|
||||
TimeUtil.parseDuration("0");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Invalid trailing data.
|
||||
TimeUtil.parseDuration("0s0");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
try {
|
||||
// Invalid prefix.
|
||||
TimeUtil.parseDuration("--1s");
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (ParseException e) {
|
||||
// Expected.
|
||||
}
|
||||
}
|
||||
|
||||
public void testTimestampConversion() throws Exception {
|
||||
Timestamp timestamp =
|
||||
TimeUtil.parseTimestamp("1970-01-01T00:00:01.111111111Z");
|
||||
assertEquals(1111111111, TimeUtil.toNanos(timestamp));
|
||||
assertEquals(1111111, TimeUtil.toMicros(timestamp));
|
||||
assertEquals(1111, TimeUtil.toMillis(timestamp));
|
||||
timestamp = TimeUtil.createTimestampFromNanos(1111111111);
|
||||
assertEquals("1970-01-01T00:00:01.111111111Z", TimeUtil.toString(timestamp));
|
||||
timestamp = TimeUtil.createTimestampFromMicros(1111111);
|
||||
assertEquals("1970-01-01T00:00:01.111111Z", TimeUtil.toString(timestamp));
|
||||
timestamp = TimeUtil.createTimestampFromMillis(1111);
|
||||
assertEquals("1970-01-01T00:00:01.111Z", TimeUtil.toString(timestamp));
|
||||
|
||||
timestamp = TimeUtil.parseTimestamp("1969-12-31T23:59:59.111111111Z");
|
||||
assertEquals(-888888889, TimeUtil.toNanos(timestamp));
|
||||
assertEquals(-888889, TimeUtil.toMicros(timestamp));
|
||||
assertEquals(-889, TimeUtil.toMillis(timestamp));
|
||||
timestamp = TimeUtil.createTimestampFromNanos(-888888889);
|
||||
assertEquals("1969-12-31T23:59:59.111111111Z", TimeUtil.toString(timestamp));
|
||||
timestamp = TimeUtil.createTimestampFromMicros(-888889);
|
||||
assertEquals("1969-12-31T23:59:59.111111Z", TimeUtil.toString(timestamp));
|
||||
timestamp = TimeUtil.createTimestampFromMillis(-889);
|
||||
assertEquals("1969-12-31T23:59:59.111Z", TimeUtil.toString(timestamp));
|
||||
}
|
||||
|
||||
public void testDurationConversion() throws Exception {
|
||||
Duration duration = TimeUtil.parseDuration("1.111111111s");
|
||||
assertEquals(1111111111, TimeUtil.toNanos(duration));
|
||||
assertEquals(1111111, TimeUtil.toMicros(duration));
|
||||
assertEquals(1111, TimeUtil.toMillis(duration));
|
||||
duration = TimeUtil.createDurationFromNanos(1111111111);
|
||||
assertEquals("1.111111111s", TimeUtil.toString(duration));
|
||||
duration = TimeUtil.createDurationFromMicros(1111111);
|
||||
assertEquals("1.111111s", TimeUtil.toString(duration));
|
||||
duration = TimeUtil.createDurationFromMillis(1111);
|
||||
assertEquals("1.111s", TimeUtil.toString(duration));
|
||||
|
||||
duration = TimeUtil.parseDuration("-1.111111111s");
|
||||
assertEquals(-1111111111, TimeUtil.toNanos(duration));
|
||||
assertEquals(-1111111, TimeUtil.toMicros(duration));
|
||||
assertEquals(-1111, TimeUtil.toMillis(duration));
|
||||
duration = TimeUtil.createDurationFromNanos(-1111111111);
|
||||
assertEquals("-1.111111111s", TimeUtil.toString(duration));
|
||||
duration = TimeUtil.createDurationFromMicros(-1111111);
|
||||
assertEquals("-1.111111s", TimeUtil.toString(duration));
|
||||
duration = TimeUtil.createDurationFromMillis(-1111);
|
||||
assertEquals("-1.111s", TimeUtil.toString(duration));
|
||||
}
|
||||
|
||||
public void testTimeOperations() throws Exception {
|
||||
Timestamp start = TimeUtil.parseTimestamp("0001-01-01T00:00:00Z");
|
||||
Timestamp end = TimeUtil.parseTimestamp("9999-12-31T23:59:59.999999999Z");
|
||||
|
||||
Duration duration = TimeUtil.distance(start, end);
|
||||
assertEquals("315537897599.999999999s", TimeUtil.toString(duration));
|
||||
Timestamp value = TimeUtil.add(start, duration);
|
||||
assertEquals(end, value);
|
||||
value = TimeUtil.subtract(end, duration);
|
||||
assertEquals(start, value);
|
||||
|
||||
duration = TimeUtil.distance(end, start);
|
||||
assertEquals("-315537897599.999999999s", TimeUtil.toString(duration));
|
||||
value = TimeUtil.add(end, duration);
|
||||
assertEquals(start, value);
|
||||
value = TimeUtil.subtract(start, duration);
|
||||
assertEquals(end, value);
|
||||
|
||||
// Result is larger than Long.MAX_VALUE.
|
||||
try {
|
||||
duration = TimeUtil.parseDuration("315537897599.999999999s");
|
||||
duration = TimeUtil.multiply(duration, 315537897599.999999999);
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
// Result is less than Long.MIN_VALUE.
|
||||
try {
|
||||
duration = TimeUtil.parseDuration("315537897599.999999999s");
|
||||
duration = TimeUtil.multiply(duration, -315537897599.999999999);
|
||||
Assert.fail("Exception is expected.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// Expected.
|
||||
}
|
||||
|
||||
duration = TimeUtil.parseDuration("-1.125s");
|
||||
duration = TimeUtil.divide(duration, 2.0);
|
||||
assertEquals("-0.562500s", TimeUtil.toString(duration));
|
||||
duration = TimeUtil.multiply(duration, 2.0);
|
||||
assertEquals("-1.125s", TimeUtil.toString(duration));
|
||||
|
||||
duration = TimeUtil.add(duration, duration);
|
||||
assertEquals("-2.250s", TimeUtil.toString(duration));
|
||||
|
||||
duration = TimeUtil.subtract(duration, TimeUtil.parseDuration("-1s"));
|
||||
assertEquals("-1.250s", TimeUtil.toString(duration));
|
||||
|
||||
// Multiplications (with results larger than Long.MAX_VALUE in nanoseconds).
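// e.g. 0.999999999s * 315,576,000,000 = 315,576,000,000s - 315.576s = 315,575,999,684.424s.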
|
||||
duration = TimeUtil.parseDuration("0.999999999s");
|
||||
assertEquals("315575999684.424s",
|
||||
TimeUtil.toString(TimeUtil.multiply(duration, 315576000000L)));
|
||||
duration = TimeUtil.parseDuration("-0.999999999s");
|
||||
assertEquals("-315575999684.424s",
|
||||
TimeUtil.toString(TimeUtil.multiply(duration, 315576000000L)));
|
||||
assertEquals("315575999684.424s",
|
||||
TimeUtil.toString(TimeUtil.multiply(duration, -315576000000L)));
|
||||
|
||||
// Divisions (with values larger than Long.MAX_VALUE in nanoseconds).
|
||||
Duration d1 = TimeUtil.parseDuration("315576000000s");
|
||||
Duration d2 = TimeUtil.subtract(d1, TimeUtil.createDurationFromNanos(1));
|
||||
assertEquals(1, TimeUtil.divide(d1, d2));
|
||||
assertEquals(0, TimeUtil.divide(d2, d1));
|
||||
assertEquals("0.000000001s", TimeUtil.toString(TimeUtil.remainder(d1, d2)));
|
||||
assertEquals("315575999999.999999999s",
|
||||
TimeUtil.toString(TimeUtil.remainder(d2, d1)));
|
||||
|
||||
// Divisions involving negative values.
|
||||
//
|
||||
// (-5) / 2 = -2, remainder = -1
|
||||
d1 = TimeUtil.parseDuration("-5s");
|
||||
d2 = TimeUtil.parseDuration("2s");
|
||||
assertEquals(-2, TimeUtil.divide(d1, d2));
|
||||
assertEquals(-2, TimeUtil.divide(d1, 2).getSeconds());
|
||||
assertEquals(-1, TimeUtil.remainder(d1, d2).getSeconds());
|
||||
// (-5) / (-2) = 2, remainder = -1
|
||||
d1 = TimeUtil.parseDuration("-5s");
|
||||
d2 = TimeUtil.parseDuration("-2s");
|
||||
assertEquals(2, TimeUtil.divide(d1, d2));
|
||||
assertEquals(2, TimeUtil.divide(d1, -2).getSeconds());
|
||||
assertEquals(-1, TimeUtil.remainder(d1, d2).getSeconds());
|
||||
// 5 / (-2) = -2, remainder = 1
|
||||
d1 = TimeUtil.parseDuration("5s");
|
||||
d2 = TimeUtil.parseDuration("-2s");
|
||||
assertEquals(-2, TimeUtil.divide(d1, d2));
|
||||
assertEquals(-2, TimeUtil.divide(d1, -2).getSeconds());
|
||||
assertEquals(1, TimeUtil.remainder(d1, d2).getSeconds());
|
||||
}
|
||||
}
|
java/util/src/test/java/com/google/protobuf/util/json_test.proto
@ -0,0 +1,158 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package json_test;
|
||||
|
||||
option java_package = "com.google.protobuf.util";
|
||||
option java_outer_classname = "JsonTestProto";
|
||||
|
||||
import "google/protobuf/any.proto";
|
||||
import "google/protobuf/wrappers.proto";
|
||||
import "google/protobuf/timestamp.proto";
|
||||
import "google/protobuf/duration.proto";
|
||||
import "google/protobuf/field_mask.proto";
|
||||
import "google/protobuf/struct.proto";
|
||||
|
||||
message TestAllTypes {
|
||||
enum NestedEnum {
|
||||
FOO = 0;
|
||||
BAR = 1;
|
||||
BAZ = 2;
|
||||
}
|
||||
message NestedMessage {
|
||||
int32 value = 1;
|
||||
}
|
||||
|
||||
int32 optional_int32 = 1;
|
||||
int64 optional_int64 = 2;
|
||||
uint32 optional_uint32 = 3;
|
||||
uint64 optional_uint64 = 4;
|
||||
sint32 optional_sint32 = 5;
|
||||
sint64 optional_sint64 = 6;
|
||||
fixed32 optional_fixed32 = 7;
|
||||
fixed64 optional_fixed64 = 8;
|
||||
sfixed32 optional_sfixed32 = 9;
|
||||
sfixed64 optional_sfixed64 = 10;
|
||||
float optional_float = 11;
|
||||
double optional_double = 12;
|
||||
bool optional_bool = 13;
|
||||
string optional_string = 14;
|
||||
bytes optional_bytes = 15;
|
||||
NestedMessage optional_nested_message = 18;
|
||||
NestedEnum optional_nested_enum = 21;
|
||||
|
||||
// Repeated
|
||||
repeated int32 repeated_int32 = 31;
|
||||
repeated int64 repeated_int64 = 32;
|
||||
repeated uint32 repeated_uint32 = 33;
|
||||
repeated uint64 repeated_uint64 = 34;
|
||||
repeated sint32 repeated_sint32 = 35;
|
||||
repeated sint64 repeated_sint64 = 36;
|
||||
repeated fixed32 repeated_fixed32 = 37;
|
||||
repeated fixed64 repeated_fixed64 = 38;
|
||||
repeated sfixed32 repeated_sfixed32 = 39;
|
||||
repeated sfixed64 repeated_sfixed64 = 40;
|
||||
repeated float repeated_float = 41;
|
||||
repeated double repeated_double = 42;
|
||||
repeated bool repeated_bool = 43;
|
||||
repeated string repeated_string = 44;
|
||||
repeated bytes repeated_bytes = 45;
|
||||
repeated NestedMessage repeated_nested_message = 48;
|
||||
repeated NestedEnum repeated_nested_enum = 51;
|
||||
}
|
||||
|
||||
message TestMap {
|
||||
// Instead of testing all combinations (too many), we only make sure all
|
||||
// valid types have been used in at least one field as key and in one
|
||||
// field as value.
|
||||
map<int32, int32> int32_to_int32_map = 1;
|
||||
map<int64, int32> int64_to_int32_map = 2;
|
||||
map<uint32, int32> uint32_to_int32_map = 3;
|
||||
map<uint64, int32> uint64_to_int32_map = 4;
|
||||
map<sint32, int32> sint32_to_int32_map = 5;
|
||||
map<sint64, int32> sint64_to_int32_map = 6;
|
||||
map<fixed32, int32> fixed32_to_int32_map = 7;
|
||||
map<fixed64, int32> fixed64_to_int32_map = 8;
|
||||
map<sfixed32, int32> sfixed32_to_int32_map = 9;
|
||||
map<sfixed64, int32> sfixed64_to_int32_map = 10;
|
||||
map<bool, int32> bool_to_int32_map = 11;
|
||||
map<string, int32> string_to_int32_map = 12;
|
||||
|
||||
map<int32, int64> int32_to_int64_map = 101;
|
||||
map<int32, uint32> int32_to_uint32_map = 102;
|
||||
map<int32, uint64> int32_to_uint64_map = 103;
|
||||
map<int32, sint32> int32_to_sint32_map = 104;
|
||||
map<int32, sint64> int32_to_sint64_map = 105;
|
||||
map<int32, fixed32> int32_to_fixed32_map = 106;
|
||||
map<int32, fixed64> int32_to_fixed64_map = 107;
|
||||
map<int32, sfixed32> int32_to_sfixed32_map = 108;
|
||||
map<int32, sfixed64> int32_to_sfixed64_map = 109;
|
||||
map<int32, float> int32_to_float_map = 110;
|
||||
map<int32, double> int32_to_double_map = 111;
|
||||
map<int32, bool> int32_to_bool_map = 112;
|
||||
map<int32, string> int32_to_string_map = 113;
|
||||
map<int32, bytes> int32_to_bytes_map = 114;
|
||||
map<int32, TestAllTypes.NestedMessage> int32_to_message_map = 115;
|
||||
map<int32, TestAllTypes.NestedEnum> int32_to_enum_map = 116;
|
||||
}
|
||||
|
||||
message TestWrappers {
|
||||
google.protobuf.Int32Value int32_value = 1;
|
||||
google.protobuf.UInt32Value uint32_value = 2;
|
||||
google.protobuf.Int64Value int64_value = 3;
|
||||
google.protobuf.UInt64Value uint64_value = 4;
|
||||
google.protobuf.FloatValue float_value = 5;
|
||||
google.protobuf.DoubleValue double_value = 6;
|
||||
google.protobuf.BoolValue bool_value = 7;
|
||||
google.protobuf.StringValue string_value = 8;
|
||||
google.protobuf.BytesValue bytes_value = 9;
|
||||
}
|
||||
|
||||
message TestTimestamp {
|
||||
google.protobuf.Timestamp timestamp_value = 1;
|
||||
}
|
||||
|
||||
message TestDuration {
|
||||
google.protobuf.Duration duration_value = 1;
|
||||
}
|
||||
|
||||
message TestFieldMask {
|
||||
google.protobuf.FieldMask field_mask_value = 1;
|
||||
}
|
||||
|
||||
message TestStruct {
|
||||
google.protobuf.Struct struct_value = 1;
|
||||
}
|
||||
|
||||
message TestAny {
|
||||
google.protobuf.Any any_value = 1;
|
||||
}
|
@ -41,6 +41,7 @@ are:
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
import collections
|
||||
import sys
|
||||
|
||||
if sys.version_info[0] < 3:
|
||||
@ -63,7 +64,6 @@ if sys.version_info[0] < 3:
|
||||
# Note: deriving from object is critical. It is the only thing that makes
|
||||
# this a true type, allowing us to derive from it in C++ cleanly and making
|
||||
# __slots__ properly disallow arbitrary element assignment.
|
||||
from collections import Mapping as _Mapping
|
||||
|
||||
class Mapping(object):
|
||||
__slots__ = ()
|
||||
@ -106,7 +106,7 @@ if sys.version_info[0] < 3:
|
||||
__hash__ = None
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, _Mapping):
|
||||
if not isinstance(other, collections.Mapping):
|
||||
return NotImplemented
|
||||
return dict(self.items()) == dict(other.items())
|
||||
|
||||
@ -173,12 +173,13 @@ if sys.version_info[0] < 3:
|
||||
self[key] = default
|
||||
return default
|
||||
|
||||
_Mapping.register(Mapping)
|
||||
collections.Mapping.register(Mapping)
|
||||
collections.MutableMapping.register(MutableMapping)
|
||||
|
||||
else:
|
||||
# In Python 3 we can just use MutableMapping directly, because it defines
|
||||
# __slots__.
|
||||
from collections import MutableMapping
|
||||
MutableMapping = collections.MutableMapping
|
||||
|
||||
|
||||
class BaseContainer(object):
|
||||
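The register() calls added above make the protobuf containers pass isinstance checks against the standard ABCs without inheriting from them. A small self-contained sketch of the same pattern (illustrative only, not from this change; on current Pythons the ABCs live in collections.abc):

import collections

class MyMapping(object):
  """Read-only mapping registered as a virtual subclass, not a real one."""
  __slots__ = ('_data',)

  def __init__(self, data):
    self._data = dict(data)

  def __getitem__(self, key):
    return self._data[key]

  def __len__(self):
    return len(self._data)

  def __iter__(self):
    return iter(self._data)

collections.Mapping.register(MyMapping)  # no inheritance needed
assert isinstance(MyMapping({'a': 1}), collections.Mapping)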
@ -336,6 +337,8 @@ class RepeatedScalarFieldContainer(BaseContainer):
|
||||
# We are presumably comparing against some other sequence type.
|
||||
return other == self._values
|
||||
|
||||
collections.MutableSequence.register(BaseContainer)
|
||||
|
||||
|
||||
class RepeatedCompositeFieldContainer(BaseContainer):
|
||||
|
||||
|
@ -47,6 +47,7 @@ from google.protobuf import unittest_custom_options_pb2
|
||||
from google.protobuf import unittest_import_pb2
|
||||
from google.protobuf import unittest_import_public_pb2
|
||||
from google.protobuf import unittest_mset_pb2
|
||||
from google.protobuf import unittest_mset_wire_format_pb2
|
||||
from google.protobuf import unittest_no_generic_services_pb2
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf import service
|
||||
@ -142,7 +143,7 @@ class GeneratorTest(unittest.TestCase):
|
||||
self.assertTrue(not non_extension_descriptor.is_extension)
|
||||
|
||||
def testOptions(self):
|
||||
proto = unittest_mset_pb2.TestMessageSet()
|
||||
proto = unittest_mset_wire_format_pb2.TestMessageSet()
|
||||
self.assertTrue(proto.DESCRIPTOR.GetOptions().message_set_wire_format)
|
||||
|
||||
def testMessageWithCustomOptions(self):
|
||||
|
@ -43,6 +43,7 @@ abstract interface.
|
||||
|
||||
__author__ = 'gps@google.com (Gregory P. Smith)'
|
||||
|
||||
import collections
|
||||
import copy
|
||||
import math
|
||||
import operator
|
||||
@ -56,6 +57,7 @@ from google.protobuf import map_unittest_pb2
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf import unittest_proto3_arena_pb2
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf.internal import packed_field_test_pb2
|
||||
from google.protobuf.internal import test_util
|
||||
from google.protobuf import message
|
||||
|
||||
@ -421,6 +423,31 @@ class MessageTest(unittest.TestCase):
|
||||
self.assertEqual(message.repeated_nested_message[4].bb, 5)
|
||||
self.assertEqual(message.repeated_nested_message[5].bb, 6)
|
||||
|
||||
def testSortingRepeatedCompositeFieldsStable(self, message_module):
|
||||
"""Check passing a custom comparator to sort a repeated composite field."""
|
||||
message = message_module.TestAllTypes()
|
||||
|
||||
message.repeated_nested_message.add().bb = 21
|
||||
message.repeated_nested_message.add().bb = 20
|
||||
message.repeated_nested_message.add().bb = 13
|
||||
message.repeated_nested_message.add().bb = 33
|
||||
message.repeated_nested_message.add().bb = 11
|
||||
message.repeated_nested_message.add().bb = 24
|
||||
message.repeated_nested_message.add().bb = 10
|
||||
message.repeated_nested_message.sort(key=lambda z: z.bb // 10)
|
||||
self.assertEquals(
|
||||
[13, 11, 10, 21, 20, 24, 33],
|
||||
[n.bb for n in message.repeated_nested_message])
|
||||
|
||||
# Make sure that for the C++ implementation, the underlying fields
|
||||
# are actually reordered.
|
||||
pb = message.SerializeToString()
|
||||
message.Clear()
|
||||
message.MergeFromString(pb)
|
||||
self.assertEquals(
|
||||
[13, 11, 10, 21, 20, 24, 33],
|
||||
[n.bb for n in message.repeated_nested_message])
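The expected order above relies on list.sort() being stable: elements whose keys compare equal keep their original relative order. The same effect on plain integers (illustrative only):

data = [21, 20, 13, 33, 11, 24, 10]
data.sort(key=lambda v: v // 10)   # coarse key: only the tens digit matters
assert data == [13, 11, 10, 21, 20, 24, 33]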
|
||||
|
||||
def testRepeatedCompositeFieldSortArguments(self, message_module):
|
||||
"""Check sorting a repeated composite field using list.sort() arguments."""
|
||||
message = message_module.TestAllTypes()
|
||||
@ -514,6 +541,12 @@ class MessageTest(unittest.TestCase):
|
||||
|
||||
# TODO(anuraag): Implement extensiondict comparison in C++ and then add test
|
||||
|
||||
def testRepeatedFieldsAreSequences(self, message_module):
|
||||
m = message_module.TestAllTypes()
|
||||
self.assertIsInstance(m.repeated_int32, collections.MutableSequence)
|
||||
self.assertIsInstance(m.repeated_nested_message,
|
||||
collections.MutableSequence)
|
||||
|
||||
def ensureNestedMessageExists(self, msg, attribute):
|
||||
"""Make sure that a nested message object exists.
|
||||
|
||||
@ -556,6 +589,18 @@ class MessageTest(unittest.TestCase):
|
||||
self.assertFalse(m.HasField('oneof_uint32'))
|
||||
self.assertTrue(m.HasField('oneof_string'))
|
||||
|
||||
# Read nested message accessor without accessing submessage.
|
||||
m.oneof_nested_message
|
||||
self.assertEqual('oneof_string', m.WhichOneof('oneof_field'))
|
||||
self.assertTrue(m.HasField('oneof_string'))
|
||||
self.assertFalse(m.HasField('oneof_nested_message'))
|
||||
|
||||
# Read accessor of nested message without accessing submessage.
|
||||
m.oneof_nested_message.bb
|
||||
self.assertEqual('oneof_string', m.WhichOneof('oneof_field'))
|
||||
self.assertTrue(m.HasField('oneof_string'))
|
||||
self.assertFalse(m.HasField('oneof_nested_message'))
|
||||
|
||||
m.oneof_nested_message.bb = 11
|
||||
self.assertEqual('oneof_nested_message', m.WhichOneof('oneof_field'))
|
||||
self.assertFalse(m.HasField('oneof_string'))
|
||||
@ -1583,6 +1628,21 @@ class Proto3Test(unittest.TestCase):
|
||||
del msg.map_int32_int32[4]
|
||||
self.assertEqual(0, len(msg.map_int32_int32))
|
||||
|
||||
def testMapsAreMapping(self):
|
||||
msg = map_unittest_pb2.TestMap()
|
||||
self.assertIsInstance(msg.map_int32_int32, collections.Mapping)
|
||||
self.assertIsInstance(msg.map_int32_int32, collections.MutableMapping)
|
||||
self.assertIsInstance(msg.map_int32_foreign_message, collections.Mapping)
|
||||
self.assertIsInstance(msg.map_int32_foreign_message,
|
||||
collections.MutableMapping)
|
||||
|
||||
def testMapFindInitializationErrorsSmokeTest(self):
|
||||
msg = map_unittest_pb2.TestMap()
|
||||
msg.map_string_string['abc'] = '123'
|
||||
msg.map_int32_int32[35] = 64
|
||||
msg.map_string_foreign_message['foo'].c = 5
|
||||
self.assertEqual(0, len(msg.FindInitializationErrors()))
|
||||
|
||||
|
||||
|
||||
class ValidTypeNamesTest(unittest.TestCase):
|
||||
@ -1606,6 +1666,61 @@ class ValidTypeNamesTest(unittest.TestCase):
|
||||
self.assertImportFromName(pb.repeated_int32, 'Scalar')
|
||||
self.assertImportFromName(pb.repeated_nested_message, 'Composite')
|
||||
|
||||
class PackedFieldTest(unittest.TestCase):
|
||||
|
||||
def setMessage(self, message):
|
||||
message.repeated_int32.append(1)
|
||||
message.repeated_int64.append(1)
|
||||
message.repeated_uint32.append(1)
|
||||
message.repeated_uint64.append(1)
|
||||
message.repeated_sint32.append(1)
|
||||
message.repeated_sint64.append(1)
|
||||
message.repeated_fixed32.append(1)
|
||||
message.repeated_fixed64.append(1)
|
||||
message.repeated_sfixed32.append(1)
|
||||
message.repeated_sfixed64.append(1)
|
||||
message.repeated_float.append(1.0)
|
||||
message.repeated_double.append(1.0)
|
||||
message.repeated_bool.append(True)
|
||||
message.repeated_nested_enum.append(1)
|
||||
|
||||
def testPackedFields(self):
|
||||
message = packed_field_test_pb2.TestPackedTypes()
|
||||
self.setMessage(message)
|
||||
golden_data = (b'\x0A\x01\x01'
|
||||
b'\x12\x01\x01'
|
||||
b'\x1A\x01\x01'
|
||||
b'\x22\x01\x01'
|
||||
b'\x2A\x01\x02'
|
||||
b'\x32\x01\x02'
|
||||
b'\x3A\x04\x01\x00\x00\x00'
|
||||
b'\x42\x08\x01\x00\x00\x00\x00\x00\x00\x00'
|
||||
b'\x4A\x04\x01\x00\x00\x00'
|
||||
b'\x52\x08\x01\x00\x00\x00\x00\x00\x00\x00'
|
||||
b'\x5A\x04\x00\x00\x80\x3f'
|
||||
b'\x62\x08\x00\x00\x00\x00\x00\x00\xf0\x3f'
|
||||
b'\x6A\x01\x01'
|
||||
b'\x72\x01\x01')
|
||||
self.assertEqual(golden_data, message.SerializeToString())
|
||||
|
||||
def testUnpackedFields(self):
|
||||
message = packed_field_test_pb2.TestUnpackedTypes()
|
||||
self.setMessage(message)
|
||||
golden_data = (b'\x08\x01'
|
||||
b'\x10\x01'
|
||||
b'\x18\x01'
|
||||
b'\x20\x01'
|
||||
b'\x28\x02'
|
||||
b'\x30\x02'
|
||||
b'\x3D\x01\x00\x00\x00'
|
||||
b'\x41\x01\x00\x00\x00\x00\x00\x00\x00'
|
||||
b'\x4D\x01\x00\x00\x00'
|
||||
b'\x51\x01\x00\x00\x00\x00\x00\x00\x00'
|
||||
b'\x5D\x00\x00\x80\x3f'
|
||||
b'\x61\x00\x00\x00\x00\x00\x00\xf0\x3f'
|
||||
b'\x68\x01'
|
||||
b'\x70\x01')
|
||||
self.assertEqual(golden_data, message.SerializeToString())
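To decode the golden bytes above: a packed repeated field is written as a single length-delimited record (key, length, payload), while an unpacked field repeats its key before every element. A small sketch of how the one-byte keys are formed (illustrative; valid for field numbers below 16 and varint values below 128):

def key_byte(field_number, wire_type):
  # The low three bits carry the wire type, the remaining bits the field number.
  return bytes(bytearray([(field_number << 3) | wire_type]))

# Packed repeated int32, field 1: length-delimited key (0x0A), length, payload.
assert key_byte(1, 2) + b'\x01' + b'\x01' == b'\x0A\x01\x01'
# Unpacked repeated int32, field 1: a varint key (0x08) before each element.
assert key_byte(1, 0) + b'\x01' == b'\x08\x01'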
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
@ -0,0 +1,73 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
syntax = "proto3";

package google.protobuf.python.internal;

message TestPackedTypes {
  enum NestedEnum {
    FOO = 0;
    BAR = 1;
    BAZ = 2;
  }

  repeated int32 repeated_int32 = 1;
  repeated int64 repeated_int64 = 2;
  repeated uint32 repeated_uint32 = 3;
  repeated uint64 repeated_uint64 = 4;
  repeated sint32 repeated_sint32 = 5;
  repeated sint64 repeated_sint64 = 6;
  repeated fixed32 repeated_fixed32 = 7;
  repeated fixed64 repeated_fixed64 = 8;
  repeated sfixed32 repeated_sfixed32 = 9;
  repeated sfixed64 repeated_sfixed64 = 10;
  repeated float repeated_float = 11;
  repeated double repeated_double = 12;
  repeated bool repeated_bool = 13;
  repeated NestedEnum repeated_nested_enum = 14;
}

message TestUnpackedTypes {
  repeated int32 repeated_int32 = 1 [packed = false];
  repeated int64 repeated_int64 = 2 [packed = false];
  repeated uint32 repeated_uint32 = 3 [packed = false];
  repeated uint64 repeated_uint64 = 4 [packed = false];
  repeated sint32 repeated_sint32 = 5 [packed = false];
  repeated sint64 repeated_sint64 = 6 [packed = false];
  repeated fixed32 repeated_fixed32 = 7 [packed = false];
  repeated fixed64 repeated_fixed64 = 8 [packed = false];
  repeated sfixed32 repeated_sfixed32 = 9 [packed = false];
  repeated sfixed64 repeated_sfixed64 = 10 [packed = false];
  repeated float repeated_float = 11 [packed = false];
  repeated double repeated_double = 12 [packed = false];
  repeated bool repeated_bool = 13 [packed = false];
  repeated TestPackedTypes.NestedEnum repeated_nested_enum = 14 [packed = false];
}

@ -85,34 +85,108 @@ from google.protobuf import text_format
|
||||
_FieldDescriptor = descriptor_mod.FieldDescriptor
|
||||
|
||||
|
||||
def NewMessage(bases, descriptor, dictionary):
|
||||
_AddClassAttributesForNestedExtensions(descriptor, dictionary)
|
||||
_AddSlots(descriptor, dictionary)
|
||||
return bases
|
||||
class GeneratedProtocolMessageType(type):
|
||||
|
||||
"""Metaclass for protocol message classes created at runtime from Descriptors.
|
||||
|
||||
def InitMessage(descriptor, cls):
|
||||
cls._decoders_by_tag = {}
|
||||
cls._extensions_by_name = {}
|
||||
cls._extensions_by_number = {}
|
||||
if (descriptor.has_options and
|
||||
descriptor.GetOptions().message_set_wire_format):
|
||||
cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
|
||||
decoder.MessageSetItemDecoder(cls._extensions_by_number), None)
|
||||
We add implementations for all methods described in the Message class. We
|
||||
also create properties to allow getting/setting all fields in the protocol
|
||||
message. Finally, we create slots to prevent users from accidentally
|
||||
"setting" nonexistent fields in the protocol message, which then wouldn't get
|
||||
serialized / deserialized properly.
|
||||
|
||||
# Attach stuff to each FieldDescriptor for quick lookup later on.
|
||||
for field in descriptor.fields:
|
||||
_AttachFieldHelpers(cls, field)
|
||||
The protocol compiler currently uses this metaclass to create protocol
|
||||
message classes at runtime. Clients can also manually create their own
|
||||
classes at runtime, as in this example:
|
||||
|
||||
descriptor._concrete_class = cls # pylint: disable=protected-access
|
||||
_AddEnumValues(descriptor, cls)
|
||||
_AddInitMethod(descriptor, cls)
|
||||
_AddPropertiesForFields(descriptor, cls)
|
||||
_AddPropertiesForExtensions(descriptor, cls)
|
||||
_AddStaticMethods(cls)
|
||||
_AddMessageMethods(descriptor, cls)
|
||||
_AddPrivateHelperMethods(descriptor, cls)
|
||||
copyreg.pickle(cls, lambda obj: (cls, (), obj.__getstate__()))
|
||||
mydescriptor = Descriptor(.....)
|
||||
class MyProtoClass(Message):
|
||||
__metaclass__ = GeneratedProtocolMessageType
|
||||
DESCRIPTOR = mydescriptor
|
||||
myproto_instance = MyProtoClass()
|
||||
myproto.foo_field = 23
|
||||
...
|
||||
|
||||
The above example will not work for nested types. If you wish to include them,
|
||||
use reflection.MakeClass() instead of manually instantiating the class in
|
||||
order to create the appropriate class structure.
|
||||
"""
|
||||
|
||||
# Must be consistent with the protocol-compiler code in
|
||||
# proto2/compiler/internal/generator.*.
|
||||
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
||||
|
||||
def __new__(cls, name, bases, dictionary):
|
||||
"""Custom allocation for runtime-generated class types.
|
||||
|
||||
We override __new__ because this is apparently the only place
|
||||
where we can meaningfully set __slots__ on the class we're creating(?).
|
||||
(The interplay between metaclasses and slots is not very well-documented).
|
||||
|
||||
Args:
|
||||
name: Name of the class (ignored, but required by the
|
||||
metaclass protocol).
|
||||
bases: Base classes of the class we're constructing.
|
||||
(Should be message.Message). We ignore this field, but
|
||||
it's required by the metaclass protocol
|
||||
dictionary: The class dictionary of the class we're
|
||||
constructing. dictionary[_DESCRIPTOR_KEY] must contain
|
||||
a Descriptor object describing this protocol message
|
||||
type.
|
||||
|
||||
Returns:
|
||||
Newly-allocated class.
|
||||
"""
|
||||
descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
|
||||
_AddClassAttributesForNestedExtensions(descriptor, dictionary)
|
||||
_AddSlots(descriptor, dictionary)
|
||||
|
||||
superclass = super(GeneratedProtocolMessageType, cls)
|
||||
new_class = superclass.__new__(cls, name, bases, dictionary)
|
||||
return new_class
|
||||
|
||||
def __init__(cls, name, bases, dictionary):
|
||||
"""Here we perform the majority of our work on the class.
|
||||
We add enum getters, an __init__ method, implementations
|
||||
of all Message methods, and properties for all fields
|
||||
in the protocol type.
|
||||
|
||||
Args:
|
||||
name: Name of the class (ignored, but required by the
|
||||
metaclass protocol).
|
||||
bases: Base classes of the class we're constructing.
|
||||
(Should be message.Message). We ignore this field, but
|
||||
it's required by the metaclass protocol
|
||||
dictionary: The class dictionary of the class we're
|
||||
constructing. dictionary[_DESCRIPTOR_KEY] must contain
|
||||
a Descriptor object describing this protocol message
|
||||
type.
|
||||
"""
|
||||
descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
|
||||
cls._decoders_by_tag = {}
|
||||
cls._extensions_by_name = {}
|
||||
cls._extensions_by_number = {}
|
||||
if (descriptor.has_options and
|
||||
descriptor.GetOptions().message_set_wire_format):
|
||||
cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
|
||||
decoder.MessageSetItemDecoder(cls._extensions_by_number), None)
|
||||
|
||||
# Attach stuff to each FieldDescriptor for quick lookup later on.
|
||||
for field in descriptor.fields:
|
||||
_AttachFieldHelpers(cls, field)
|
||||
|
||||
descriptor._concrete_class = cls # pylint: disable=protected-access
|
||||
_AddEnumValues(descriptor, cls)
|
||||
_AddInitMethod(descriptor, cls)
|
||||
_AddPropertiesForFields(descriptor, cls)
|
||||
_AddPropertiesForExtensions(descriptor, cls)
|
||||
_AddStaticMethods(cls)
|
||||
_AddMessageMethods(descriptor, cls)
|
||||
_AddPrivateHelperMethods(descriptor, cls)
|
||||
copyreg.pickle(cls, lambda obj: (cls, (), obj.__getstate__()))
|
||||
|
||||
superclass = super(GeneratedProtocolMessageType, cls)
|
||||
superclass.__init__(name, bases, dictionary)
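As the docstring notes, manually instantiating the metaclass does not handle nested types; reflection.MakeClass() does. A hedged usage sketch, reusing a generated Descriptor rather than building one by hand:

from google.protobuf import reflection
from google.protobuf import unittest_pb2  # any module exposing a Descriptor will do

# Build a message class at runtime from an existing Descriptor.
cls = reflection.MakeClass(unittest_pb2.TestAllTypes.DESCRIPTOR)
msg = cls()
msg.optional_int32 = 23
assert msg.optional_int32 == 23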
|
||||
|
||||
|
||||
# Stateless helpers for GeneratedProtocolMessageType below.
|
||||
@ -362,9 +436,10 @@ def _DefaultValueConstructorForField(field):
|
||||
message_type = field.message_type
|
||||
def MakeSubMessageDefault(message):
|
||||
result = message_type._concrete_class()
|
||||
result._SetListener(message._listener_for_children)
|
||||
if field.containing_oneof:
|
||||
message._UpdateOneofState(field)
|
||||
result._SetListener(
|
||||
_OneofListener(message, field)
|
||||
if field.containing_oneof is not None
|
||||
else message._listener_for_children)
|
||||
return result
|
||||
return MakeSubMessageDefault
|
||||
|
||||
@ -634,21 +709,11 @@ def _AddPropertiesForNonRepeatedCompositeField(field, cls):
|
||||
proto_field_name = field.name
|
||||
property_name = _PropertyName(proto_field_name)
|
||||
|
||||
# TODO(komarek): Can anyone explain to me why we cache the message_type this
# way, instead of referring to field.message_type inside of getter(self)?
# What if someone sets message_type later on (which makes for simpler
# dynamic proto descriptor and class creation code)?
|
||||
message_type = field.message_type
|
||||
|
||||
def getter(self):
|
||||
field_value = self._fields.get(field)
|
||||
if field_value is None:
|
||||
# Construct a new object to represent this field.
|
||||
field_value = message_type._concrete_class() # use field.message_type?
|
||||
field_value._SetListener(
|
||||
_OneofListener(self, field)
|
||||
if field.containing_oneof is not None
|
||||
else self._listener_for_children)
|
||||
field_value = field._default_constructor(self)
|
||||
|
||||
# Atomically check if another thread has preempted us and, if not, swap
|
||||
# in the new object we just created. If someone has preempted us, we
|
||||
@ -1121,7 +1186,7 @@ def _AddIsInitializedMethod(message_descriptor, cls):
|
||||
if _IsMessageMapField(field):
|
||||
for key in value:
|
||||
element = value[key]
|
||||
prefix = "%s[%d]." % (name, key)
|
||||
prefix = "%s[%s]." % (name, key)
|
||||
sub_errors = element.FindInitializationErrors()
|
||||
errors += [prefix + error for error in sub_errors]
|
||||
else:
|
||||
@ -1173,8 +1238,6 @@ def _AddMergeFromMethod(cls):
|
||||
# Construct a new object to represent this field.
|
||||
field_value = field._default_constructor(self)
|
||||
fields[field] = field_value
|
||||
if field.containing_oneof:
|
||||
self._UpdateOneofState(field)
|
||||
field_value.MergeFrom(value)
|
||||
else:
|
||||
self._fields[field] = value
|
||||
|
@ -52,6 +52,7 @@ from google.protobuf import text_format
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf.internal import more_extensions_pb2
|
||||
from google.protobuf.internal import more_messages_pb2
|
||||
from google.protobuf.internal import message_set_extensions_pb2
|
||||
from google.protobuf.internal import wire_format
|
||||
from google.protobuf.internal import test_util
|
||||
from google.protobuf.internal import decoder
|
||||
@ -1682,8 +1683,8 @@ class ReflectionTest(unittest.TestCase):
|
||||
proto.optional_string = 'abc'
|
||||
|
||||
def testStringUTF8Serialization(self):
|
||||
proto = unittest_mset_pb2.TestMessageSet()
|
||||
extension_message = unittest_mset_pb2.TestMessageSetExtension2
|
||||
proto = message_set_extensions_pb2.TestMessageSet()
|
||||
extension_message = message_set_extensions_pb2.TestMessageSetExtension2
|
||||
extension = extension_message.message_set_extension
|
||||
|
||||
test_utf8 = u'Тест'
|
||||
@ -1703,15 +1704,14 @@ class ReflectionTest(unittest.TestCase):
|
||||
bytes_read = raw.MergeFromString(serialized)
|
||||
self.assertEqual(len(serialized), bytes_read)
|
||||
|
||||
message2 = unittest_mset_pb2.TestMessageSetExtension2()
|
||||
message2 = message_set_extensions_pb2.TestMessageSetExtension2()
|
||||
|
||||
self.assertEqual(1, len(raw.item))
|
||||
# Check that the type_id is the same as the tag ID in the .proto file.
|
||||
self.assertEqual(raw.item[0].type_id, 1547769)
|
||||
self.assertEqual(raw.item[0].type_id, 98418634)
|
||||
|
||||
# Check the actual bytes on the wire.
|
||||
self.assertTrue(
|
||||
raw.item[0].message.endswith(test_utf8_bytes))
|
||||
self.assertTrue(raw.item[0].message.endswith(test_utf8_bytes))
|
||||
bytes_read = message2.MergeFromString(raw.item[0].message)
|
||||
self.assertEqual(len(raw.item[0].message), bytes_read)
|
||||
|
||||
@ -2395,9 +2395,9 @@ class SerializationTest(unittest.TestCase):
|
||||
self.assertEqual(42, second_proto.optional_nested_message.bb)
|
||||
|
||||
def testMessageSetWireFormat(self):
|
||||
proto = unittest_mset_pb2.TestMessageSet()
|
||||
extension_message1 = unittest_mset_pb2.TestMessageSetExtension1
|
||||
extension_message2 = unittest_mset_pb2.TestMessageSetExtension2
|
||||
proto = message_set_extensions_pb2.TestMessageSet()
|
||||
extension_message1 = message_set_extensions_pb2.TestMessageSetExtension1
|
||||
extension_message2 = message_set_extensions_pb2.TestMessageSetExtension2
|
||||
extension1 = extension_message1.message_set_extension
|
||||
extension2 = extension_message2.message_set_extension
|
||||
proto.Extensions[extension1].i = 123
|
||||
@ -2415,20 +2415,20 @@ class SerializationTest(unittest.TestCase):
|
||||
raw.MergeFromString(serialized))
|
||||
self.assertEqual(2, len(raw.item))
|
||||
|
||||
message1 = unittest_mset_pb2.TestMessageSetExtension1()
|
||||
message1 = message_set_extensions_pb2.TestMessageSetExtension1()
|
||||
self.assertEqual(
|
||||
len(raw.item[0].message),
|
||||
message1.MergeFromString(raw.item[0].message))
|
||||
self.assertEqual(123, message1.i)
|
||||
|
||||
message2 = unittest_mset_pb2.TestMessageSetExtension2()
|
||||
message2 = message_set_extensions_pb2.TestMessageSetExtension2()
|
||||
self.assertEqual(
|
||||
len(raw.item[1].message),
|
||||
message2.MergeFromString(raw.item[1].message))
|
||||
self.assertEqual('foo', message2.str)
|
||||
|
||||
# Deserialize using the MessageSet wire format.
|
||||
proto2 = unittest_mset_pb2.TestMessageSet()
|
||||
proto2 = message_set_extensions_pb2.TestMessageSet()
|
||||
self.assertEqual(
|
||||
len(serialized),
|
||||
proto2.MergeFromString(serialized))
|
||||
@ -2446,37 +2446,37 @@ class SerializationTest(unittest.TestCase):
|
||||
|
||||
# Add an item.
|
||||
item = raw.item.add()
|
||||
item.type_id = 1545008
|
||||
extension_message1 = unittest_mset_pb2.TestMessageSetExtension1
|
||||
message1 = unittest_mset_pb2.TestMessageSetExtension1()
|
||||
item.type_id = 98418603
|
||||
extension_message1 = message_set_extensions_pb2.TestMessageSetExtension1
|
||||
message1 = message_set_extensions_pb2.TestMessageSetExtension1()
|
||||
message1.i = 12345
|
||||
item.message = message1.SerializeToString()
|
||||
|
||||
# Add a second, unknown extension.
|
||||
item = raw.item.add()
|
||||
item.type_id = 1545009
|
||||
extension_message1 = unittest_mset_pb2.TestMessageSetExtension1
|
||||
message1 = unittest_mset_pb2.TestMessageSetExtension1()
|
||||
item.type_id = 98418604
|
||||
extension_message1 = message_set_extensions_pb2.TestMessageSetExtension1
|
||||
message1 = message_set_extensions_pb2.TestMessageSetExtension1()
|
||||
message1.i = 12346
|
||||
item.message = message1.SerializeToString()
|
||||
|
||||
# Add another unknown extension.
|
||||
item = raw.item.add()
|
||||
item.type_id = 1545010
|
||||
message1 = unittest_mset_pb2.TestMessageSetExtension2()
|
||||
item.type_id = 98418605
|
||||
message1 = message_set_extensions_pb2.TestMessageSetExtension2()
|
||||
message1.str = 'foo'
|
||||
item.message = message1.SerializeToString()
|
||||
|
||||
serialized = raw.SerializeToString()
|
||||
|
||||
# Parse message using the message set wire format.
|
||||
proto = unittest_mset_pb2.TestMessageSet()
|
||||
proto = message_set_extensions_pb2.TestMessageSet()
|
||||
self.assertEqual(
|
||||
len(serialized),
|
||||
proto.MergeFromString(serialized))
|
||||
|
||||
# Check that the message parsed well.
|
||||
extension_message1 = unittest_mset_pb2.TestMessageSetExtension1
|
||||
extension_message1 = message_set_extensions_pb2.TestMessageSetExtension1
|
||||
extension1 = extension_message1.message_set_extension
|
||||
self.assertEquals(12345, proto.Extensions[extension1].i)
|
||||
|
||||
@ -2805,7 +2805,7 @@ class SerializationTest(unittest.TestCase):
|
||||
class OptionsTest(unittest.TestCase):
|
||||
|
||||
def testMessageOptions(self):
|
||||
proto = unittest_mset_pb2.TestMessageSet()
|
||||
proto = message_set_extensions_pb2.TestMessageSet()
|
||||
self.assertEqual(True,
|
||||
proto.DESCRIPTOR.GetOptions().message_set_wire_format)
|
||||
proto = unittest_pb2.TestAllTypes()
|
||||
@ -2824,7 +2824,7 @@ class OptionsTest(unittest.TestCase):
|
||||
proto.packed_double.append(3.0)
|
||||
for field_descriptor, _ in proto.ListFields():
|
||||
self.assertEqual(True, field_descriptor.GetOptions().packed)
|
||||
self.assertEqual(reflection._FieldDescriptor.LABEL_REPEATED,
|
||||
self.assertEqual(descriptor.FieldDescriptor.LABEL_REPEATED,
|
||||
field_descriptor.label)
|
||||
|
||||
|
||||
|
@ -604,7 +604,8 @@ def GoldenFile(filename):
|
||||
|
||||
# Search internally.
|
||||
path = '.'
|
||||
full_path = os.path.join(path, 'third_party/py/google/protobuf/testdata', filename)
|
||||
full_path = os.path.join(path, 'third_party/py/google/protobuf/testdata',
|
||||
filename)
|
||||
if os.path.exists(full_path):
|
||||
# Found it. Load the golden file from the testdata directory.
|
||||
return open(full_path, 'rb')
|
||||
|
@ -35,6 +35,7 @@
|
||||
__author__ = 'kenton@google.com (Kenton Varda)'
|
||||
|
||||
import re
|
||||
import string
|
||||
import unittest
|
||||
|
||||
import unittest
|
||||
@ -497,6 +498,36 @@ class OnlyWorksWithProto2RightNowTests(TextFormatBase):
|
||||
' }\n'
|
||||
'}\n')
|
||||
|
||||
def testMapOrderEnforcement(self):
|
||||
message = map_unittest_pb2.TestMap()
|
||||
for letter in string.ascii_uppercase[13:26]:
|
||||
message.map_string_string[letter] = 'dummy'
|
||||
for letter in reversed(string.ascii_uppercase[0:13]):
|
||||
message.map_string_string[letter] = 'dummy'
|
||||
golden = ''.join((
|
||||
'map_string_string {\n key: "%c"\n value: "dummy"\n}\n' % (letter,)
|
||||
for letter in string.ascii_uppercase))
|
||||
self.CompareToGoldenText(text_format.MessageToString(message), golden)
|
||||
|
||||
def testMapOrderSemantics(self):
|
||||
golden_lines = self.ReadGolden('map_test_data.txt')
|
||||
# The C++ implementation emits defaulted-value fields, while the Python
|
||||
# implementation does not. Adjusting for this is awkward, but it is
|
||||
# valuable to test against a common golden file.
|
||||
line_blacklist = (' key: 0\n',
|
||||
' value: 0\n',
|
||||
' key: false\n',
|
||||
' value: false\n')
|
||||
golden_lines = [line for line in golden_lines if line not in line_blacklist]
|
||||
|
||||
message = map_unittest_pb2.TestMap()
|
||||
text_format.ParseLines(golden_lines, message)
|
||||
candidate = text_format.MessageToString(message)
|
||||
# The Python implementation emits "1.0" for the double value that the C++
|
||||
# implementation emits as "1".
|
||||
candidate = candidate.replace('1.0', '1', 2)
|
||||
self.assertMultiLineEqual(candidate, ''.join(golden_lines))
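What the two tests above establish, in miniature: text_format emits map entries ordered by key, regardless of insertion order (at least for the implementation exercised here). A short sketch using the same modules this test file already imports:

from google.protobuf import text_format
from google.protobuf import map_unittest_pb2

m = map_unittest_pb2.TestMap()
m.map_string_string['b'] = '2'
m.map_string_string['a'] = '1'
out = text_format.MessageToString(m)
assert out.index('key: "a"') < out.index('key: "b"')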
|
||||
|
||||
|
||||
# Tests of proto2-only features (MessageSet, extensions, etc.).
|
||||
class Proto2Tests(TextFormatBase):
|
||||
|
@ -41,11 +41,18 @@ from google.protobuf import unittest_pb2
|
||||
from google.protobuf import unittest_proto3_arena_pb2
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf.internal import encoder
|
||||
from google.protobuf.internal import message_set_extensions_pb2
|
||||
from google.protobuf.internal import missing_enum_values_pb2
|
||||
from google.protobuf.internal import test_util
|
||||
from google.protobuf.internal import type_checkers
|
||||
|
||||
|
||||
def SkipIfCppImplementation(func):
|
||||
return unittest.skipIf(
|
||||
api_implementation.Type() == 'cpp' and api_implementation.Version() == 2,
|
||||
'C++ implementation does not expose unknown fields to Python')(func)
|
||||
|
||||
|
||||
class UnknownFieldsTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
@ -83,15 +90,15 @@ class UnknownFieldsTest(unittest.TestCase):
|
||||
|
||||
# Add an unknown extension.
|
||||
item = raw.item.add()
|
||||
item.type_id = 1545009
|
||||
message1 = unittest_mset_pb2.TestMessageSetExtension1()
|
||||
item.type_id = 98418603
|
||||
message1 = message_set_extensions_pb2.TestMessageSetExtension1()
|
||||
message1.i = 12345
|
||||
item.message = message1.SerializeToString()
|
||||
|
||||
serialized = raw.SerializeToString()
|
||||
|
||||
# Parse message using the message set wire format.
|
||||
proto = unittest_mset_pb2.TestMessageSet()
|
||||
proto = message_set_extensions_pb2.TestMessageSet()
|
||||
proto.MergeFromString(serialized)
|
||||
|
||||
# Verify that the unknown extension is serialized unchanged
|
||||
@ -100,13 +107,6 @@ class UnknownFieldsTest(unittest.TestCase):
|
||||
new_raw.MergeFromString(reserialized)
|
||||
self.assertEqual(raw, new_raw)
|
||||
|
||||
# C++ implementation for proto2 does not currently take into account unknown
|
||||
# fields when checking equality.
|
||||
#
|
||||
# TODO(haberman): fix this.
|
||||
@unittest.skipIf(
|
||||
api_implementation.Type() == 'cpp' and api_implementation.Version() == 2,
|
||||
'C++ implementation does not expose unknown fields to Python')
|
||||
def testEquals(self):
|
||||
message = unittest_pb2.TestEmptyMessage()
|
||||
message.ParseFromString(self.all_fields_data)
|
||||
@ -117,9 +117,6 @@ class UnknownFieldsTest(unittest.TestCase):
|
||||
self.assertNotEqual(self.empty_message, message)
|
||||
|
||||
|
||||
@unittest.skipIf(
|
||||
api_implementation.Type() == 'cpp' and api_implementation.Version() == 2,
|
||||
'C++ implementation does not expose unknown fields to Python')
|
||||
class UnknownFieldsAccessorsTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
@ -129,7 +126,14 @@ class UnknownFieldsAccessorsTest(unittest.TestCase):
|
||||
self.all_fields_data = self.all_fields.SerializeToString()
|
||||
self.empty_message = unittest_pb2.TestEmptyMessage()
|
||||
self.empty_message.ParseFromString(self.all_fields_data)
|
||||
self.unknown_fields = self.empty_message._unknown_fields
|
||||
if api_implementation.Type() != 'cpp':
|
||||
# _unknown_fields is an implementation detail.
|
||||
self.unknown_fields = self.empty_message._unknown_fields
|
||||
|
||||
# All the tests that use GetField() check an implementation detail of the
|
||||
# Python implementation, which stores unknown fields as serialized strings.
|
||||
# These tests are skipped by the C++ implementation: it's enough to check that
|
||||
# the message is correctly serialized.
|
||||
|
||||
def GetField(self, name):
|
||||
field_descriptor = self.descriptor.fields_by_name[name]
|
||||
@ -142,30 +146,37 @@ class UnknownFieldsAccessorsTest(unittest.TestCase):
|
||||
decoder(value, 0, len(value), self.all_fields, result_dict)
|
||||
return result_dict[field_descriptor]
|
||||
|
||||
@SkipIfCppImplementation
|
||||
def testEnum(self):
|
||||
value = self.GetField('optional_nested_enum')
|
||||
self.assertEqual(self.all_fields.optional_nested_enum, value)
|
||||
|
||||
@SkipIfCppImplementation
|
||||
def testRepeatedEnum(self):
|
||||
value = self.GetField('repeated_nested_enum')
|
||||
self.assertEqual(self.all_fields.repeated_nested_enum, value)
|
||||
|
||||
@SkipIfCppImplementation
|
||||
def testVarint(self):
|
||||
value = self.GetField('optional_int32')
|
||||
self.assertEqual(self.all_fields.optional_int32, value)
|
||||
|
||||
@SkipIfCppImplementation
|
||||
def testFixed32(self):
|
||||
value = self.GetField('optional_fixed32')
|
||||
self.assertEqual(self.all_fields.optional_fixed32, value)
|
||||
|
||||
@SkipIfCppImplementation
|
||||
def testFixed64(self):
|
||||
value = self.GetField('optional_fixed64')
|
||||
self.assertEqual(self.all_fields.optional_fixed64, value)
|
||||
|
||||
@SkipIfCppImplementation
|
||||
def testLengthDelimited(self):
|
||||
value = self.GetField('optional_string')
|
||||
self.assertEqual(self.all_fields.optional_string, value)
|
||||
|
||||
@SkipIfCppImplementation
|
||||
def testGroup(self):
|
||||
value = self.GetField('optionalgroup')
|
||||
self.assertEqual(self.all_fields.optionalgroup, value)
|
||||
@ -173,7 +184,7 @@ class UnknownFieldsAccessorsTest(unittest.TestCase):
|
||||
def testCopyFrom(self):
|
||||
message = unittest_pb2.TestEmptyMessage()
|
||||
message.CopyFrom(self.empty_message)
|
||||
self.assertEqual(self.unknown_fields, message._unknown_fields)
|
||||
self.assertEqual(message.SerializeToString(), self.all_fields_data)
|
||||
|
||||
def testMergeFrom(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
@ -187,27 +198,26 @@ class UnknownFieldsAccessorsTest(unittest.TestCase):
|
||||
message.optional_uint32 = 4
|
||||
destination = unittest_pb2.TestEmptyMessage()
|
||||
destination.ParseFromString(message.SerializeToString())
|
||||
unknown_fields = destination._unknown_fields[:]
|
||||
|
||||
destination.MergeFrom(source)
|
||||
self.assertEqual(unknown_fields + source._unknown_fields,
|
||||
destination._unknown_fields)
|
||||
# Check that the fields were correctly merged, even though they are stored
# in the unknown fields set.
|
||||
message.ParseFromString(destination.SerializeToString())
|
||||
self.assertEqual(message.optional_int32, 1)
|
||||
self.assertEqual(message.optional_uint32, 2)
|
||||
self.assertEqual(message.optional_int64, 3)
|
||||
|
||||
def testClear(self):
|
||||
self.empty_message.Clear()
|
||||
self.assertEqual(0, len(self.empty_message._unknown_fields))
|
||||
# All cleared, even unknown fields.
|
||||
self.assertEqual(self.empty_message.SerializeToString(), b'')
|
||||
|
||||
def testUnknownExtensions(self):
|
||||
message = unittest_pb2.TestEmptyMessageWithExtensions()
|
||||
message.ParseFromString(self.all_fields_data)
|
||||
self.assertEqual(self.empty_message._unknown_fields,
|
||||
message._unknown_fields)
|
||||
self.assertEqual(message.SerializeToString(), self.all_fields_data)
|
||||
|
||||
|
||||
|
||||
@unittest.skipIf(
|
||||
api_implementation.Type() == 'cpp' and api_implementation.Version() == 2,
|
||||
'C++ implementation does not expose unknown fields to Python')
|
||||
class UnknownEnumValuesTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
@ -227,7 +237,14 @@ class UnknownEnumValuesTest(unittest.TestCase):
|
||||
self.message_data = self.message.SerializeToString()
|
||||
self.missing_message = missing_enum_values_pb2.TestMissingEnumValues()
|
||||
self.missing_message.ParseFromString(self.message_data)
|
||||
self.unknown_fields = self.missing_message._unknown_fields
|
||||
if api_implementation.Type() != 'cpp':
|
||||
# _unknown_fields is an implementation detail.
|
||||
self.unknown_fields = self.missing_message._unknown_fields
|
||||
|
||||
# All the tests that use GetField() check an implementation detail of the
|
||||
# Python implementation, which stores unknown fields as serialized strings.
|
||||
# These tests are skipped by the C++ implementation: it's enough to check that
|
||||
# the message is correctly serialized.
|
||||
|
||||
def GetField(self, name):
|
||||
field_descriptor = self.descriptor.fields_by_name[name]
|
||||
@ -241,15 +258,18 @@ class UnknownEnumValuesTest(unittest.TestCase):
|
||||
decoder(value, 0, len(value), self.message, result_dict)
|
||||
return result_dict[field_descriptor]
|
||||
|
||||
@SkipIfCppImplementation
|
||||
def testUnknownEnumValue(self):
|
||||
self.assertFalse(self.missing_message.HasField('optional_nested_enum'))
|
||||
value = self.GetField('optional_nested_enum')
|
||||
self.assertEqual(self.message.optional_nested_enum, value)
|
||||
|
||||
@SkipIfCppImplementation
|
||||
def testUnknownRepeatedEnumValue(self):
|
||||
value = self.GetField('repeated_nested_enum')
|
||||
self.assertEqual(self.message.repeated_nested_enum, value)
|
||||
|
||||
@SkipIfCppImplementation
|
||||
def testUnknownPackedEnumValue(self):
|
||||
value = self.GetField('packed_nested_enum')
|
||||
self.assertEqual(self.message.packed_nested_enum, value)
|
||||
|
@ -37,21 +37,29 @@ Descriptor objects at runtime backed by the protocol buffer C++ API.
|
||||
__author__ = 'tibell@google.com (Johan Tibell)'
|
||||
|
||||
from google.protobuf.pyext import _message
|
||||
from google.protobuf import message
|
||||
|
||||
|
||||
def NewMessage(bases, message_descriptor, dictionary):
|
||||
"""Creates a new protocol message *class*."""
|
||||
new_bases = []
|
||||
for base in bases:
|
||||
if base is message.Message:
|
||||
# _message.Message must come before message.Message as it
|
||||
# overrides methods in that class.
|
||||
new_bases.append(_message.Message)
|
||||
new_bases.append(base)
|
||||
return tuple(new_bases)
|
||||
class GeneratedProtocolMessageType(_message.MessageMeta):
|
||||
|
||||
"""Metaclass for protocol message classes created at runtime from Descriptors.
|
||||
|
||||
def InitMessage(message_descriptor, cls):
|
||||
"""Finalizes the creation of a message class."""
|
||||
cls.AddDescriptors(message_descriptor)
|
||||
The protocol compiler currently uses this metaclass to create protocol
|
||||
message classes at runtime. Clients can also manually create their own
|
||||
classes at runtime, as in this example:
|
||||
|
||||
mydescriptor = Descriptor(.....)
|
||||
class MyProtoClass(Message):
|
||||
__metaclass__ = GeneratedProtocolMessageType
|
||||
DESCRIPTOR = mydescriptor
|
||||
myproto_instance = MyProtoClass()
|
||||
myproto.foo_field = 23
|
||||
...
|
||||
|
||||
The above example will not work for nested types. If you wish to include them,
|
||||
use reflection.MakeClass() instead of manually instantiating the class in
|
||||
order to create the appropriate class structure.
|
||||
"""
|
||||
|
||||
# Must be consistent with the protocol-compiler code in
|
||||
# proto2/compiler/internal/generator.*.
|
||||
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
||||
|
@ -62,6 +62,14 @@ namespace google {
|
||||
namespace protobuf {
|
||||
namespace python {
|
||||
|
||||
// Store interned descriptors, so that the same C++ descriptor yields the same
|
||||
// Python object. Objects are not immortal: this map does not own the
|
||||
// references, and items are deleted when the last reference to the object is
|
||||
// released.
|
||||
// This is enough to support the "is" operator on live objects.
|
||||
// All descriptors are stored here.
|
||||
hash_map<const void*, PyObject*> interned_descriptors;
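The interning map above is what makes descriptor identity visible from Python: wrapping the same C++ descriptor twice yields the very same Python object, so "is" comparisons hold. An illustrative sketch (assumes the standard generated test module; not part of this change):

from google.protobuf import unittest_pb2

desc = unittest_pb2.TestAllTypes.DESCRIPTOR
field = desc.fields_by_name['optional_int32']
# The wrapper built for field.containing_type is the interned object, so
# identity comparison succeeds rather than merely equality.
assert field.containing_type is desc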
|
||||
|
||||
PyObject* PyString_FromCppString(const string& str) {
|
||||
return PyString_FromStringAndSize(str.c_str(), str.size());
|
||||
}
|
||||
@ -147,6 +155,24 @@ static int CheckCalledFromGeneratedFile(const char* attr_name) {
|
||||
|
||||
// Helper functions for descriptor objects.
|
||||
|
||||
// A set of templates to retrieve the C++ FileDescriptor of any descriptor.
|
||||
template<class DescriptorClass>
|
||||
const FileDescriptor* GetFileDescriptor(const DescriptorClass* descriptor) {
|
||||
return descriptor->file();
|
||||
}
|
||||
template<>
|
||||
const FileDescriptor* GetFileDescriptor(const FileDescriptor* descriptor) {
|
||||
return descriptor;
|
||||
}
|
||||
template<>
|
||||
const FileDescriptor* GetFileDescriptor(const EnumValueDescriptor* descriptor) {
|
||||
return descriptor->type()->file();
|
||||
}
|
||||
template<>
|
||||
const FileDescriptor* GetFileDescriptor(const OneofDescriptor* descriptor) {
|
||||
return descriptor->containing_type()->file();
|
||||
}
|
||||
|
||||
// Converts options into a Python protobuf, and cache the result.
|
||||
//
|
||||
// This is a bit tricky because options can contain extension fields defined in
|
||||
@ -156,8 +182,13 @@ static int CheckCalledFromGeneratedFile(const char* attr_name) {
|
||||
// Always returns a new reference.
|
||||
template<class DescriptorClass>
|
||||
static PyObject* GetOrBuildOptions(const DescriptorClass *descriptor) {
|
||||
// Options (and their extensions) are completely resolved in the proto file
|
||||
// containing the descriptor.
|
||||
PyDescriptorPool* pool = GetDescriptorPool_FromPool(
|
||||
GetFileDescriptor(descriptor)->pool());
|
||||
|
||||
hash_map<const void*, PyObject*>* descriptor_options =
|
||||
GetDescriptorPool()->descriptor_options;
|
||||
pool->descriptor_options;
|
||||
// First search in the cache.
|
||||
if (descriptor_options->find(descriptor) != descriptor_options->end()) {
|
||||
PyObject *value = (*descriptor_options)[descriptor];
|
||||
@ -170,7 +201,7 @@ static PyObject* GetOrBuildOptions(const DescriptorClass *descriptor) {
|
||||
const Message& options(descriptor->options());
|
||||
const Descriptor *message_type = options.GetDescriptor();
|
||||
PyObject* message_class(cdescriptor_pool::GetMessageClass(
|
||||
GetDescriptorPool(), message_type));
|
||||
pool, message_type));
|
||||
if (message_class == NULL) {
|
||||
PyErr_Format(PyExc_TypeError, "Could not retrieve class for Options: %s",
|
||||
message_type->full_name().c_str());
|
||||
@ -192,8 +223,8 @@ static PyObject* GetOrBuildOptions(const DescriptorClass *descriptor) {
|
||||
options.SerializeToString(&serialized);
|
||||
io::CodedInputStream input(
|
||||
reinterpret_cast<const uint8*>(serialized.c_str()), serialized.size());
|
||||
input.SetExtensionRegistry(GetDescriptorPool()->pool,
|
||||
cmessage::GetMessageFactory());
|
||||
input.SetExtensionRegistry(pool->pool,
|
||||
GetDescriptorPool()->message_factory);
|
||||
bool success = cmsg->message->MergePartialFromCodedStream(&input);
|
||||
if (!success) {
|
||||
PyErr_Format(PyExc_ValueError, "Error parsing Options message");
|
||||
@ -203,7 +234,7 @@ static PyObject* GetOrBuildOptions(const DescriptorClass *descriptor) {
|
||||
|
||||
// Cache the result.
|
||||
Py_INCREF(value);
|
||||
(*GetDescriptorPool()->descriptor_options)[descriptor] = value.get();
|
||||
(*pool->descriptor_options)[descriptor] = value.get();
|
||||
|
||||
return value.release();
|
||||
}
|
||||
@ -237,6 +268,9 @@ typedef struct PyBaseDescriptor {
|
||||
// Pointer to the C++ proto2 descriptor.
|
||||
// Like all descriptors, it is owned by the global DescriptorPool.
|
||||
const void* descriptor;
|
||||
|
||||
// Owned reference to the DescriptorPool, to ensure it is kept alive.
|
||||
PyDescriptorPool* pool;
|
||||
} PyBaseDescriptor;
|
||||
|
||||
|
||||
@ -258,7 +292,9 @@ namespace descriptor {
|
||||
// 'was_created' is an optional pointer to a bool, and is set to true if a new
|
||||
// object was allocated.
|
||||
// Always returns a new reference.
|
||||
PyObject* NewInternedDescriptor(PyTypeObject* type, const void* descriptor,
|
||||
template<class DescriptorClass>
|
||||
PyObject* NewInternedDescriptor(PyTypeObject* type,
|
||||
const DescriptorClass* descriptor,
|
||||
bool* was_created) {
|
||||
if (was_created) {
|
||||
*was_created = false;
|
||||
@ -270,8 +306,8 @@ PyObject* NewInternedDescriptor(PyTypeObject* type, const void* descriptor,
|
||||
|
||||
// See if the object is in the map of interned descriptors
|
||||
hash_map<const void*, PyObject*>::iterator it =
|
||||
GetDescriptorPool()->interned_descriptors->find(descriptor);
|
||||
if (it != GetDescriptorPool()->interned_descriptors->end()) {
|
||||
interned_descriptors.find(descriptor);
|
||||
if (it != interned_descriptors.end()) {
|
||||
GOOGLE_DCHECK(Py_TYPE(it->second) == type);
|
||||
Py_INCREF(it->second);
|
||||
return it->second;
|
||||
@ -283,10 +319,21 @@ PyObject* NewInternedDescriptor(PyTypeObject* type, const void* descriptor,
|
||||
return NULL;
|
||||
}
|
||||
py_descriptor->descriptor = descriptor;
|
||||
|
||||
// and cache it.
|
||||
GetDescriptorPool()->interned_descriptors->insert(
|
||||
interned_descriptors.insert(
|
||||
std::make_pair(descriptor, reinterpret_cast<PyObject*>(py_descriptor)));
|
||||
|
||||
// Ensures that the DescriptorPool stays alive.
|
||||
PyDescriptorPool* pool = GetDescriptorPool_FromPool(
|
||||
GetFileDescriptor(descriptor)->pool());
|
||||
if (pool == NULL) {
|
||||
Py_DECREF(py_descriptor);
|
||||
return NULL;
|
||||
}
|
||||
Py_INCREF(pool);
|
||||
py_descriptor->pool = pool;
|
||||
|
||||
if (was_created) {
|
||||
*was_created = true;
|
||||
}
|
||||
@ -295,7 +342,8 @@ PyObject* NewInternedDescriptor(PyTypeObject* type, const void* descriptor,
|
||||
|
||||
static void Dealloc(PyBaseDescriptor* self) {
|
||||
// Remove from interned dictionary
|
||||
GetDescriptorPool()->interned_descriptors->erase(self->descriptor);
|
||||
interned_descriptors.erase(self->descriptor);
|
||||
Py_CLEAR(self->pool);
|
||||
Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
|
||||
}
|
||||
|
||||
|
@ -33,6 +33,7 @@
|
||||
#include <Python.h>
|
||||
|
||||
#include <google/protobuf/descriptor.pb.h>
|
||||
#include <google/protobuf/dynamic_message.h>
|
||||
#include <google/protobuf/pyext/descriptor_pool.h>
|
||||
#include <google/protobuf/pyext/descriptor.h>
|
||||
#include <google/protobuf/pyext/message.h>
|
||||
@ -53,9 +54,13 @@ namespace google {
|
||||
namespace protobuf {
|
||||
namespace python {
|
||||
|
||||
// A map to cache Python Pools per C++ pointer.
|
||||
// Pointers are not owned here, and belong to the PyDescriptorPool.
|
||||
static hash_map<const DescriptorPool*, PyDescriptorPool*> descriptor_pool_map;
|
||||
|
||||
namespace cdescriptor_pool {
|
||||
|
||||
PyDescriptorPool* NewDescriptorPool() {
|
||||
static PyDescriptorPool* NewDescriptorPool() {
|
||||
PyDescriptorPool* cdescriptor_pool = PyObject_New(
|
||||
PyDescriptorPool, &PyDescriptorPool_Type);
|
||||
if (cdescriptor_pool == NULL) {
|
||||
@ -67,32 +72,43 @@ PyDescriptorPool* NewDescriptorPool() {
|
||||
// as underlay.
|
||||
cdescriptor_pool->pool = new DescriptorPool(DescriptorPool::generated_pool());
|
||||
|
||||
DynamicMessageFactory* message_factory = new DynamicMessageFactory();
|
||||
// This option might be the default some day.
|
||||
message_factory->SetDelegateToGeneratedFactory(true);
|
||||
cdescriptor_pool->message_factory = message_factory;
|
||||
|
||||
// TODO(amauryfa): Rewrite the SymbolDatabase in C so that it uses the same
|
||||
// storage.
|
||||
cdescriptor_pool->classes_by_descriptor =
|
||||
new PyDescriptorPool::ClassesByMessageMap();
|
||||
cdescriptor_pool->interned_descriptors =
|
||||
new hash_map<const void*, PyObject *>();
|
||||
cdescriptor_pool->descriptor_options =
|
||||
new hash_map<const void*, PyObject *>();
|
||||
|
||||
if (!descriptor_pool_map.insert(
|
||||
std::make_pair(cdescriptor_pool->pool, cdescriptor_pool)).second) {
|
||||
// Should never happen -- would indicate an internal error / bug.
|
||||
PyErr_SetString(PyExc_ValueError, "DescriptorPool already registered");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return cdescriptor_pool;
|
||||
}
|
||||
|
||||
static void Dealloc(PyDescriptorPool* self) {
|
||||
typedef PyDescriptorPool::ClassesByMessageMap::iterator iterator;
|
||||
descriptor_pool_map.erase(self->pool);
|
||||
for (iterator it = self->classes_by_descriptor->begin();
|
||||
it != self->classes_by_descriptor->end(); ++it) {
|
||||
Py_DECREF(it->second);
|
||||
}
|
||||
delete self->classes_by_descriptor;
|
||||
delete self->interned_descriptors; // its references were borrowed.
|
||||
for (hash_map<const void*, PyObject*>::iterator it =
|
||||
self->descriptor_options->begin();
|
||||
it != self->descriptor_options->end(); ++it) {
|
||||
Py_DECREF(it->second);
|
||||
}
|
||||
delete self->descriptor_options;
|
||||
delete self->message_factory;
|
||||
Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
|
||||
}
|
||||
|
||||
@ -384,22 +400,43 @@ PyTypeObject PyDescriptorPool_Type = {
|
||||
PyObject_Del, // tp_free
|
||||
};
|
||||
|
||||
static PyDescriptorPool* global_cdescriptor_pool = NULL;
|
||||
// This is the DescriptorPool which contains all the definitions from the
|
||||
// generated _pb2.py modules.
|
||||
static PyDescriptorPool* python_generated_pool = NULL;
|
||||
|
||||
bool InitDescriptorPool() {
|
||||
if (PyType_Ready(&PyDescriptorPool_Type) < 0)
|
||||
return false;
|
||||
|
||||
global_cdescriptor_pool = cdescriptor_pool::NewDescriptorPool();
|
||||
if (global_cdescriptor_pool == NULL) {
|
||||
python_generated_pool = cdescriptor_pool::NewDescriptorPool();
|
||||
if (python_generated_pool == NULL) {
|
||||
return false;
|
||||
}
|
||||
// Register this pool to be found for C++-generated descriptors.
|
||||
descriptor_pool_map.insert(
|
||||
std::make_pair(DescriptorPool::generated_pool(),
|
||||
python_generated_pool));
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
PyDescriptorPool* GetDescriptorPool() {
|
||||
return global_cdescriptor_pool;
|
||||
return python_generated_pool;
|
||||
}
|
||||
|
||||
PyDescriptorPool* GetDescriptorPool_FromPool(const DescriptorPool* pool) {
|
||||
// Fast path for standard descriptors.
|
||||
if (pool == python_generated_pool->pool ||
|
||||
pool == DescriptorPool::generated_pool()) {
|
||||
return python_generated_pool;
|
||||
}
|
||||
hash_map<const DescriptorPool*, PyDescriptorPool*>::iterator it =
|
||||
descriptor_pool_map.find(pool);
|
||||
if (it == descriptor_pool_map.end()) {
|
||||
PyErr_SetString(PyExc_KeyError, "Unknown descriptor pool");
|
||||
return NULL;
|
||||
}
|
||||
return it->second;
|
||||
}
|
||||
|
||||
} // namespace python
|
||||
|
@ -38,6 +38,8 @@
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
class MessageFactory;
|
||||
|
||||
namespace python {
|
||||
|
||||
// Wraps operations to the global DescriptorPool which contains information
|
||||
@ -55,6 +57,14 @@ typedef struct PyDescriptorPool {
|
||||
|
||||
DescriptorPool* pool;
|
||||
|
||||
// DynamicMessageFactory used to create C++ instances of messages.
|
||||
// This object caches the descriptors that were used, so the DescriptorPool
// needs to get rid of it before it can delete itself.
|
||||
//
|
||||
// Note: A C++ MessageFactory is different from the Python MessageFactory.
|
||||
// The C++ one creates messages, while the Python one creates classes.
|
||||
MessageFactory* message_factory;
|
||||
|
||||
// Make our own mapping to retrieve Python classes from C++ descriptors.
|
||||
//
|
||||
// Descriptor pointers stored here are owned by the DescriptorPool above.
|
||||
@ -62,14 +72,6 @@ typedef struct PyDescriptorPool {
|
||||
typedef hash_map<const Descriptor*, PyObject*> ClassesByMessageMap;
|
||||
ClassesByMessageMap* classes_by_descriptor;
|
||||
|
||||
// Store interned descriptors, so that the same C++ descriptor yields the same
|
||||
// Python object. Objects are not immortal: this map does not own the
|
||||
// references, and items are deleted when the last reference to the object is
|
||||
// released.
|
||||
// This is enough to support the "is" operator on live objects.
|
||||
// All descriptors are stored here.
|
||||
hash_map<const void*, PyObject*>* interned_descriptors;
|
||||
|
||||
// Cache the options for any kind of descriptor.
|
||||
// Descriptor pointers are owned by the DescriptorPool above.
|
||||
// Python objects are owned by the map.
|
||||
@ -81,9 +83,6 @@ extern PyTypeObject PyDescriptorPool_Type;
|
||||
|
||||
namespace cdescriptor_pool {
|
||||
|
||||
// Builds a new DescriptorPool. Normally called only once per process.
|
||||
PyDescriptorPool* NewDescriptorPool();
|
||||
|
||||
// Looks up a message by name.
|
||||
// Returns a message Descriptor, or NULL if not found.
|
||||
const Descriptor* FindMessageTypeByName(PyDescriptorPool* self,
|
||||
@ -140,6 +139,10 @@ PyObject* FindOneofByName(PyDescriptorPool* self, PyObject* arg);
|
||||
// Returns a *borrowed* reference.
|
||||
PyDescriptorPool* GetDescriptorPool();
|
||||
|
||||
// Retrieve the python descriptor pool owning a C++ descriptor pool.
|
||||
// Returns a *borrowed* reference.
|
||||
PyDescriptorPool* GetDescriptorPool_FromPool(const DescriptorPool* pool);
|
||||
|
||||
// Initialize objects used by this module.
|
||||
bool InitDescriptorPool();
|
||||
|
||||
|
@ -33,6 +33,7 @@
|
||||
|
||||
#include <google/protobuf/pyext/extension_dict.h>
|
||||
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/dynamic_message.h>
|
||||
@ -183,7 +184,8 @@ PyObject* ClearExtension(ExtensionDict* self, PyObject* extension) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
if (cmessage::ClearFieldByDescriptor(self->parent, descriptor) == NULL) {
|
||||
if (ScopedPyObjectPtr(cmessage::ClearFieldByDescriptor(
|
||||
self->parent, descriptor)) == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
if (PyDict_DelItem(self->values, extension) < 0) {
|
||||
@ -268,7 +270,7 @@ PyTypeObject ExtensionDict_Type = {
|
||||
0, // tp_as_number
|
||||
0, // tp_as_sequence
|
||||
&extension_dict::MpMethods, // tp_as_mapping
|
||||
0, // tp_hash
|
||||
PyObject_HashNotImplemented, // tp_hash
|
||||
0, // tp_call
|
||||
0, // tp_str
|
||||
0, // tp_getattro
|
||||
|
@ -49,9 +49,10 @@
|
||||
#endif
|
||||
#include <google/protobuf/descriptor.pb.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/io/coded_stream.h>
|
||||
#include <google/protobuf/util/message_differencer.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/dynamic_message.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/text_format.h>
|
||||
#include <google/protobuf/pyext/descriptor.h>
|
||||
@ -88,12 +89,308 @@ namespace google {
|
||||
namespace protobuf {
|
||||
namespace python {
|
||||
|
||||
static PyObject* kDESCRIPTOR;
|
||||
static PyObject* k_extensions_by_name;
|
||||
static PyObject* k_extensions_by_number;
|
||||
PyObject* EnumTypeWrapper_class;
|
||||
static PyObject* PythonMessage_class;
|
||||
static PyObject* kEmptyWeakref;
|
||||
|
||||
// Defines the Metaclass of all Message classes.
|
||||
// It allows us to cache some C++ pointers in the class object itself; they are
// faster to extract than from the type's dictionary.
|
||||
|
||||
struct PyMessageMeta {
|
||||
// This is how CPython subclasses C structures: the base structure must be
|
||||
// the first member of the object.
|
||||
PyHeapTypeObject super;
|
||||
|
||||
// C++ descriptor of this message.
|
||||
const Descriptor* message_descriptor;
|
||||
// Owned reference, used to keep the pointer above alive.
|
||||
PyObject* py_message_descriptor;
|
||||
};
|
||||
|
||||
namespace message_meta {
|
||||
|
||||
static int InsertEmptyWeakref(PyTypeObject* base);
|
||||
|
||||
// Add the number of a field descriptor to the containing message class.
// Equivalent to:
// _cls.<field>_FIELD_NUMBER = <number>
static bool AddFieldNumberToClass(
PyObject* cls, const FieldDescriptor* field_descriptor) {
string constant_name = field_descriptor->name() + "_FIELD_NUMBER";
UpperString(&constant_name);
ScopedPyObjectPtr attr_name(PyString_FromStringAndSize(
constant_name.c_str(), constant_name.size()));
if (attr_name == NULL) {
return false;
}
ScopedPyObjectPtr number(PyInt_FromLong(field_descriptor->number()));
if (number == NULL) {
return false;
}
if (PyObject_SetAttr(cls, attr_name, number) == -1) {
return false;
}
return true;
}


// Finalize the creation of the Message class.
// Called from its metaclass: GeneratedProtocolMessageType.__init__().
static int AddDescriptors(PyObject* cls, PyObject* descriptor) {
const Descriptor* message_descriptor =
cdescriptor_pool::RegisterMessageClass(
GetDescriptorPool(), cls, descriptor);
if (message_descriptor == NULL) {
return -1;
}

// If there are extension_ranges, the message is "extendable", and extension
// classes will register themselves in this class.
if (message_descriptor->extension_range_count() > 0) {
ScopedPyObjectPtr by_name(PyDict_New());
if (PyObject_SetAttr(cls, k_extensions_by_name, by_name) < 0) {
return -1;
}
ScopedPyObjectPtr by_number(PyDict_New());
if (PyObject_SetAttr(cls, k_extensions_by_number, by_number) < 0) {
return -1;
}
}

// For each field set: cls.<field>_FIELD_NUMBER = <number>
for (int i = 0; i < message_descriptor->field_count(); ++i) {
if (!AddFieldNumberToClass(cls, message_descriptor->field(i))) {
return -1;
}
}
// For each enum set cls.<enum name> = EnumTypeWrapper(<enum descriptor>).
//
// The enum descriptor we get from
|
||||
// <messagedescriptor>.enum_types_by_name[name]
|
||||
// which was built previously.
|
||||
for (int i = 0; i < message_descriptor->enum_type_count(); ++i) {
|
||||
const EnumDescriptor* enum_descriptor = message_descriptor->enum_type(i);
|
||||
ScopedPyObjectPtr enum_type(
|
||||
PyEnumDescriptor_FromDescriptor(enum_descriptor));
|
||||
if (enum_type == NULL) {
|
||||
return -1;
|
||||
}
|
||||
// Add wrapped enum type to message class.
|
||||
ScopedPyObjectPtr wrapped(PyObject_CallFunctionObjArgs(
|
||||
EnumTypeWrapper_class, enum_type.get(), NULL));
|
||||
if (wrapped == NULL) {
|
||||
return -1;
|
||||
}
|
||||
if (PyObject_SetAttrString(
|
||||
cls, enum_descriptor->name().c_str(), wrapped) == -1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// For each enum value add cls.<name> = <number>
|
||||
for (int j = 0; j < enum_descriptor->value_count(); ++j) {
|
||||
const EnumValueDescriptor* enum_value_descriptor =
|
||||
enum_descriptor->value(j);
|
||||
ScopedPyObjectPtr value_number(PyInt_FromLong(
|
||||
enum_value_descriptor->number()));
|
||||
if (value_number == NULL) {
|
||||
return -1;
|
||||
}
|
||||
if (PyObject_SetAttrString(
|
||||
cls, enum_value_descriptor->name().c_str(), value_number) == -1) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For each extension set cls.<extension name> = <extension descriptor>.
|
||||
//
|
||||
// Extension descriptors come from
|
||||
// <message descriptor>.extensions_by_name[name]
|
||||
// which was defined previously.
|
||||
for (int i = 0; i < message_descriptor->extension_count(); ++i) {
|
||||
const google::protobuf::FieldDescriptor* field = message_descriptor->extension(i);
|
||||
ScopedPyObjectPtr extension_field(PyFieldDescriptor_FromDescriptor(field));
|
||||
if (extension_field == NULL) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Add the extension field to the message class.
|
||||
if (PyObject_SetAttrString(
|
||||
cls, field->name().c_str(), extension_field) == -1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// For each extension set cls.<extension name>_FIELD_NUMBER = <number>.
|
||||
if (!AddFieldNumberToClass(cls, field)) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static PyObject* New(PyTypeObject* type,
|
||||
PyObject* args, PyObject* kwargs) {
|
||||
static char *kwlist[] = {"name", "bases", "dict", 0};
|
||||
PyObject *bases, *dict;
|
||||
const char* name;
|
||||
|
||||
// Check arguments: (name, bases, dict)
|
||||
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "sO!O!:type", kwlist,
|
||||
&name,
|
||||
&PyTuple_Type, &bases,
|
||||
&PyDict_Type, &dict)) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// Check bases: only (), or (message.Message,) are allowed
|
||||
if (!(PyTuple_GET_SIZE(bases) == 0 ||
|
||||
(PyTuple_GET_SIZE(bases) == 1 &&
|
||||
PyTuple_GET_ITEM(bases, 0) == PythonMessage_class))) {
|
||||
PyErr_SetString(PyExc_TypeError,
|
||||
"A Message class can only inherit from Message");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// Check dict['DESCRIPTOR']
|
||||
PyObject* descriptor = PyDict_GetItem(dict, kDESCRIPTOR);
|
||||
if (descriptor == NULL) {
|
||||
PyErr_SetString(PyExc_TypeError, "Message class has no DESCRIPTOR");
|
||||
return NULL;
|
||||
}
|
||||
if (!PyObject_TypeCheck(descriptor, &PyMessageDescriptor_Type)) {
|
||||
PyErr_Format(PyExc_TypeError, "Expected a message Descriptor, got %s",
|
||||
descriptor->ob_type->tp_name);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// Build the arguments to the base metaclass.
|
||||
// We change the __bases__ classes.
|
||||
ScopedPyObjectPtr new_args(Py_BuildValue(
|
||||
"s(OO)O", name, &CMessage_Type, PythonMessage_class, dict));
|
||||
if (new_args == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
// Call the base metaclass.
|
||||
ScopedPyObjectPtr result(PyType_Type.tp_new(type, new_args, NULL));
|
||||
if (result == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
PyMessageMeta* newtype = reinterpret_cast<PyMessageMeta*>(result.get());
|
||||
|
||||
// Insert the empty weakref into the base classes.
|
||||
if (InsertEmptyWeakref(
|
||||
reinterpret_cast<PyTypeObject*>(PythonMessage_class)) < 0 ||
|
||||
InsertEmptyWeakref(&CMessage_Type) < 0) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// Cache the descriptor, both as Python object and as C++ pointer.
|
||||
const Descriptor* message_descriptor =
|
||||
PyMessageDescriptor_AsDescriptor(descriptor);
|
||||
if (message_descriptor == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
Py_INCREF(descriptor);
|
||||
newtype->py_message_descriptor = descriptor;
|
||||
newtype->message_descriptor = message_descriptor;
|
||||
|
||||
// Continue with type initialization: add other descriptors, enum values...
|
||||
if (AddDescriptors(result, descriptor) < 0) {
|
||||
return NULL;
|
||||
}
|
||||
return result.release();
|
||||
}
|
||||
|
||||
static void Dealloc(PyMessageMeta *self) {
|
||||
Py_DECREF(self->py_message_descriptor);
|
||||
Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
|
||||
}
|
||||
|
||||
static PyObject* GetDescriptor(PyMessageMeta *self, void *closure) {
|
||||
Py_INCREF(self->py_message_descriptor);
|
||||
return self->py_message_descriptor;
|
||||
}
|
||||
|
||||
|
||||
// This function inserts an empty weakref at the end of the list of
// subclasses for the main protocol buffer Message class.
//
// This eliminates an O(n^2) behaviour in the internal add_subclass
// routine.
static int InsertEmptyWeakref(PyTypeObject *base_type) {
#if PY_MAJOR_VERSION >= 3
// Python 3.4 has already included the fix for the issue that this
// hack addresses. For further background and the fix please see
// https://bugs.python.org/issue17936.
return 0;
#else
PyObject *subclasses = base_type->tp_subclasses;
if (subclasses && PyList_CheckExact(subclasses)) {
return PyList_Append(subclasses, kEmptyWeakref);
}
return 0;
#endif // PY_MAJOR_VERSION >= 3
}
|
||||
|
||||
} // namespace message_meta
|
||||
|
||||
PyTypeObject PyMessageMeta_Type {
|
||||
PyVarObject_HEAD_INIT(&PyType_Type, 0)
|
||||
FULL_MODULE_NAME ".MessageMeta", // tp_name
|
||||
sizeof(PyMessageMeta), // tp_basicsize
|
||||
0, // tp_itemsize
|
||||
(destructor)message_meta::Dealloc, // tp_dealloc
|
||||
0, // tp_print
|
||||
0, // tp_getattr
|
||||
0, // tp_setattr
|
||||
0, // tp_compare
|
||||
0, // tp_repr
|
||||
0, // tp_as_number
|
||||
0, // tp_as_sequence
|
||||
0, // tp_as_mapping
|
||||
0, // tp_hash
|
||||
0, // tp_call
|
||||
0, // tp_str
|
||||
0, // tp_getattro
|
||||
0, // tp_setattro
|
||||
0, // tp_as_buffer
|
||||
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, // tp_flags
|
||||
"The metaclass of ProtocolMessages", // tp_doc
|
||||
0, // tp_traverse
|
||||
0, // tp_clear
|
||||
0, // tp_richcompare
|
||||
0, // tp_weaklistoffset
|
||||
0, // tp_iter
|
||||
0, // tp_iternext
|
||||
0, // tp_methods
|
||||
0, // tp_members
|
||||
0, // tp_getset
|
||||
0, // tp_base
|
||||
0, // tp_dict
|
||||
0, // tp_descr_get
|
||||
0, // tp_descr_set
|
||||
0, // tp_dictoffset
|
||||
0, // tp_init
|
||||
0, // tp_alloc
|
||||
message_meta::New, // tp_new
|
||||
};
|
||||
|
||||
static const Descriptor* GetMessageDescriptor(PyTypeObject* cls) {
|
||||
if (!PyObject_TypeCheck(cls, &PyMessageMeta_Type)) {
|
||||
PyErr_Format(PyExc_TypeError, "Class %s is not a Message", cls->tp_name);
|
||||
return NULL;
|
||||
}
|
||||
return reinterpret_cast<PyMessageMeta*>(cls)->message_descriptor;
|
||||
}
|
||||
|
||||
// Forward declarations
|
||||
namespace cmessage {
|
||||
static const FieldDescriptor* GetFieldDescriptor(
|
||||
CMessage* self, PyObject* name);
|
||||
static const Descriptor* GetMessageDescriptor(PyTypeObject* cls);
|
||||
static string GetMessageName(CMessage* self);
|
||||
int InternalReleaseFieldByDescriptor(
|
||||
CMessage* self,
|
||||
const FieldDescriptor* field_descriptor,
|
||||
@ -180,7 +477,7 @@ int ForEachCompositeField(CMessage* self, Visitor visitor) {
|
||||
if (self->composite_fields) {
|
||||
// Never use self->message in this function, it may be already freed.
|
||||
const Descriptor* message_descriptor =
|
||||
cmessage::GetMessageDescriptor(Py_TYPE(self));
|
||||
GetMessageDescriptor(Py_TYPE(self));
|
||||
while (PyDict_Next(self->composite_fields, &pos, &key, &field)) {
|
||||
Py_ssize_t key_str_size;
|
||||
char *key_str_data;
|
||||
@ -213,8 +510,6 @@ int ForEachCompositeField(CMessage* self, Visitor visitor) {
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
|
||||
static DynamicMessageFactory* message_factory;
|
||||
|
||||
// Constants used for integer type range checking.
|
||||
PyObject* kPythonZero;
|
||||
PyObject* kint32min_py;
|
||||
@ -224,17 +519,13 @@ PyObject* kint64min_py;
|
||||
PyObject* kint64max_py;
|
||||
PyObject* kuint64max_py;
|
||||
|
||||
PyObject* EnumTypeWrapper_class;
|
||||
PyObject* EncodeError_class;
|
||||
PyObject* DecodeError_class;
|
||||
PyObject* PickleError_class;
|
||||
|
||||
// Constant PyString values used for GetAttr/GetItem.
|
||||
static PyObject* kDESCRIPTOR;
|
||||
static PyObject* k_cdescriptor;
|
||||
static PyObject* kfull_name;
|
||||
static PyObject* k_extensions_by_name;
|
||||
static PyObject* k_extensions_by_number;
|
||||
|
||||
/* Is 64bit */
|
||||
void FormatTypeError(PyObject* arg, char* expected_types) {
|
||||
@ -432,10 +723,6 @@ bool CheckFieldBelongsToMessage(const FieldDescriptor* field_descriptor,
|
||||
|
||||
namespace cmessage {
|
||||
|
||||
DynamicMessageFactory* GetMessageFactory() {
|
||||
return message_factory;
|
||||
}
|
||||
|
||||
static int MaybeReleaseOverlappingOneofField(
|
||||
CMessage* cmessage,
|
||||
const FieldDescriptor* field) {
|
||||
@ -486,7 +773,7 @@ static Message* GetMutableMessage(
|
||||
return NULL;
|
||||
}
|
||||
return reflection->MutableMessage(
|
||||
parent_message, parent_field, message_factory);
|
||||
parent_message, parent_field, GetDescriptorPool()->message_factory);
|
||||
}
|
||||
|
||||
struct FixupMessageReference : public ChildVisitor {
|
||||
@ -527,8 +814,9 @@ int AssureWritable(CMessage* self) {
|
||||
// If parent is NULL but we are trying to modify a read-only message, this
|
||||
// is a reference to a constant default instance that needs to be replaced
|
||||
// with a mutable top-level message.
|
||||
const Message* prototype = message_factory->GetPrototype(
|
||||
self->message->GetDescriptor());
|
||||
const Message* prototype =
|
||||
GetDescriptorPool()->message_factory->GetPrototype(
|
||||
self->message->GetDescriptor());
|
||||
self->message = prototype->New();
|
||||
self->owner.reset(self->message);
|
||||
// Cascade the new owner to eventual children: even if this message is
|
||||
@ -567,23 +855,6 @@ int AssureWritable(CMessage* self) {
|
||||
|
||||
// --- Globals:
|
||||
|
||||
// Retrieve the C++ Descriptor of a message class.
|
||||
// On error, returns NULL with an exception set.
|
||||
static const Descriptor* GetMessageDescriptor(PyTypeObject* cls) {
|
||||
ScopedPyObjectPtr descriptor(PyObject_GetAttr(
|
||||
reinterpret_cast<PyObject*>(cls), kDESCRIPTOR));
|
||||
if (descriptor == NULL) {
|
||||
PyErr_SetString(PyExc_TypeError, "Message class has no DESCRIPTOR");
|
||||
return NULL;
|
||||
}
|
||||
if (!PyObject_TypeCheck(descriptor, &PyMessageDescriptor_Type)) {
|
||||
PyErr_Format(PyExc_TypeError, "Expected a message Descriptor, got %s",
|
||||
descriptor->ob_type->tp_name);
|
||||
return NULL;
|
||||
}
|
||||
return PyMessageDescriptor_AsDescriptor(descriptor);
|
||||
}
|
||||
|
||||
// Retrieve a C++ FieldDescriptor for a message attribute.
|
||||
// The C++ message must be valid.
|
||||
// TODO(amauryfa): This function should stay internal, because exception
|
||||
@ -846,9 +1117,9 @@ int InitAttributes(CMessage* self, PyObject* kwargs) {
|
||||
return -1;
|
||||
}
|
||||
} else {
|
||||
if (repeated_scalar_container::Extend(
|
||||
if (ScopedPyObjectPtr(repeated_scalar_container::Extend(
|
||||
reinterpret_cast<RepeatedScalarContainer*>(container.get()),
|
||||
value) ==
|
||||
value)) ==
|
||||
NULL) {
|
||||
return -1;
|
||||
}
|
||||
@ -927,7 +1198,7 @@ static PyObject* New(PyTypeObject* type,
|
||||
return NULL;
|
||||
}
|
||||
const Message* default_message =
|
||||
message_factory->GetPrototype(message_descriptor);
|
||||
GetDescriptorPool()->message_factory->GetPrototype(message_descriptor);
|
||||
if (default_message == NULL) {
|
||||
PyErr_SetString(PyExc_TypeError, message_descriptor->full_name().c_str());
|
||||
return NULL;
|
||||
@ -1257,6 +1528,7 @@ int SetOwner(CMessage* self, const shared_ptr<Message>& new_owner) {
|
||||
Message* ReleaseMessage(CMessage* self,
|
||||
const Descriptor* descriptor,
|
||||
const FieldDescriptor* field_descriptor) {
|
||||
MessageFactory* message_factory = GetDescriptorPool()->message_factory;
|
||||
Message* released_message = self->message->GetReflection()->ReleaseMessage(
|
||||
self->message, field_descriptor, message_factory);
|
||||
// ReleaseMessage will return NULL which differs from
|
||||
@ -1492,34 +1764,35 @@ static PyObject* SerializePartialToString(CMessage* self) {
|
||||
// appropriate.
|
||||
class PythonFieldValuePrinter : public TextFormat::FieldValuePrinter {
|
||||
public:
|
||||
PythonFieldValuePrinter() : float_holder_(PyFloat_FromDouble(0)) {}
|
||||
|
||||
// Python has some differences from C++ when printing floating point numbers.
|
||||
//
|
||||
// 1) Trailing .0 is always printed.
|
||||
// 2) Outputted is rounded to 12 digits.
|
||||
// 2) (Python2) Output is rounded to 12 digits.
|
||||
// 3) (Python3) The full precision of the double is preserved (and Python uses
|
||||
// David M. Gay's dtoa(), when the C++ code uses SimpleDtoa. There are some
|
||||
// differences, but they rarely happen)
|
||||
//
|
||||
// We override floating point printing with the C-API function for printing
|
||||
// Python floats to ensure consistency.
|
||||
string PrintFloat(float value) const { return PrintDouble(value); }
|
||||
string PrintDouble(double value) const {
|
||||
reinterpret_cast<PyFloatObject*>(float_holder_.get())->ob_fval = value;
|
||||
ScopedPyObjectPtr s(PyObject_Str(float_holder_.get()));
|
||||
if (s == NULL) return string();
|
||||
// Same as float.__str__()
|
||||
char* buf = PyOS_double_to_string(
|
||||
value,
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
char *cstr = PyBytes_AS_STRING(static_cast<PyObject*>(s));
|
||||
'g', PyFloat_STR_PRECISION, // Output is rounded to 12 digits.
|
||||
#else
|
||||
char *cstr = PyUnicode_AsUTF8(s);
|
||||
'r', 0,
|
||||
#endif
|
||||
return string(cstr);
|
||||
Py_DTSF_ADD_DOT_0, // Trailing .0 is always printed.
|
||||
NULL);
|
||||
if (!buf) {
|
||||
return string();
|
||||
}
|
||||
string result(buf);
|
||||
PyMem_Free(buf);
|
||||
return result;
|
||||
}
|
||||
|
||||
private:
|
||||
// Holder for a python float object which we use to allow us to use
|
||||
// the Python API for printing doubles. We initialize once and then
|
||||
// directly modify it for every float printed to save on allocations
|
||||
// and refcounting.
|
||||
ScopedPyObjectPtr float_holder_;
|
||||
};
|
||||
|
||||
static PyObject* ToStr(CMessage* self) {
|
||||
@ -1590,7 +1863,7 @@ static PyObject* CopyFrom(CMessage* self, PyObject* arg) {
|
||||
|
||||
// CopyFrom on the message will not clean up self->composite_fields,
|
||||
// which can leave us in an inconsistent state, so clear it out here.
|
||||
Clear(self);
|
||||
(void)ScopedPyObjectPtr(Clear(self));
|
||||
|
||||
self->message->CopyFrom(*other_message->message);
|
||||
|
||||
@ -1607,7 +1880,8 @@ static PyObject* MergeFromString(CMessage* self, PyObject* arg) {
|
||||
AssureWritable(self);
|
||||
io::CodedInputStream input(
|
||||
reinterpret_cast<const uint8*>(data), data_length);
|
||||
input.SetExtensionRegistry(GetDescriptorPool()->pool, message_factory);
|
||||
input.SetExtensionRegistry(GetDescriptorPool()->pool,
|
||||
GetDescriptorPool()->message_factory);
|
||||
bool success = self->message->MergePartialFromCodedStream(&input);
|
||||
if (success) {
|
||||
return PyInt_FromLong(input.CurrentPosition());
|
||||
@ -1618,7 +1892,7 @@ static PyObject* MergeFromString(CMessage* self, PyObject* arg) {
|
||||
}
|
||||
|
||||
static PyObject* ParseFromString(CMessage* self, PyObject* arg) {
|
||||
if (Clear(self) == NULL) {
|
||||
if (ScopedPyObjectPtr(Clear(self)) == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
return MergeFromString(self, arg);
|
||||
@ -1790,6 +2064,7 @@ static PyObject* ListFields(CMessage* self) {
|
||||
// Steals reference to 'extension'
|
||||
PyTuple_SET_ITEM(t.get(), 1, extension);
|
||||
} else {
|
||||
// Normal field
|
||||
const string& field_name = fields[i]->name();
|
||||
ScopedPyObjectPtr py_field_name(PyString_FromStringAndSize(
|
||||
field_name.c_str(), field_name.length()));
|
||||
@ -1841,28 +2116,34 @@ PyObject* FindInitializationErrors(CMessage* self) {
|
||||
}
|
||||
|
||||
static PyObject* RichCompare(CMessage* self, PyObject* other, int opid) {
|
||||
if (!PyObject_TypeCheck(other, &CMessage_Type)) {
|
||||
if (opid == Py_EQ) {
|
||||
Py_RETURN_FALSE;
|
||||
} else if (opid == Py_NE) {
|
||||
Py_RETURN_TRUE;
|
||||
}
|
||||
}
|
||||
if (opid == Py_EQ || opid == Py_NE) {
|
||||
ScopedPyObjectPtr self_fields(ListFields(self));
|
||||
if (!self_fields) {
|
||||
return NULL;
|
||||
}
|
||||
ScopedPyObjectPtr other_fields(ListFields(
|
||||
reinterpret_cast<CMessage*>(other)));
|
||||
if (!other_fields) {
|
||||
return NULL;
|
||||
}
|
||||
return PyObject_RichCompare(self_fields, other_fields, opid);
|
||||
} else {
|
||||
// Only equality comparisons are implemented.
|
||||
if (opid != Py_EQ && opid != Py_NE) {
|
||||
Py_INCREF(Py_NotImplemented);
|
||||
return Py_NotImplemented;
|
||||
}
|
||||
bool equals = true;
|
||||
// If other is not a message, it cannot be equal.
|
||||
if (!PyObject_TypeCheck(other, &CMessage_Type)) {
|
||||
equals = false;
|
||||
}
|
||||
const google::protobuf::Message* other_message =
|
||||
reinterpret_cast<CMessage*>(other)->message;
|
||||
// If messages don't have the same descriptors, they are not equal.
|
||||
if (equals &&
|
||||
self->message->GetDescriptor() != other_message->GetDescriptor()) {
|
||||
equals = false;
|
||||
}
|
||||
// Check the message contents.
|
||||
if (equals && !google::protobuf::util::MessageDifferencer::Equals(
|
||||
*self->message,
|
||||
*reinterpret_cast<CMessage*>(other)->message)) {
|
||||
equals = false;
|
||||
}
|
||||
if (equals ^ (opid == Py_EQ)) {
|
||||
Py_RETURN_FALSE;
|
||||
} else {
|
||||
Py_RETURN_TRUE;
|
||||
}
|
||||
}
|
||||
|
||||
PyObject* InternalGetScalar(const Message* message,
|
||||
@ -1950,7 +2231,7 @@ PyObject* InternalGetSubMessage(
|
||||
CMessage* self, const FieldDescriptor* field_descriptor) {
|
||||
const Reflection* reflection = self->message->GetReflection();
|
||||
const Message& sub_message = reflection->GetMessage(
|
||||
*self->message, field_descriptor, message_factory);
|
||||
*self->message, field_descriptor, GetDescriptorPool()->message_factory);
|
||||
|
||||
PyObject *message_class = cdescriptor_pool::GetMessageClass(
|
||||
GetDescriptorPool(), field_descriptor->message_type());
|
||||
@ -2085,125 +2366,6 @@ PyObject* FromString(PyTypeObject* cls, PyObject* serialized) {
|
||||
return py_cmsg;
|
||||
}
|
||||
|
||||
// Add the number of a field descriptor to the containing message class.
|
||||
// Equivalent to:
|
||||
// _cls.<field>_FIELD_NUMBER = <number>
|
||||
static bool AddFieldNumberToClass(
|
||||
PyObject* cls, const FieldDescriptor* field_descriptor) {
|
||||
string constant_name = field_descriptor->name() + "_FIELD_NUMBER";
|
||||
UpperString(&constant_name);
|
||||
ScopedPyObjectPtr attr_name(PyString_FromStringAndSize(
|
||||
constant_name.c_str(), constant_name.size()));
|
||||
if (attr_name == NULL) {
|
||||
return false;
|
||||
}
|
||||
ScopedPyObjectPtr number(PyInt_FromLong(field_descriptor->number()));
|
||||
if (number == NULL) {
|
||||
return false;
|
||||
}
|
||||
if (PyObject_SetAttr(cls, attr_name, number) == -1) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
// Finalize the creation of the Message class.
|
||||
// Called from its metaclass: GeneratedProtocolMessageType.__init__().
|
||||
static PyObject* AddDescriptors(PyObject* cls, PyObject* descriptor) {
|
||||
const Descriptor* message_descriptor =
|
||||
cdescriptor_pool::RegisterMessageClass(
|
||||
GetDescriptorPool(), cls, descriptor);
|
||||
if (message_descriptor == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// If there are extension_ranges, the message is "extendable", and extension
|
||||
// classes will register themselves in this class.
|
||||
if (message_descriptor->extension_range_count() > 0) {
|
||||
ScopedPyObjectPtr by_name(PyDict_New());
|
||||
if (PyObject_SetAttr(cls, k_extensions_by_name, by_name) < 0) {
|
||||
return NULL;
|
||||
}
|
||||
ScopedPyObjectPtr by_number(PyDict_New());
|
||||
if (PyObject_SetAttr(cls, k_extensions_by_number, by_number) < 0) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
// For each field set: cls.<field>_FIELD_NUMBER = <number>
|
||||
for (int i = 0; i < message_descriptor->field_count(); ++i) {
|
||||
if (!AddFieldNumberToClass(cls, message_descriptor->field(i))) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
// For each enum set cls.<enum name> = EnumTypeWrapper(<enum descriptor>).
|
||||
//
|
||||
// The enum descriptor we get from
|
||||
// <messagedescriptor>.enum_types_by_name[name]
|
||||
// which was built previously.
|
||||
for (int i = 0; i < message_descriptor->enum_type_count(); ++i) {
|
||||
const EnumDescriptor* enum_descriptor = message_descriptor->enum_type(i);
|
||||
ScopedPyObjectPtr enum_type(
|
||||
PyEnumDescriptor_FromDescriptor(enum_descriptor));
|
||||
if (enum_type == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
// Add wrapped enum type to message class.
|
||||
ScopedPyObjectPtr wrapped(PyObject_CallFunctionObjArgs(
|
||||
EnumTypeWrapper_class, enum_type.get(), NULL));
|
||||
if (wrapped == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
if (PyObject_SetAttrString(
|
||||
cls, enum_descriptor->name().c_str(), wrapped) == -1) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// For each enum value add cls.<name> = <number>
|
||||
for (int j = 0; j < enum_descriptor->value_count(); ++j) {
|
||||
const EnumValueDescriptor* enum_value_descriptor =
|
||||
enum_descriptor->value(j);
|
||||
ScopedPyObjectPtr value_number(PyInt_FromLong(
|
||||
enum_value_descriptor->number()));
|
||||
if (value_number == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
if (PyObject_SetAttrString(
|
||||
cls, enum_value_descriptor->name().c_str(), value_number) == -1) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For each extension set cls.<extension name> = <extension descriptor>.
|
||||
//
|
||||
// Extension descriptors come from
|
||||
// <message descriptor>.extensions_by_name[name]
|
||||
// which was defined previously.
|
||||
for (int i = 0; i < message_descriptor->extension_count(); ++i) {
|
||||
const google::protobuf::FieldDescriptor* field = message_descriptor->extension(i);
|
||||
ScopedPyObjectPtr extension_field(PyFieldDescriptor_FromDescriptor(field));
|
||||
if (extension_field == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// Add the extension field to the message class.
|
||||
if (PyObject_SetAttrString(
|
||||
cls, field->name().c_str(), extension_field) == -1) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// For each extension set cls.<extension name>_FIELD_NUMBER = <number>.
|
||||
if (!AddFieldNumberToClass(cls, field)) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
PyObject* DeepCopy(CMessage* self, PyObject* arg) {
|
||||
PyObject* clone = PyObject_CallObject(
|
||||
reinterpret_cast<PyObject*>(Py_TYPE(self)), NULL);
|
||||
@ -2214,8 +2376,9 @@ PyObject* DeepCopy(CMessage* self, PyObject* arg) {
|
||||
Py_DECREF(clone);
|
||||
return NULL;
|
||||
}
|
||||
if (MergeFrom(reinterpret_cast<CMessage*>(clone),
|
||||
reinterpret_cast<PyObject*>(self)) == NULL) {
|
||||
if (ScopedPyObjectPtr(MergeFrom(
|
||||
reinterpret_cast<CMessage*>(clone),
|
||||
reinterpret_cast<PyObject*>(self))) == NULL) {
|
||||
Py_DECREF(clone);
|
||||
return NULL;
|
||||
}
|
||||
@ -2281,7 +2444,7 @@ PyObject* SetState(CMessage* self, PyObject* state) {
|
||||
if (serialized == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
if (ParseFromString(self, serialized) == NULL) {
|
||||
if (ScopedPyObjectPtr(ParseFromString(self, serialized)) == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
Py_RETURN_NONE;
|
||||
@ -2314,8 +2477,6 @@ static PyMethodDef Methods[] = {
|
||||
"Inputs picklable representation of the message." },
|
||||
{ "__unicode__", (PyCFunction)ToUnicode, METH_NOARGS,
|
||||
"Outputs a unicode representation of the message." },
|
||||
{ "AddDescriptors", (PyCFunction)AddDescriptors, METH_O | METH_CLASS,
|
||||
"Adds field descriptors to the class" },
|
||||
{ "ByteSize", (PyCFunction)ByteSize, METH_NOARGS,
|
||||
"Returns the size of the message in bytes." },
|
||||
{ "Clear", (PyCFunction)Clear, METH_NOARGS,
|
||||
@ -2441,6 +2602,9 @@ PyObject* GetAttr(CMessage* self, PyObject* name) {
|
||||
|
||||
if (field_descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
|
||||
PyObject* sub_message = InternalGetSubMessage(self, field_descriptor);
|
||||
if (sub_message == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
if (!SetCompositeField(self, name, sub_message)) {
|
||||
Py_DECREF(sub_message);
|
||||
return NULL;
|
||||
@ -2484,7 +2648,7 @@ int SetAttr(CMessage* self, PyObject* name, PyObject* value) {
|
||||
} // namespace cmessage
|
||||
|
||||
PyTypeObject CMessage_Type = {
|
||||
PyVarObject_HEAD_INIT(&PyType_Type, 0)
|
||||
PyVarObject_HEAD_INIT(&PyMessageMeta_Type, 0)
|
||||
FULL_MODULE_NAME ".CMessage", // tp_name
|
||||
sizeof(CMessage), // tp_basicsize
|
||||
0, // tp_itemsize
|
||||
@ -2497,7 +2661,7 @@ PyTypeObject CMessage_Type = {
|
||||
0, // tp_as_number
|
||||
0, // tp_as_sequence
|
||||
0, // tp_as_mapping
|
||||
0, // tp_hash
|
||||
PyObject_HashNotImplemented, // tp_hash
|
||||
0, // tp_call
|
||||
(reprfunc)cmessage::ToStr, // tp_str
|
||||
(getattrofunc)cmessage::GetAttr, // tp_getattro
|
||||
@ -2580,8 +2744,9 @@ void InitGlobals() {
|
||||
k_extensions_by_name = PyString_FromString("_extensions_by_name");
|
||||
k_extensions_by_number = PyString_FromString("_extensions_by_number");
|
||||
|
||||
message_factory = new DynamicMessageFactory();
|
||||
message_factory->SetDelegateToGeneratedFactory(true);
|
||||
PyObject *dummy_obj = PySet_New(NULL);
|
||||
kEmptyWeakref = PyWeakref_NewRef(dummy_obj, NULL);
|
||||
Py_DECREF(dummy_obj);
|
||||
}
|
||||
|
||||
bool InitProto2MessageModule(PyObject *m) {
|
||||
@ -2598,7 +2763,13 @@ bool InitProto2MessageModule(PyObject *m) {
|
||||
// Initialize constants defined in this file.
|
||||
InitGlobals();
|
||||
|
||||
CMessage_Type.tp_hash = PyObject_HashNotImplemented;
|
||||
PyMessageMeta_Type.tp_base = &PyType_Type;
|
||||
if (PyType_Ready(&PyMessageMeta_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
PyModule_AddObject(m, "MessageMeta",
|
||||
reinterpret_cast<PyObject*>(&PyMessageMeta_Type));
|
||||
|
||||
if (PyType_Ready(&CMessage_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
@ -2628,86 +2799,106 @@ bool InitProto2MessageModule(PyObject *m) {
|
||||
|
||||
PyModule_AddObject(m, "Message", reinterpret_cast<PyObject*>(&CMessage_Type));
|
||||
|
||||
RepeatedScalarContainer_Type.tp_hash =
|
||||
PyObject_HashNotImplemented;
|
||||
if (PyType_Ready(&RepeatedScalarContainer_Type) < 0) {
|
||||
return false;
|
||||
// Initialize Repeated container types.
|
||||
{
|
||||
if (PyType_Ready(&RepeatedScalarContainer_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(m, "RepeatedScalarContainer",
|
||||
reinterpret_cast<PyObject*>(
|
||||
&RepeatedScalarContainer_Type));
|
||||
|
||||
if (PyType_Ready(&RepeatedCompositeContainer_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(
|
||||
m, "RepeatedCompositeContainer",
|
||||
reinterpret_cast<PyObject*>(
|
||||
&RepeatedCompositeContainer_Type));
|
||||
|
||||
// Register them as collections.Sequence
|
||||
ScopedPyObjectPtr collections(PyImport_ImportModule("collections"));
|
||||
if (collections == NULL) {
|
||||
return false;
|
||||
}
|
||||
ScopedPyObjectPtr mutable_sequence(PyObject_GetAttrString(
|
||||
collections, "MutableSequence"));
|
||||
if (mutable_sequence == NULL) {
|
||||
return false;
|
||||
}
|
||||
if (ScopedPyObjectPtr(PyObject_CallMethod(mutable_sequence, "register", "O",
|
||||
&RepeatedScalarContainer_Type))
|
||||
== NULL) {
|
||||
return false;
|
||||
}
|
||||
if (ScopedPyObjectPtr(PyObject_CallMethod(mutable_sequence, "register", "O",
|
||||
&RepeatedCompositeContainer_Type))
|
||||
== NULL) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
PyModule_AddObject(m, "RepeatedScalarContainer",
|
||||
reinterpret_cast<PyObject*>(
|
||||
&RepeatedScalarContainer_Type));
|
||||
// Initialize Map container types.
|
||||
{
|
||||
// ScalarMapContainer_Type derives from our MutableMapping type.
|
||||
ScopedPyObjectPtr containers(PyImport_ImportModule(
|
||||
"google.protobuf.internal.containers"));
|
||||
if (containers == NULL) {
|
||||
return false;
|
||||
}
|
||||
|
||||
RepeatedCompositeContainer_Type.tp_hash = PyObject_HashNotImplemented;
|
||||
if (PyType_Ready(&RepeatedCompositeContainer_Type) < 0) {
|
||||
return false;
|
||||
ScopedPyObjectPtr mutable_mapping(
|
||||
PyObject_GetAttrString(containers, "MutableMapping"));
|
||||
if (mutable_mapping == NULL) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!PyObject_TypeCheck(mutable_mapping, &PyType_Type)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Py_INCREF(mutable_mapping);
|
||||
ScalarMapContainer_Type.tp_base =
|
||||
reinterpret_cast<PyTypeObject*>(mutable_mapping.get());
|
||||
|
||||
if (PyType_Ready(&ScalarMapContainer_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(m, "ScalarMapContainer",
|
||||
reinterpret_cast<PyObject*>(&ScalarMapContainer_Type));
|
||||
|
||||
if (PyType_Ready(&ScalarMapIterator_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(m, "ScalarMapIterator",
|
||||
reinterpret_cast<PyObject*>(&ScalarMapIterator_Type));
|
||||
|
||||
Py_INCREF(mutable_mapping);
|
||||
MessageMapContainer_Type.tp_base =
|
||||
reinterpret_cast<PyTypeObject*>(mutable_mapping.get());
|
||||
|
||||
if (PyType_Ready(&MessageMapContainer_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(m, "MessageMapContainer",
|
||||
reinterpret_cast<PyObject*>(&MessageMapContainer_Type));
|
||||
|
||||
if (PyType_Ready(&MessageMapIterator_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(m, "MessageMapIterator",
|
||||
reinterpret_cast<PyObject*>(&MessageMapIterator_Type));
|
||||
}
|
||||
|
||||
PyModule_AddObject(
|
||||
m, "RepeatedCompositeContainer",
|
||||
reinterpret_cast<PyObject*>(
|
||||
&RepeatedCompositeContainer_Type));
|
||||
|
||||
// ScalarMapContainer_Type derives from our MutableMapping type.
|
||||
PyObject* containers =
|
||||
PyImport_ImportModule("google.protobuf.internal.containers");
|
||||
if (containers == NULL) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyObject* mutable_mapping =
|
||||
PyObject_GetAttrString(containers, "MutableMapping");
|
||||
Py_DECREF(containers);
|
||||
|
||||
if (mutable_mapping == NULL) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!PyObject_TypeCheck(mutable_mapping, &PyType_Type)) {
|
||||
Py_DECREF(mutable_mapping);
|
||||
return false;
|
||||
}
|
||||
|
||||
ScalarMapContainer_Type.tp_base =
|
||||
reinterpret_cast<PyTypeObject*>(mutable_mapping);
|
||||
|
||||
if (PyType_Ready(&ScalarMapContainer_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(m, "ScalarMapContainer",
|
||||
reinterpret_cast<PyObject*>(&ScalarMapContainer_Type));
|
||||
|
||||
if (PyType_Ready(&ScalarMapIterator_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(m, "ScalarMapIterator",
|
||||
reinterpret_cast<PyObject*>(&ScalarMapIterator_Type));
|
||||
|
||||
Py_INCREF(mutable_mapping);
|
||||
MessageMapContainer_Type.tp_base =
|
||||
reinterpret_cast<PyTypeObject*>(mutable_mapping);
|
||||
|
||||
if (PyType_Ready(&MessageMapContainer_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(m, "MessageMapContainer",
|
||||
reinterpret_cast<PyObject*>(&MessageMapContainer_Type));
|
||||
|
||||
if (PyType_Ready(&MessageMapIterator_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(m, "MessageMapIterator",
|
||||
reinterpret_cast<PyObject*>(&MessageMapIterator_Type));
|
||||
|
||||
ExtensionDict_Type.tp_hash = PyObject_HashNotImplemented;
|
||||
if (PyType_Ready(&ExtensionDict_Type) < 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
PyModule_AddObject(
|
||||
m, "ExtensionDict",
|
||||
reinterpret_cast<PyObject*>(&ExtensionDict_Type));
|
||||
@ -2751,6 +2942,7 @@ bool InitProto2MessageModule(PyObject *m) {
|
||||
}
|
||||
EncodeError_class = PyObject_GetAttrString(message_module, "EncodeError");
|
||||
DecodeError_class = PyObject_GetAttrString(message_module, "DecodeError");
|
||||
PythonMessage_class = PyObject_GetAttrString(message_module, "Message");
|
||||
Py_DECREF(message_module);
|
||||
|
||||
PyObject* pickle_module = PyImport_ImportModule("pickle");
|
||||
|
@ -49,7 +49,6 @@ class Message;
|
||||
class Reflection;
|
||||
class FieldDescriptor;
|
||||
class Descriptor;
|
||||
class DynamicMessageFactory;
|
||||
|
||||
using internal::shared_ptr;
|
||||
|
||||
@ -221,9 +220,6 @@ PyObject* FindInitializationErrors(CMessage* self);
|
||||
int SetOwner(CMessage* self, const shared_ptr<Message>& new_owner);
|
||||
|
||||
int AssureWritable(CMessage* self);
|
||||
|
||||
DynamicMessageFactory* GetMessageFactory();
|
||||
|
||||
} // namespace cmessage
|
||||
|
||||
|
||||
|
@ -32,6 +32,7 @@
|
||||
|
||||
#include <google/protobuf/pyext/message_map_container.h>
|
||||
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/pyext/message.h>
|
||||
|
@ -38,11 +38,13 @@
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
#endif
|
||||
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/dynamic_message.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/pyext/descriptor.h>
|
||||
#include <google/protobuf/pyext/descriptor_pool.h>
|
||||
#include <google/protobuf/pyext/message.h>
|
||||
#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
|
||||
|
||||
@ -74,125 +76,6 @@ namespace repeated_composite_container {
|
||||
GOOGLE_CHECK((self)->parent == NULL); \
|
||||
} while (0);
|
||||
|
||||
// Returns a new reference.
|
||||
static PyObject* GetKey(PyObject* x) {
|
||||
// Just the identity function.
|
||||
Py_INCREF(x);
|
||||
return x;
|
||||
}
|
||||
|
||||
#define GET_KEY(keyfunc, value) \
|
||||
((keyfunc) == NULL ? \
|
||||
GetKey((value)) : \
|
||||
PyObject_CallFunctionObjArgs((keyfunc), (value), NULL))
|
||||
|
||||
// Converts a comparison function that returns -1, 0, or 1 into a
|
||||
// less-than predicate.
|
||||
//
|
||||
// Returns -1 on error, 1 if x < y, 0 if x >= y.
|
||||
static int islt(PyObject *x, PyObject *y, PyObject *compare) {
|
||||
if (compare == NULL)
|
||||
return PyObject_RichCompareBool(x, y, Py_LT);
|
||||
|
||||
ScopedPyObjectPtr res(PyObject_CallFunctionObjArgs(compare, x, y, NULL));
|
||||
if (res == NULL)
|
||||
return -1;
|
||||
if (!PyInt_Check(res)) {
|
||||
PyErr_Format(PyExc_TypeError,
|
||||
"comparison function must return int, not %.200s",
|
||||
Py_TYPE(res)->tp_name);
|
||||
return -1;
|
||||
}
|
||||
return PyInt_AsLong(res) < 0;
|
||||
}
|
||||
|
||||
// Copied from uarrsort.c but swaps memcpy swaps with protobuf/python swaps
|
||||
// TODO(anuraag): Is there a better way to do this then reinventing the wheel?
|
||||
static int InternalQuickSort(RepeatedCompositeContainer* self,
|
||||
Py_ssize_t start,
|
||||
Py_ssize_t limit,
|
||||
PyObject* cmp,
|
||||
PyObject* keyfunc) {
|
||||
if (limit - start <= 1)
|
||||
return 0; // Nothing to sort.
|
||||
|
||||
GOOGLE_CHECK_ATTACHED(self);
|
||||
|
||||
Message* message = self->message;
|
||||
const Reflection* reflection = message->GetReflection();
|
||||
const FieldDescriptor* descriptor = self->parent_field_descriptor;
|
||||
Py_ssize_t left;
|
||||
Py_ssize_t right;
|
||||
|
||||
PyObject* children = self->child_messages;
|
||||
|
||||
do {
|
||||
left = start;
|
||||
right = limit;
|
||||
ScopedPyObjectPtr mid(
|
||||
GET_KEY(keyfunc, PyList_GET_ITEM(children, (start + limit) / 2)));
|
||||
do {
|
||||
ScopedPyObjectPtr key(GET_KEY(keyfunc, PyList_GET_ITEM(children, left)));
|
||||
int is_lt = islt(key, mid, cmp);
|
||||
if (is_lt == -1)
|
||||
return -1;
|
||||
/* array[left]<x */
|
||||
while (is_lt) {
|
||||
++left;
|
||||
ScopedPyObjectPtr key(GET_KEY(keyfunc,
|
||||
PyList_GET_ITEM(children, left)));
|
||||
is_lt = islt(key, mid, cmp);
|
||||
if (is_lt == -1)
|
||||
return -1;
|
||||
}
|
||||
key.reset(GET_KEY(keyfunc, PyList_GET_ITEM(children, right - 1)));
|
||||
is_lt = islt(mid, key, cmp);
|
||||
if (is_lt == -1)
|
||||
return -1;
|
||||
while (is_lt) {
|
||||
--right;
|
||||
ScopedPyObjectPtr key(GET_KEY(keyfunc,
|
||||
PyList_GET_ITEM(children, right - 1)));
|
||||
is_lt = islt(mid, key, cmp);
|
||||
if (is_lt == -1)
|
||||
return -1;
|
||||
}
|
||||
if (left < right) {
|
||||
--right;
|
||||
if (left < right) {
|
||||
reflection->SwapElements(message, descriptor, left, right);
|
||||
PyObject* tmp = PyList_GET_ITEM(children, left);
|
||||
PyList_SET_ITEM(children, left, PyList_GET_ITEM(children, right));
|
||||
PyList_SET_ITEM(children, right, tmp);
|
||||
}
|
||||
++left;
|
||||
}
|
||||
} while (left < right);
|
||||
|
||||
if ((right - start) < (limit - left)) {
|
||||
/* sort [start..right[ */
|
||||
if (start < (right - 1)) {
|
||||
InternalQuickSort(self, start, right, cmp, keyfunc);
|
||||
}
|
||||
|
||||
/* sort [left..limit[ */
|
||||
start = left;
|
||||
} else {
|
||||
/* sort [left..limit[ */
|
||||
if (left < (limit - 1)) {
|
||||
InternalQuickSort(self, left, limit, cmp, keyfunc);
|
||||
}
|
||||
|
||||
/* sort [start..right[ */
|
||||
limit = right;
|
||||
}
|
||||
} while (start < (limit - 1));
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
#undef GET_KEY
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
// len()
|
||||
|
||||
@ -329,7 +212,7 @@ PyObject* Extend(RepeatedCompositeContainer* self, PyObject* value) {
|
||||
return NULL;
|
||||
}
|
||||
CMessage* new_cmessage = reinterpret_cast<CMessage*>(new_message.get());
|
||||
if (cmessage::MergeFrom(new_cmessage, next) == NULL) {
|
||||
if (ScopedPyObjectPtr(cmessage::MergeFrom(new_cmessage, next)) == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
@ -455,58 +338,39 @@ static PyObject* RichCompare(RepeatedCompositeContainer* self,
|
||||
// ---------------------------------------------------------------------
|
||||
// sort()
|
||||
|
||||
static PyObject* SortAttached(RepeatedCompositeContainer* self,
|
||||
PyObject* args,
|
||||
PyObject* kwds) {
|
||||
// Sort the underlying Message array.
|
||||
PyObject *compare = NULL;
|
||||
int reverse = 0;
|
||||
PyObject *keyfunc = NULL;
|
||||
static char *kwlist[] = {"cmp", "key", "reverse", 0};
|
||||
|
||||
if (args != NULL) {
|
||||
if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOi:sort",
|
||||
kwlist, &compare, &keyfunc, &reverse))
|
||||
return NULL;
|
||||
}
|
||||
if (compare == Py_None)
|
||||
compare = NULL;
|
||||
if (keyfunc == Py_None)
|
||||
keyfunc = NULL;
|
||||
|
||||
static void ReorderAttached(RepeatedCompositeContainer* self) {
|
||||
Message* message = self->message;
|
||||
const Reflection* reflection = message->GetReflection();
|
||||
const FieldDescriptor* descriptor = self->parent_field_descriptor;
|
||||
const Py_ssize_t length = Length(self);
|
||||
if (InternalQuickSort(self, 0, length, compare, keyfunc) < 0)
|
||||
return NULL;
|
||||
|
||||
// Finally reverse the result if requested.
|
||||
if (reverse) {
|
||||
Message* message = self->message;
|
||||
const Reflection* reflection = message->GetReflection();
|
||||
const FieldDescriptor* descriptor = self->parent_field_descriptor;
|
||||
// Since Python protobuf objects are never arena-allocated, adding and
|
||||
// removing message pointers to the underlying array is just updating
|
||||
// pointers.
|
||||
for (Py_ssize_t i = 0; i < length; ++i)
|
||||
reflection->ReleaseLast(message, descriptor);
|
||||
|
||||
// Reverse the Message array.
|
||||
for (int i = 0; i < length / 2; ++i)
|
||||
reflection->SwapElements(message, descriptor, i, length - i - 1);
|
||||
|
||||
// Reverse the Python list.
|
||||
ScopedPyObjectPtr res(PyObject_CallMethod(self->child_messages,
|
||||
"reverse", NULL));
|
||||
if (res == NULL)
|
||||
return NULL;
|
||||
for (Py_ssize_t i = 0; i < length; ++i) {
|
||||
CMessage* py_cmsg = reinterpret_cast<CMessage*>(
|
||||
PyList_GET_ITEM(self->child_messages, i));
|
||||
reflection->AddAllocatedMessage(message, descriptor, py_cmsg->message);
|
||||
}
|
||||
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
static PyObject* SortReleased(RepeatedCompositeContainer* self,
|
||||
PyObject* args,
|
||||
PyObject* kwds) {
|
||||
// Returns 0 if successful; returns -1 and sets an exception if
|
||||
// unsuccessful.
|
||||
static int SortPythonMessages(RepeatedCompositeContainer* self,
|
||||
PyObject* args,
|
||||
PyObject* kwds) {
|
||||
ScopedPyObjectPtr m(PyObject_GetAttrString(self->child_messages, "sort"));
|
||||
if (m == NULL)
|
||||
return NULL;
|
||||
return -1;
|
||||
if (PyObject_Call(m, args, kwds) == NULL)
|
||||
return NULL;
|
||||
Py_RETURN_NONE;
|
||||
return -1;
|
||||
if (self->message != NULL) {
|
||||
ReorderAttached(self);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static PyObject* Sort(RepeatedCompositeContainer* self,
|
||||
@ -527,11 +391,10 @@ static PyObject* Sort(RepeatedCompositeContainer* self,
|
||||
if (UpdateChildMessages(self) < 0) {
|
||||
return NULL;
|
||||
}
|
||||
if (self->message == NULL) {
|
||||
return SortReleased(self, args, kwds);
|
||||
} else {
|
||||
return SortAttached(self, args, kwds);
|
||||
if (SortPythonMessages(self, args, kwds) < 0) {
|
||||
return NULL;
|
||||
}
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
@ -584,18 +447,6 @@ void ReleaseLastTo(CMessage* parent,
|
||||
parent->message->GetReflection()->ReleaseLast(parent->message, field));
|
||||
// TODO(tibell): Deal with proto1.
|
||||
|
||||
// ReleaseMessage will return NULL which differs from
|
||||
// child_cmessage->message, if the field does not exist. In this case,
|
||||
// the latter points to the default instance via a const_cast<>, so we
|
||||
// have to reset it to a new mutable object since we are taking ownership.
|
||||
if (released_message.get() == NULL) {
|
||||
const Message* prototype =
|
||||
cmessage::GetMessageFactory()->GetPrototype(
|
||||
target->message->GetDescriptor());
|
||||
GOOGLE_CHECK_NOTNULL(prototype);
|
||||
released_message.reset(prototype->New());
|
||||
}
|
||||
|
||||
target->parent = NULL;
|
||||
target->parent_field_descriptor = NULL;
|
||||
target->message = released_message.get();
|
||||
@ -732,7 +583,7 @@ PyTypeObject RepeatedCompositeContainer_Type = {
|
||||
0, // tp_as_number
|
||||
&repeated_composite_container::SqMethods, // tp_as_sequence
|
||||
&repeated_composite_container::MpMethods, // tp_as_mapping
|
||||
0, // tp_hash
|
||||
PyObject_HashNotImplemented, // tp_hash
|
||||
0, // tp_call
|
||||
0, // tp_str
|
||||
0, // tp_getattro
|
||||
|
@ -39,10 +39,12 @@
|
||||
#endif
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/dynamic_message.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/pyext/descriptor.h>
|
||||
#include <google/protobuf/pyext/descriptor_pool.h>
|
||||
#include <google/protobuf/pyext/message.h>
|
||||
#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
|
||||
|
||||
@ -68,7 +70,7 @@ static int InternalAssignRepeatedField(
|
||||
self->parent_field_descriptor);
|
||||
for (Py_ssize_t i = 0; i < PyList_GET_SIZE(list); ++i) {
|
||||
PyObject* value = PyList_GET_ITEM(list, i);
|
||||
if (Append(self, value) == NULL) {
|
||||
if (ScopedPyObjectPtr(Append(self, value)) == NULL) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
@ -510,7 +512,7 @@ PyObject* Extend(RepeatedScalarContainer* self, PyObject* value) {
|
||||
}
|
||||
ScopedPyObjectPtr next;
|
||||
while ((next.reset(PyIter_Next(iter))) != NULL) {
|
||||
if (Append(self, next) == NULL) {
|
||||
if (ScopedPyObjectPtr(Append(self, next)) == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
@ -690,8 +692,7 @@ static int InitializeAndCopyToParentContainer(
|
||||
if (values == NULL) {
|
||||
return -1;
|
||||
}
|
||||
Message* new_message = cmessage::GetMessageFactory()->GetPrototype(
|
||||
from->message->GetDescriptor())->New();
|
||||
Message* new_message = from->message->New();
|
||||
to->parent = NULL;
|
||||
to->parent_field_descriptor = from->parent_field_descriptor;
|
||||
to->message = new_message;
|
||||
@ -781,7 +782,7 @@ PyTypeObject RepeatedScalarContainer_Type = {
|
||||
0, // tp_as_number
|
||||
&repeated_scalar_container::SqMethods, // tp_as_sequence
|
||||
&repeated_scalar_container::MpMethods, // tp_as_mapping
|
||||
0, // tp_hash
|
||||
PyObject_HashNotImplemented, // tp_hash
|
||||
0, // tp_call
|
||||
0, // tp_str
|
||||
0, // tp_getattro
|
||||
|
@ -32,6 +32,7 @@
|
||||
|
||||
#include <google/protobuf/pyext/scalar_map_container.h>
|
||||
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/pyext/message.h>
|
||||
|
@ -51,16 +51,22 @@ class ScopedPyObjectPtr {

// Reset. Deletes the current owned object, if any.
// Then takes ownership of a new object, if given.
// this->reset(this->get()) works.
// This function must be called with a reference that you own.
// this->reset(this->get()) is wrong!
// this->reset(this->release()) is OK.
PyObject* reset(PyObject* p = NULL) {
if (p != ptr_) {
Py_XDECREF(ptr_);
ptr_ = p;
}
Py_XDECREF(ptr_);
ptr_ = p;
return ptr_;
}

// ScopedPyObjectPtr should not be copied.
// We explicitly list and delete this overload to avoid automatic conversion
// to PyObject*, which is wrong in this case.
PyObject* reset(const ScopedPyObjectPtr& other) = delete;

// Releases ownership of the object.
// The caller now owns the returned reference.
PyObject* release() {
PyObject* p = ptr_;
ptr_ = NULL;
|
||||
|
@ -49,101 +49,23 @@ __author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf import descriptor as descriptor_mod
|
||||
from google.protobuf import message
|
||||
|
||||
_FieldDescriptor = descriptor_mod.FieldDescriptor
|
||||
|
||||
|
||||
if api_implementation.Type() == 'cpp':
|
||||
from google.protobuf.pyext import cpp_message as message_impl
|
||||
else:
|
||||
from google.protobuf.internal import python_message as message_impl
|
||||
|
||||
_NewMessage = message_impl.NewMessage
|
||||
_InitMessage = message_impl.InitMessage
|
||||
|
||||
|
||||
class GeneratedProtocolMessageType(type):
|
||||
|
||||
"""Metaclass for protocol message classes created at runtime from Descriptors.
|
||||
|
||||
We add implementations for all methods described in the Message class. We
|
||||
also create properties to allow getting/setting all fields in the protocol
|
||||
message. Finally, we create slots to prevent users from accidentally
|
||||
"setting" nonexistent fields in the protocol message, which then wouldn't get
|
||||
serialized / deserialized properly.
|
||||
|
||||
The protocol compiler currently uses this metaclass to create protocol
|
||||
message classes at runtime. Clients can also manually create their own
|
||||
classes at runtime, as in this example:
|
||||
|
||||
mydescriptor = Descriptor(.....)
|
||||
class MyProtoClass(Message):
|
||||
__metaclass__ = GeneratedProtocolMessageType
|
||||
DESCRIPTOR = mydescriptor
|
||||
myproto_instance = MyProtoClass()
|
||||
myproto.foo_field = 23
|
||||
...
|
||||
|
||||
The above example will not work for nested types. If you wish to include them,
|
||||
use reflection.MakeClass() instead of manually instantiating the class in
|
||||
order to create the appropriate class structure.
|
||||
"""
|
||||
|
||||
# Must be consistent with the protocol-compiler code in
|
||||
# proto2/compiler/internal/generator.*.
|
||||
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
||||
|
||||
def __new__(cls, name, bases, dictionary):
|
||||
"""Custom allocation for runtime-generated class types.
|
||||
|
||||
We override __new__ because this is apparently the only place
|
||||
where we can meaningfully set __slots__ on the class we're creating(?).
|
||||
(The interplay between metaclasses and slots is not very well-documented).
|
||||
|
||||
Args:
|
||||
name: Name of the class (ignored, but required by the
|
||||
metaclass protocol).
|
||||
bases: Base classes of the class we're constructing.
|
||||
(Should be message.Message). We ignore this field, but
|
||||
it's required by the metaclass protocol
|
||||
dictionary: The class dictionary of the class we're
|
||||
constructing. dictionary[_DESCRIPTOR_KEY] must contain
|
||||
a Descriptor object describing this protocol message
|
||||
type.
|
||||
|
||||
Returns:
|
||||
Newly-allocated class.
|
||||
"""
|
||||
descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
|
||||
bases = _NewMessage(bases, descriptor, dictionary)
|
||||
superclass = super(GeneratedProtocolMessageType, cls)
|
||||
|
||||
new_class = superclass.__new__(cls, name, bases, dictionary)
|
||||
return new_class
|
||||
|
||||
def __init__(cls, name, bases, dictionary):
|
||||
"""Here we perform the majority of our work on the class.
|
||||
We add enum getters, an __init__ method, implementations
|
||||
of all Message methods, and properties for all fields
|
||||
in the protocol type.
|
||||
|
||||
Args:
|
||||
name: Name of the class (ignored, but required by the
|
||||
metaclass protocol).
|
||||
bases: Base classes of the class we're constructing.
|
||||
(Should be message.Message). We ignore this field, but
|
||||
it's required by the metaclass protocol
|
||||
dictionary: The class dictionary of the class we're
|
||||
constructing. dictionary[_DESCRIPTOR_KEY] must contain
|
||||
a Descriptor object describing this protocol message
|
||||
type.
|
||||
"""
|
||||
descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
|
||||
_InitMessage(descriptor, cls)
|
||||
superclass = super(GeneratedProtocolMessageType, cls)
|
||||
superclass.__init__(name, bases, dictionary)
|
||||
# The type of all Message classes.
|
||||
# Part of the public interface.
|
||||
#
|
||||
# Used by generated files, but clients can also use it at runtime:
|
||||
# mydescriptor = pool.FindDescriptor(.....)
|
||||
# class MyProtoClass(Message):
|
||||
# __metaclass__ = GeneratedProtocolMessageType
|
||||
# DESCRIPTOR = mydescriptor
|
||||
GeneratedProtocolMessageType = message_impl.GeneratedProtocolMessageType
|
||||
|
||||
|
||||
def ParseMessage(descriptor, byte_str):
|
||||
|
@ -113,7 +113,7 @@ def PrintMessage(message, out, indent=0, as_utf8=False, as_one_line=False,
fields.sort(key=lambda x: x[0].index)
for field, value in fields:
if _IsMapEntry(field):
for key in value:
for key in sorted(value):
# This is slow for maps with submessage entries because it copies the
# entire tree. Unfortunately this would take significant refactoring
# of this file to work around.
|
||||
|
@ -191,6 +191,7 @@ libprotobuf_lite_la_SOURCES = \
|
||||
google/protobuf/stubs/stringpiece.h \
|
||||
google/protobuf/stubs/stringprintf.cc \
|
||||
google/protobuf/stubs/stringprintf.h \
|
||||
google/protobuf/stubs/structurally_valid.cc \
|
||||
google/protobuf/stubs/strutil.cc \
|
||||
google/protobuf/stubs/strutil.h \
|
||||
google/protobuf/stubs/time.cc \
|
||||
@ -232,7 +233,6 @@ libprotobuf_la_SOURCES = \
|
||||
google/protobuf/service.cc \
|
||||
google/protobuf/source_context.pb.cc \
|
||||
google/protobuf/struct.pb.cc \
|
||||
google/protobuf/stubs/structurally_valid.cc \
|
||||
google/protobuf/stubs/substitute.cc \
|
||||
google/protobuf/stubs/substitute.h \
|
||||
google/protobuf/text_format.cc \
|
||||
@ -659,7 +659,8 @@ COMMON_TEST_SOURCES = \
|
||||
google/protobuf/testing/file.h
|
||||
|
||||
check_PROGRAMS = protoc protobuf-test protobuf-lazy-descriptor-test \
|
||||
protobuf-lite-test test_plugin $(GZCHECKPROGRAMS)
|
||||
protobuf-lite-test test_plugin protobuf-lite-arena-test \
|
||||
$(GZCHECKPROGRAMS)
|
||||
protobuf_test_LDADD = $(PTHREAD_LIBS) libprotobuf.la libprotoc.la \
|
||||
../gmock/gtest/lib/libgtest.la \
|
||||
../gmock/lib/libgmock.la \
|
||||
@ -756,21 +757,40 @@ protobuf_lazy_descriptor_test_SOURCES = \
|
||||
$(COMMON_TEST_SOURCES)
|
||||
nodist_protobuf_lazy_descriptor_test_SOURCES = $(protoc_outputs)
|
||||
|
||||
# Build lite_unittest separately, since it doesn't use gtest.
|
||||
protobuf_lite_test_LDADD = $(PTHREAD_LIBS) libprotobuf-lite.la
|
||||
protobuf_lite_test_CXXFLAGS = $(NO_OPT_CXXFLAGS)
|
||||
protobuf_lite_test_SOURCES = \
|
||||
COMMON_LITE_TEST_SOURCES = \
|
||||
google/protobuf/arena_test_util.cc \
|
||||
google/protobuf/arena_test_util.h \
|
||||
google/protobuf/lite_unittest.cc \
|
||||
google/protobuf/map_lite_test_util.cc \
|
||||
google/protobuf/map_lite_test_util.h \
|
||||
google/protobuf/test_util_lite.cc \
|
||||
google/protobuf/test_util_lite.h
|
||||
# TODO(teboring) add the file back and make the test build.
|
||||
# google/protobuf/map_lite_test.cc
|
||||
|
||||
# Build lite_unittest separately, since it doesn't use gtest. It can't
# depend on gtest because our internal version of gtest depends on proto
# full runtime and we want to make sure this test builds without full
# runtime.
|
||||
protobuf_lite_test_LDADD = $(PTHREAD_LIBS) libprotobuf-lite.la
|
||||
protobuf_lite_test_CXXFLAGS = $(NO_OPT_CXXFLAGS)
|
||||
protobuf_lite_test_SOURCES = \
|
||||
google/protobuf/lite_unittest.cc \
|
||||
$(COMMON_LITE_TEST_SOURCES)
|
||||
nodist_protobuf_lite_test_SOURCES = $(protoc_lite_outputs)
|
||||
|
||||
# lite_arena_unittest depends on gtest because teboring@ found that without
|
||||
# gtest when building the test internally our memory sanitizer doesn't detect
|
||||
# memory leaks (don't know why).
|
||||
protobuf_lite_arena_test_LDADD = $(PTHREAD_LIBS) libprotobuf-lite.la \
|
||||
../gmock/gtest/lib/libgtest.la \
|
||||
../gmock/lib/libgmock.la \
|
||||
../gmock/lib/libgmock_main.la
|
||||
protobuf_lite_arena_test_CPPFLAGS = -I$(srcdir)/../gmock/include \
|
||||
-I$(srcdir)/../gmock/gtest/include
|
||||
protobuf_lite_arena_test_CXXFLAGS = $(NO_OPT_CXXFLAGS)
|
||||
protobuf_lite_arena_test_SOURCES = \
|
||||
google/protobuf/lite_arena_unittest.cc \
|
||||
$(COMMON_LITE_TEST_SOURCES)
|
||||
nodist_protobuf_lite_arena_test_SOURCES = $(protoc_lite_outputs)
|
||||
|
||||
# Test plugin binary.
|
||||
test_plugin_LDADD = $(PTHREAD_LIBS) libprotobuf.la libprotoc.la \
|
||||
../gmock/gtest/lib/libgtest.la
|
||||
@ -790,4 +810,5 @@ zcgunzip_SOURCES = google/protobuf/testing/zcgunzip.cc
|
||||
endif
|
||||
|
||||
TESTS = protobuf-test protobuf-lazy-descriptor-test protobuf-lite-test \
|
||||
google/protobuf/compiler/zip_output_unittest.sh $(GZTESTS)
|
||||
google/protobuf/compiler/zip_output_unittest.sh $(GZTESTS) \
|
||||
protobuf-lite-arena-test
|
||||
|
@@ -50,7 +50,7 @@ AnyMetadata::AnyMetadata(UrlType* type_url, ValueType* value)

void AnyMetadata::PackFrom(const Message& message) {
type_url_->SetNoArena(&::google::protobuf::internal::GetEmptyString(),
GetTypeUrl(message.GetDescriptor()));
GetTypeUrl(message.GetDescriptor()));
message.SerializeToString(value_->MutableNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited()));
}
@@ -76,7 +76,7 @@ bool ParseAnyTypeUrl(const string& type_url, string* full_type_name) {
type_url.size() - prefix_len);
return true;
}
return true;
return false;
}


@@ -34,9 +34,9 @@
#include <string>

#include <google/protobuf/stubs/common.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/message.h>
#include <google/protobuf/arenastring.h>

namespace google {
namespace protobuf {
@@ -66,8 +66,12 @@ void Arena::Init() {
first_block->size = options_.initial_block_size;
first_block->pos = kHeaderSize;
first_block->next = NULL;
first_block->owner = &first_block->owner;
AddBlock(first_block);
// Thread which calls Init() owns the first block. This allows the
// single-threaded case to allocate on the first block without taking any
// locks.
first_block->owner = &thread_cache();
SetThreadCacheBlock(first_block);
AddBlockInternal(first_block);
owns_first_block_ = false;
}

@@ -80,7 +84,7 @@ void Arena::Init() {
}

Arena::~Arena() {
uint64 space_allocated = Reset();
uint64 space_allocated = ResetInternal();

// Call the destruction hook
if (options_.on_arena_destruction != NULL) {
@@ -89,10 +93,14 @@ Arena::~Arena() {
}

uint64 Arena::Reset() {
CleanupList();
uint64 space_allocated = FreeBlocks();
// Invalidate any ThreadCaches pointing to any blocks we just destroyed.
lifecycle_id_ = lifecycle_id_generator_.GetNext();
return ResetInternal();
}

uint64 Arena::ResetInternal() {
CleanupList();
uint64 space_allocated = FreeBlocks();

// Call the reset hook
if (options_.on_arena_reset != NULL) {
@@ -137,6 +145,10 @@ Arena::Block* Arena::NewBlock(void* me, Block* my_last_block, size_t n,

void Arena::AddBlock(Block* b) {
MutexLock l(&blocks_lock_);
AddBlockInternal(b);
}

void Arena::AddBlockInternal(Block* b) {
b->next = reinterpret_cast<Block*>(google::protobuf::internal::NoBarrier_Load(&blocks_));
google::protobuf::internal::Release_Store(&blocks_, reinterpret_cast<google::protobuf::internal::AtomicWord>(b));
if (b->avail() != 0) {
@@ -181,16 +193,6 @@ void* Arena::AllocateAligned(const std::type_info* allocated, size_t n) {
void* me = &thread_cache();
Block* b = reinterpret_cast<Block*>(google::protobuf::internal::Acquire_Load(&hint_));
if (!b || b->owner != me || b->avail() < n) {
// If the next block to allocate from is the first block, try to claim it
// for this thread.
if (!owns_first_block_ && b->next == NULL) {
MutexLock l(&blocks_lock_);
if (b->owner == &b->owner && b->avail() >= n) {
b->owner = me;
SetThreadCacheBlock(b);
return AllocFromBlock(b, n);
}
}
return SlowAlloc(n);
}
return AllocFromBlock(b, n);
@@ -267,8 +269,12 @@ uint64 Arena::FreeBlocks() {
// Make the first block that was passed in through ArenaOptions
// available for reuse.
first_block->pos = kHeaderSize;
first_block->owner = &first_block->owner;
AddBlock(first_block);
// Thread which calls Reset() owns the first block. This allows the
// single-threaded case to allocate on the first block without taking any
// locks.
first_block->owner = &thread_cache();
SetThreadCacheBlock(first_block);
AddBlockInternal(first_block);
}
return space_allocated;
}
@@ -31,6 +31,7 @@
#ifndef GOOGLE_PROTOBUF_ARENA_H__
#define GOOGLE_PROTOBUF_ARENA_H__

#include <limits>
#if __cplusplus >= 201103L
#include <google/protobuf/stubs/type_traits.h>
#endif
@@ -39,7 +40,8 @@
#include <google/protobuf/stubs/atomic_sequence_num.h>
#include <google/protobuf/stubs/atomicops.h>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/platform_macros.h>
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/mutex.h>
#include <google/protobuf/stubs/type_traits.h>

namespace google {
@ -414,6 +416,9 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
// trivially destructible.
|
||||
template <typename T> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
static T* CreateArray(::google::protobuf::Arena* arena, size_t num_elements) {
|
||||
GOOGLE_CHECK_LE(num_elements,
|
||||
std::numeric_limits<size_t>::max() / sizeof(T))
|
||||
<< "Requested size is too large to fit into size_t.";
|
||||
if (arena == NULL) {
|
||||
return static_cast<T*>(::operator new[](num_elements * sizeof(T)));
|
||||
} else {
|
||||
@ -425,16 +430,16 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
// of the underlying blocks. The total space used may not include the new
|
||||
// blocks that are allocated by this arena from other threads concurrently
|
||||
// with the call to this method.
|
||||
uint64 SpaceAllocated() const GOOGLE_ATTRIBUTE_NOINLINE;
|
||||
GOOGLE_ATTRIBUTE_NOINLINE uint64 SpaceAllocated() const;
|
||||
// As above, but does not include any free space in underlying blocks.
|
||||
uint64 SpaceUsed() const GOOGLE_ATTRIBUTE_NOINLINE;
|
||||
GOOGLE_ATTRIBUTE_NOINLINE uint64 SpaceUsed() const;
|
||||
|
||||
// Frees all storage allocated by this arena after calling destructors
|
||||
// registered with OwnDestructor() and freeing objects registered with Own().
|
||||
// Any objects allocated on this arena are unusable after this call. It also
|
||||
// returns the total space used by the arena which is the sums of the sizes
|
||||
// of the allocated blocks. This method is not thread-safe.
|
||||
uint64 Reset() GOOGLE_ATTRIBUTE_NOINLINE;
|
||||
GOOGLE_ATTRIBUTE_NOINLINE uint64 Reset();
|
||||
|
||||
// Adds |object| to a list of heap-allocated objects to be freed with |delete|
|
||||
// when the arena is destroyed or reset.
|
||||
@ -459,8 +464,8 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
// will be manually called when the arena is destroyed or reset. This differs
|
||||
// from OwnDestructor() in that any member function may be specified, not only
|
||||
// the class destructor.
|
||||
void OwnCustomDestructor(void* object, void (*destruct)(void*))
|
||||
GOOGLE_ATTRIBUTE_NOINLINE {
|
||||
GOOGLE_ATTRIBUTE_NOINLINE void OwnCustomDestructor(void* object,
|
||||
void (*destruct)(void*)) {
|
||||
AddListNode(object, destruct);
|
||||
}
|
||||
|
||||
@ -469,7 +474,7 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
// latter is a virtual call, while this method is a templated call that
|
||||
// resolves at compile-time.
|
||||
template<typename T> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
static inline ::google::protobuf::Arena* GetArena(const T* value) {
|
||||
static ::google::protobuf::Arena* GetArena(const T* value) {
|
||||
return GetArenaInternal(value, static_cast<T*>(0));
|
||||
}
|
||||
|
||||
@ -507,7 +512,7 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
// aligned at a multiple of 8 bytes.
|
||||
size_t pos;
|
||||
size_t size; // total size of the block.
|
||||
size_t avail() const GOOGLE_ATTRIBUTE_ALWAYS_INLINE { return size - pos; }
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE size_t avail() const { return size - pos; }
|
||||
// data follows
|
||||
};
|
||||
|
||||
@ -555,6 +560,33 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
return google::protobuf::internal::has_trivial_destructor<T>::value;
|
||||
}
|
||||
|
||||
// Helper type trait that indicates, at compile time, whether the destructor
// of type T should be called when the arena is destroyed. This is only to
// allow construction of higher-level templated utilities.
|
||||
// is_destructor_skippable<T>::value is an instance of google::protobuf::internal::true_type if the
|
||||
// destructor of the message type T should not be called when arena is
|
||||
// destroyed or google::protobuf::internal::has_trivial_destructor<T>::value == true, and
|
||||
// google::protobuf::internal::false_type otherwise.
|
||||
//
|
||||
// This is inside Arena because only Arena has the friend relationships
|
||||
// necessary to see the underlying generated code traits.
|
||||
template<typename T>
|
||||
struct is_destructor_skippable {
|
||||
template<typename U>
|
||||
static char DestructorSkippable(
|
||||
const typename U::DestructorSkippable_*);
|
||||
template<typename U>
|
||||
static double DestructorSkippable(...);
|
||||
|
||||
// This will resolve to either google::protobuf::internal::true_type or google::protobuf::internal::false_type.
|
||||
typedef google::protobuf::internal::integral_constant<bool,
|
||||
sizeof(DestructorSkippable<const T>(static_cast<const T*>(0))) ==
|
||||
sizeof(char) || google::protobuf::internal::has_trivial_destructor<T>::value == true>
|
||||
type;
|
||||
static const type value;
|
||||
};
|
||||
|
||||
|
||||
// CreateMessage<T> requires that T supports arenas, but this private method
|
||||
// works whether or not T supports arenas. These are not exposed to user code
|
||||
// as it can cause confusing API usages, and end up having double free in
|
||||
@ -574,14 +606,16 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
// Just allocate the required size for the given type assuming the
|
||||
// type has a trivial constructor.
|
||||
template<typename T> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
inline T* CreateInternalRawArray(size_t num_elements) {
|
||||
T* CreateInternalRawArray(size_t num_elements) {
|
||||
GOOGLE_CHECK_LE(num_elements,
|
||||
std::numeric_limits<size_t>::max() / sizeof(T))
|
||||
<< "Requested size is too large to fit into size_t.";
|
||||
return static_cast<T*>(
|
||||
AllocateAligned(RTTI_TYPE_ID(T), sizeof(T) * num_elements));
|
||||
}
|
||||
|
||||
template <typename T> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
inline T* CreateInternal(
|
||||
bool skip_explicit_ownership) {
|
||||
T* CreateInternal(bool skip_explicit_ownership) {
|
||||
T* t = new (AllocateAligned(RTTI_TYPE_ID(T), sizeof(T))) T();
|
||||
if (!skip_explicit_ownership) {
|
||||
AddListNode(t, &internal::arena_destruct_object<T>);
|
||||
@ -590,8 +624,7 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
}
|
||||
|
||||
template <typename T, typename Arg> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
inline T* CreateInternal(
|
||||
bool skip_explicit_ownership, const Arg& arg) {
|
||||
T* CreateInternal(bool skip_explicit_ownership, const Arg& arg) {
|
||||
T* t = new (AllocateAligned(RTTI_TYPE_ID(T), sizeof(T))) T(arg);
|
||||
if (!skip_explicit_ownership) {
|
||||
AddListNode(t, &internal::arena_destruct_object<T>);
|
||||
@ -600,7 +633,7 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
}
|
||||
|
||||
template <typename T, typename Arg1, typename Arg2> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
inline T* CreateInternal(
|
||||
T* CreateInternal(
|
||||
bool skip_explicit_ownership, const Arg1& arg1, const Arg2& arg2) {
|
||||
T* t = new (AllocateAligned(RTTI_TYPE_ID(T), sizeof(T))) T(arg1, arg2);
|
||||
if (!skip_explicit_ownership) {
|
||||
@ -610,10 +643,10 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
}
|
||||
|
||||
template <typename T, typename Arg1, typename Arg2, typename Arg3>
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE inline T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3) {
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3) {
|
||||
T* t = new (AllocateAligned(RTTI_TYPE_ID(T), sizeof(T)))
|
||||
T(arg1, arg2, arg3);
|
||||
if (!skip_explicit_ownership) {
|
||||
@ -624,11 +657,11 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
|
||||
template <typename T, typename Arg1, typename Arg2, typename Arg3,
|
||||
typename Arg4>
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE inline T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3,
|
||||
const Arg4& arg4) {
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3,
|
||||
const Arg4& arg4) {
|
||||
T* t = new (AllocateAligned(RTTI_TYPE_ID(T), sizeof(T)))
|
||||
T(arg1, arg2, arg3, arg4);
|
||||
if (!skip_explicit_ownership) {
|
||||
@ -639,12 +672,12 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
|
||||
template <typename T, typename Arg1, typename Arg2, typename Arg3,
|
||||
typename Arg4, typename Arg5>
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE inline T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3,
|
||||
const Arg4& arg4,
|
||||
const Arg5& arg5) {
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3,
|
||||
const Arg4& arg4,
|
||||
const Arg5& arg5) {
|
||||
T* t = new (AllocateAligned(RTTI_TYPE_ID(T), sizeof(T)))
|
||||
T(arg1, arg2, arg3, arg4, arg5);
|
||||
if (!skip_explicit_ownership) {
|
||||
@ -655,13 +688,13 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
|
||||
template <typename T, typename Arg1, typename Arg2, typename Arg3,
|
||||
typename Arg4, typename Arg5, typename Arg6>
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE inline T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3,
|
||||
const Arg4& arg4,
|
||||
const Arg5& arg5,
|
||||
const Arg6& arg6) {
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3,
|
||||
const Arg4& arg4,
|
||||
const Arg5& arg5,
|
||||
const Arg6& arg6) {
|
||||
T* t = new (AllocateAligned(RTTI_TYPE_ID(T), sizeof(T)))
|
||||
T(arg1, arg2, arg3, arg4, arg5, arg6);
|
||||
if (!skip_explicit_ownership) {
|
||||
@ -672,14 +705,14 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
|
||||
template <typename T, typename Arg1, typename Arg2, typename Arg3,
|
||||
typename Arg4, typename Arg5, typename Arg6, typename Arg7>
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE inline T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3,
|
||||
const Arg4& arg4,
|
||||
const Arg5& arg5,
|
||||
const Arg6& arg6,
|
||||
const Arg7& arg7) {
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3,
|
||||
const Arg4& arg4,
|
||||
const Arg5& arg5,
|
||||
const Arg6& arg6,
|
||||
const Arg7& arg7) {
|
||||
T* t = new (AllocateAligned(RTTI_TYPE_ID(T), sizeof(T)))
|
||||
T(arg1, arg2, arg3, arg4, arg5, arg6, arg7);
|
||||
if (!skip_explicit_ownership) {
|
||||
@ -691,15 +724,15 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
template <typename T, typename Arg1, typename Arg2, typename Arg3,
|
||||
typename Arg4, typename Arg5, typename Arg6, typename Arg7,
|
||||
typename Arg8>
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE inline T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3,
|
||||
const Arg4& arg4,
|
||||
const Arg5& arg5,
|
||||
const Arg6& arg6,
|
||||
const Arg7& arg7,
|
||||
const Arg8& arg8) {
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE T* CreateInternal(bool skip_explicit_ownership,
|
||||
const Arg1& arg1,
|
||||
const Arg2& arg2,
|
||||
const Arg3& arg3,
|
||||
const Arg4& arg4,
|
||||
const Arg5& arg5,
|
||||
const Arg6& arg6,
|
||||
const Arg7& arg7,
|
||||
const Arg8& arg8) {
|
||||
T* t = new (AllocateAligned(RTTI_TYPE_ID(T), sizeof(T)))
|
||||
T(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8);
|
||||
if (!skip_explicit_ownership) {
|
||||
@ -709,21 +742,21 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
}
|
||||
|
||||
template <typename T> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
inline T* CreateMessageInternal(typename T::InternalArenaConstructable_*) {
|
||||
T* CreateMessageInternal(typename T::InternalArenaConstructable_*) {
|
||||
return CreateInternal<T, Arena*>(SkipDeleteList<T>(static_cast<T*>(0)),
|
||||
this);
|
||||
}
|
||||
|
||||
template <typename T, typename Arg> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
inline T* CreateMessageInternal(typename T::InternalArenaConstructable_*,
|
||||
const Arg& arg) {
|
||||
T* CreateMessageInternal(typename T::InternalArenaConstructable_*,
|
||||
const Arg& arg) {
|
||||
return CreateInternal<T, Arena*>(SkipDeleteList<T>(static_cast<T*>(0)),
|
||||
this, arg);
|
||||
}
|
||||
|
||||
template <typename T, typename Arg1, typename Arg2> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
inline T* CreateMessageInternal(typename T::InternalArenaConstructable_*,
|
||||
const Arg1& arg1, const Arg2& arg2) {
|
||||
T* CreateMessageInternal(typename T::InternalArenaConstructable_*,
|
||||
const Arg1& arg1, const Arg2& arg2) {
|
||||
return CreateInternal<T, Arena*>(SkipDeleteList<T>(static_cast<T*>(0)),
|
||||
this, arg1, arg2);
|
||||
}
|
||||
@ -734,19 +767,29 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
template <typename T>
|
||||
static void CreateInArenaStorage(T* ptr, Arena* arena) {
|
||||
CreateInArenaStorageInternal(ptr, arena, is_arena_constructable<T>::value);
|
||||
RegisterDestructorInternal(ptr, arena, is_destructor_skippable<T>::value);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static void CreateInArenaStorageInternal(
|
||||
T* ptr, Arena* arena, google::protobuf::internal::true_type) {
|
||||
new (ptr) T(arena);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static void CreateInArenaStorageInternal(
|
||||
T* ptr, Arena* arena, google::protobuf::internal::false_type) {
|
||||
new (ptr) T;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static void RegisterDestructorInternal(
|
||||
T* ptr, Arena* arena, google::protobuf::internal::true_type) {}
|
||||
template <typename T>
|
||||
static void RegisterDestructorInternal(
|
||||
T* ptr, Arena* arena, google::protobuf::internal::false_type) {
|
||||
arena->OwnDestructor(ptr);
|
||||
}
|
||||
|
||||
// These implement Own(), which registers an object for deletion (destructor
|
||||
// call and operator delete()). The second parameter has type 'true_type' if T
|
||||
// is a subtype of ::google::protobuf::Message and 'false_type' otherwise. Collapsing
|
||||
@ -769,13 +812,13 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
// InternalArenaConstructable_ tags can be associated with an arena, and such
|
||||
// objects must implement a GetArenaNoVirtual() method.
|
||||
template<typename T> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
static inline ::google::protobuf::Arena* GetArenaInternal(const T* value,
|
||||
typename T::InternalArenaConstructable_*) {
|
||||
static ::google::protobuf::Arena* GetArenaInternal(
|
||||
const T* value, typename T::InternalArenaConstructable_*) {
|
||||
return value->GetArenaNoVirtual();
|
||||
}
|
||||
|
||||
template<typename T> GOOGLE_ATTRIBUTE_ALWAYS_INLINE
|
||||
static inline ::google::protobuf::Arena* GetArenaInternal(const T* value, ...) {
|
||||
static ::google::protobuf::Arena* GetArenaInternal(const T* value, ...) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
@ -785,7 +828,7 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
void* AllocateAligned(const std::type_info* allocated, size_t n);
|
||||
|
||||
// Allocate an internal allocation, avoiding optional typed monitoring.
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE inline void* AllocateAligned(size_t n) {
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE void* AllocateAligned(size_t n) {
|
||||
return AllocateAligned(NULL, n);
|
||||
}
|
||||
|
||||
@ -803,6 +846,7 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
void AddListNode(void* elem, void (*cleanup)(void*));
|
||||
// Delete or Destruct all objects owned by the arena.
|
||||
void CleanupList();
|
||||
uint64 ResetInternal();
|
||||
|
||||
inline void SetThreadCacheBlock(Block* block) {
|
||||
thread_cache().last_block_used_ = block;
|
||||
@ -829,6 +873,9 @@ class LIBPROTOBUF_EXPORT Arena {
|
||||
Mutex blocks_lock_;
|
||||
|
||||
void AddBlock(Block* b);
|
||||
// Access must be synchronized, either by blocks_lock_ or by being called from
|
||||
// Init()/Reset().
|
||||
void AddBlockInternal(Block* b);
|
||||
void* SlowAlloc(size_t n);
|
||||
Block* FindBlock(void* me);
|
||||
Block* NewBlock(void* me, Block* my_last_block, size_t n,
|
||||
@ -854,6 +901,11 @@ const typename Arena::is_arena_constructable<T>::type
|
||||
Arena::is_arena_constructable<T>::value =
|
||||
typename Arena::is_arena_constructable<T>::type();
|
||||
|
||||
template<typename T>
|
||||
const typename Arena::is_destructor_skippable<T>::type
|
||||
Arena::is_destructor_skippable<T>::value =
|
||||
typename Arena::is_destructor_skippable<T>::type();
|
||||
|
||||
} // namespace protobuf
|
||||
|
||||
} // namespace google
|
||||
|
@ -28,6 +28,7 @@
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/arena_test_util.h>
|
||||
|
||||
|
@ -40,7 +40,9 @@
|
||||
#include <typeinfo>
|
||||
#include <vector>
|
||||
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/scoped_ptr.h>
|
||||
#include <google/protobuf/arena_test_util.h>
|
||||
#include <google/protobuf/test_util.h>
|
||||
#include <google/protobuf/unittest.pb.h>
|
||||
@ -619,8 +621,6 @@ TEST(ArenaTest, RepeatedPtrFieldAddClearedTest) {
|
||||
}
|
||||
}
|
||||
|
||||
// N.B.: no reflection version of this test because all the arena-specific code
|
||||
// is in RepeatedPtrField, and the reflection works implicitly based on that.
|
||||
TEST(ArenaTest, AddAllocatedToRepeatedField) {
|
||||
// Heap->arena case.
|
||||
Arena arena1;
|
||||
@ -680,6 +680,55 @@ TEST(ArenaTest, AddAllocatedToRepeatedField) {
|
||||
}
|
||||
}
|
||||
|
||||
TEST(ArenaTest, AddAllocatedToRepeatedFieldViaReflection) {
|
||||
// Heap->arena case.
|
||||
Arena arena1;
|
||||
TestAllTypes* arena1_message = Arena::CreateMessage<TestAllTypes>(&arena1);
|
||||
const Reflection* r = arena1_message->GetReflection();
|
||||
const Descriptor* d = arena1_message->GetDescriptor();
|
||||
const FieldDescriptor* fd =
|
||||
d->FindFieldByName("repeated_nested_message");
|
||||
for (int i = 0; i < 10; i++) {
|
||||
TestAllTypes::NestedMessage* heap_submessage =
|
||||
new TestAllTypes::NestedMessage;
|
||||
heap_submessage->set_bb(42);
|
||||
r->AddAllocatedMessage(arena1_message, fd, heap_submessage);
|
||||
// Should not copy object -- will use arena_->Own().
|
||||
EXPECT_EQ(heap_submessage,
|
||||
&arena1_message->repeated_nested_message(i));
|
||||
EXPECT_EQ(42, arena1_message->repeated_nested_message(i).bb());
|
||||
}
|
||||
|
||||
// Arena1->Arena2 case.
|
||||
arena1_message->Clear();
|
||||
for (int i = 0; i < 10; i++) {
|
||||
Arena arena2;
|
||||
TestAllTypes::NestedMessage* arena2_submessage =
|
||||
Arena::CreateMessage<TestAllTypes::NestedMessage>(&arena2);
|
||||
arena2_submessage->set_bb(42);
|
||||
r->AddAllocatedMessage(arena1_message, fd, arena2_submessage);
|
||||
// Should copy object.
|
||||
EXPECT_NE(arena2_submessage,
|
||||
&arena1_message->repeated_nested_message(i));
|
||||
EXPECT_EQ(42, arena1_message->repeated_nested_message(i).bb());
|
||||
}
|
||||
|
||||
// Arena->heap case.
|
||||
TestAllTypes* heap_message = new TestAllTypes;
|
||||
for (int i = 0; i < 10; i++) {
|
||||
Arena arena2;
|
||||
TestAllTypes::NestedMessage* arena2_submessage =
|
||||
Arena::CreateMessage<TestAllTypes::NestedMessage>(&arena2);
|
||||
arena2_submessage->set_bb(42);
|
||||
r->AddAllocatedMessage(heap_message, fd, arena2_submessage);
|
||||
// Should copy object.
|
||||
EXPECT_NE(arena2_submessage,
|
||||
&heap_message->repeated_nested_message(i));
|
||||
EXPECT_EQ(42, heap_message->repeated_nested_message(i).bb());
|
||||
}
|
||||
delete heap_message;
|
||||
}
|
||||
|
||||
TEST(ArenaTest, ReleaseLastRepeatedField) {
|
||||
// Release from arena-allocated repeated field and ensure that returned object
|
||||
// is heap-allocated.
|
||||
@ -1230,7 +1279,7 @@ TEST(ArenaTest, ArenaHooksSanity) {
|
||||
EXPECT_EQ(1, ArenaHooksTestUtil::num_init);
|
||||
EXPECT_EQ(0, ArenaHooksTestUtil::num_allocations);
|
||||
::google::protobuf::Arena::Create<uint64>(&arena);
|
||||
if (::google::protobuf::internal::has_trivial_destructor<uint64>::value) {
|
||||
if (google::protobuf::internal::has_trivial_destructor<uint64>::value) {
|
||||
EXPECT_EQ(1, ArenaHooksTestUtil::num_allocations);
|
||||
} else {
|
||||
EXPECT_EQ(2, ArenaHooksTestUtil::num_allocations);
|
||||
|
@ -33,6 +33,7 @@
|
||||
|
||||
#include <string>
|
||||
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/fastmem.h>
|
||||
|
||||
@ -145,7 +146,7 @@ struct LIBPROTOBUF_EXPORT ArenaStringPtr {
|
||||
// Swaps internal pointers. Arena-safety semantics: this is guarded by the
|
||||
// logic in Swap()/UnsafeArenaSwap() at the message level, so this method is
|
||||
// 'unsafe' if called directly.
|
||||
inline void Swap(ArenaStringPtr* other) GOOGLE_ATTRIBUTE_ALWAYS_INLINE {
|
||||
GOOGLE_ATTRIBUTE_ALWAYS_INLINE void Swap(ArenaStringPtr* other) {
|
||||
std::swap(ptr_, other->ptr_);
|
||||
}
|
||||
|
||||
@ -283,9 +284,8 @@ struct LIBPROTOBUF_EXPORT ArenaStringPtr {
|
||||
private:
|
||||
::std::string* ptr_;
|
||||
|
||||
inline void CreateInstance(::google::protobuf::Arena* arena,
|
||||
const ::std::string* initial_value)
|
||||
GOOGLE_ATTRIBUTE_NOINLINE {
|
||||
GOOGLE_ATTRIBUTE_NOINLINE void CreateInstance(::google::protobuf::Arena* arena,
|
||||
const ::std::string* initial_value) {
|
||||
// Assumes ptr_ is not NULL.
|
||||
if (initial_value != NULL) {
|
||||
ptr_ = new ::std::string(*initial_value);
|
||||
@ -296,8 +296,7 @@ struct LIBPROTOBUF_EXPORT ArenaStringPtr {
|
||||
arena->Own(ptr_);
|
||||
}
|
||||
}
|
||||
inline void CreateInstanceNoArena(const ::std::string* initial_value)
|
||||
GOOGLE_ATTRIBUTE_NOINLINE {
|
||||
GOOGLE_ATTRIBUTE_NOINLINE void CreateInstanceNoArena(const ::std::string* initial_value) {
|
||||
if (initial_value != NULL) {
|
||||
ptr_ = new ::std::string(*initial_value);
|
||||
} else {
|
||||
|
@ -42,6 +42,7 @@
|
||||
#endif
|
||||
#include <cstdlib>
|
||||
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
|
@ -34,6 +34,7 @@
|
||||
|
||||
#include <google/protobuf/compiler/code_generator.h>
|
||||
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/strutil.h>
|
||||
|
||||
|
@ -70,6 +70,7 @@
|
||||
#include <google/protobuf/io/coded_stream.h>
|
||||
#include <google/protobuf/io/zero_copy_stream_impl.h>
|
||||
#include <google/protobuf/io/printer.h>
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/strutil.h>
|
||||
#include <google/protobuf/stubs/substitute.h>
|
||||
#include <google/protobuf/stubs/map_util.h>
|
||||
@ -1657,7 +1658,11 @@ bool CommandLineInterface::WriteDescriptorSet(
|
||||
&already_seen, file_set.mutable_file());
|
||||
}
|
||||
} else {
|
||||
set<const FileDescriptor*> already_seen;
|
||||
for (int i = 0; i < parsed_files.size(); i++) {
|
||||
if (!already_seen.insert(parsed_files[i]).second) {
|
||||
continue;
|
||||
}
|
||||
FileDescriptorProto* file_proto = file_set.add_file();
|
||||
parsed_files[i]->CopyTo(file_proto);
|
||||
if (source_info_in_descriptor_set_) {
|
||||
|
@ -886,6 +886,39 @@ TEST_F(CommandLineInterfaceTest, WriteDescriptorSet) {
|
||||
EXPECT_FALSE(descriptor_set.file(0).has_source_code_info());
|
||||
}
|
||||
|
||||
TEST_F(CommandLineInterfaceTest, WriteDescriptorSetWithDuplicates) {
|
||||
CreateTempFile("foo.proto",
|
||||
"syntax = \"proto2\";\n"
|
||||
"message Foo {}\n");
|
||||
CreateTempFile("bar.proto",
|
||||
"syntax = \"proto2\";\n"
|
||||
"import \"foo.proto\";\n"
|
||||
"message Bar {\n"
|
||||
" optional Foo foo = 1;\n"
|
||||
"}\n");
|
||||
CreateTempFile("baz.proto",
|
||||
"syntax = \"proto2\";\n"
|
||||
"import \"foo.proto\";\n"
|
||||
"message Baz {\n"
|
||||
" optional Foo foo = 1;\n"
|
||||
"}\n");
|
||||
|
||||
Run("protocol_compiler --descriptor_set_out=$tmpdir/descriptor_set "
|
||||
"--proto_path=$tmpdir bar.proto foo.proto bar.proto baz.proto");
|
||||
|
||||
ExpectNoErrors();
|
||||
|
||||
FileDescriptorSet descriptor_set;
|
||||
ReadDescriptorSet("descriptor_set", &descriptor_set);
|
||||
if (HasFatalFailure()) return;
|
||||
EXPECT_EQ(3, descriptor_set.file_size());
|
||||
EXPECT_EQ("bar.proto", descriptor_set.file(0).name());
|
||||
EXPECT_EQ("foo.proto", descriptor_set.file(1).name());
|
||||
EXPECT_EQ("baz.proto", descriptor_set.file(2).name());
|
||||
// Descriptor set should not have source code info.
|
||||
EXPECT_FALSE(descriptor_set.file(0).has_source_code_info());
|
||||
}
|
||||
|
||||
TEST_F(CommandLineInterfaceTest, WriteDescriptorSetWithSourceInfo) {
|
||||
CreateTempFile("foo.proto",
|
||||
"syntax = \"proto2\";\n"
|
||||
|
@ -133,8 +133,7 @@ TEST(BootstrapTest, GeneratedDescriptorMatches) {
|
||||
CppGenerator generator;
|
||||
MockGeneratorContext context;
|
||||
string error;
|
||||
string parameter;
|
||||
parameter = "dllexport_decl=LIBPROTOBUF_EXPORT";
|
||||
string parameter = "dllexport_decl=LIBPROTOBUF_EXPORT";
|
||||
ASSERT_TRUE(generator.Generate(proto_file, parameter,
|
||||
&context, &error));
|
||||
parameter = "dllexport_decl=LIBPROTOC_EXPORT";
|
||||
|
@ -32,7 +32,6 @@
|
||||
// Based on original Protocol Buffers design by
|
||||
// Sanjay Ghemawat, Jeff Dean, and others.
|
||||
|
||||
#include <set>
|
||||
#include <map>
|
||||
|
||||
#include <google/protobuf/compiler/cpp/cpp_enum.h>
|
||||
@ -70,14 +69,11 @@ EnumGenerator::EnumGenerator(const EnumDescriptor* descriptor,
|
||||
|
||||
EnumGenerator::~EnumGenerator() {}
|
||||
|
||||
void EnumGenerator::GenerateForwardDeclaration(io::Printer* printer) {
|
||||
void EnumGenerator::FillForwardDeclaration(set<string>* enum_names) {
|
||||
if (!options_.proto_h) {
|
||||
return;
|
||||
}
|
||||
map<string, string> vars;
|
||||
vars["classname"] = classname_;
|
||||
printer->Print(vars, "enum $classname$ : int;\n");
|
||||
printer->Print(vars, "bool $classname$_IsValid(int value);\n");
|
||||
enum_names->insert(classname_);
|
||||
}
|
||||
|
||||
void EnumGenerator::GenerateDefinition(io::Printer* printer) {
|
||||
|
@ -35,6 +35,7 @@
|
||||
#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_ENUM_H__
|
||||
#define GOOGLE_PROTOBUF_COMPILER_CPP_ENUM_H__
|
||||
|
||||
#include <set>
|
||||
#include <string>
|
||||
#include <google/protobuf/compiler/cpp/cpp_options.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
@ -60,11 +61,11 @@ class EnumGenerator {
|
||||
|
||||
// Header stuff.
|
||||
|
||||
// Generate header code to forward-declare the enum. This is for use when
|
||||
// Fills the name to use when declaring the enum. This is for use when
|
||||
// generating other .proto.h files. This code should be placed within the
|
||||
// enum's package namespace, but NOT within any class, even for nested
|
||||
// enums.
|
||||
void GenerateForwardDeclaration(io::Printer* printer);
|
||||
void FillForwardDeclaration(set<string>* enum_names);
|
||||
|
||||
// Generate header code defining the enum. This code should be placed
|
||||
// within the enum's package namespace, but NOT within any class, even for
|
||||
|
@ -35,7 +35,6 @@
|
||||
#include <google/protobuf/compiler/cpp/cpp_enum_field.h>
|
||||
#include <google/protobuf/compiler/cpp/cpp_helpers.h>
|
||||
#include <google/protobuf/io/printer.h>
|
||||
#include <google/protobuf/descriptor.pb.h>
|
||||
#include <google/protobuf/stubs/strutil.h>
|
||||
|
||||
namespace google {
|
||||
|
@ -47,6 +47,7 @@
|
||||
#include <google/protobuf/descriptor.pb.h>
|
||||
#include <google/protobuf/wire_format.h>
|
||||
#include <google/protobuf/io/printer.h>
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/strutil.h>
|
||||
|
||||
|
@ -33,6 +33,7 @@
|
||||
// Sanjay Ghemawat, Jeff Dean, and others.
|
||||
|
||||
#include <google/protobuf/compiler/cpp/cpp_file.h>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#ifndef _SHARED_PTR_H
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
@ -93,22 +94,36 @@ FileGenerator::FileGenerator(const FileDescriptor* file, const Options& options)
|
||||
|
||||
FileGenerator::~FileGenerator() {}
|
||||
|
||||
void FileGenerator::GenerateHeader(io::Printer* printer) {
|
||||
GenerateTopHeaderGuard(printer);
|
||||
void FileGenerator::GenerateProtoHeader(io::Printer* printer) {
|
||||
if (!options_.proto_h) {
|
||||
return;
|
||||
}
|
||||
|
||||
string filename_identifier = FilenameIdentifier(file_->name());
|
||||
GenerateTopHeaderGuard(printer, filename_identifier);
|
||||
|
||||
|
||||
GenerateLibraryIncludes(printer);
|
||||
GenerateDependencyIncludes(printer);
|
||||
|
||||
for (int i = 0; i < file_->public_dependency_count(); i++) {
|
||||
const FileDescriptor* dep = file_->public_dependency(i);
|
||||
const char* extension = ".proto.h";
|
||||
string dependency = StripProto(dep->name()) + extension;
|
||||
printer->Print(
|
||||
"#include \"$dependency$\" // IWYU pragma: export\n",
|
||||
"dependency", dependency);
|
||||
}
|
||||
|
||||
printer->Print(
|
||||
"// @@protoc_insertion_point(includes)\n");
|
||||
|
||||
|
||||
GenerateForwardDeclarations(printer);
|
||||
|
||||
// Open namespace.
|
||||
GenerateNamespaceOpeners(printer);
|
||||
|
||||
GenerateGlobalStateFunctionDeclarations(printer);
|
||||
GenerateMessageForwardDeclarations(printer);
|
||||
|
||||
printer->Print("\n");
|
||||
|
||||
@ -133,6 +148,11 @@ void FileGenerator::GenerateHeader(io::Printer* printer) {
|
||||
|
||||
GenerateInlineFunctionDefinitions(printer);
|
||||
|
||||
printer->Print(
|
||||
"\n"
|
||||
"// @@protoc_insertion_point(namespace_scope)\n"
|
||||
"\n");
|
||||
|
||||
// Close up namespace.
|
||||
GenerateNamespaceClosers(printer);
|
||||
|
||||
@ -144,19 +164,89 @@ void FileGenerator::GenerateHeader(io::Printer* printer) {
|
||||
"// @@protoc_insertion_point(global_scope)\n"
|
||||
"\n");
|
||||
|
||||
GenerateBottomHeaderGuard(printer);
|
||||
GenerateBottomHeaderGuard(printer, filename_identifier);
|
||||
}
|
||||
|
||||
void FileGenerator::GeneratePBHeader(io::Printer* printer) {
|
||||
string filename_identifier =
|
||||
FilenameIdentifier(file_->name() + (options_.proto_h ? ".pb.h" : ""));
|
||||
GenerateTopHeaderGuard(printer, filename_identifier);
|
||||
|
||||
if (options_.proto_h) {
|
||||
printer->Print("#include \"$basename$.proto.h\" // IWYU pragma: export\n",
|
||||
"basename", StripProto(file_->name()));
|
||||
} else {
|
||||
GenerateLibraryIncludes(printer);
|
||||
}
|
||||
GenerateDependencyIncludes(printer);
|
||||
|
||||
printer->Print(
|
||||
"// @@protoc_insertion_point(includes)\n");
|
||||
|
||||
|
||||
|
||||
// Open namespace.
|
||||
GenerateNamespaceOpeners(printer);
|
||||
|
||||
if (!options_.proto_h) {
|
||||
GenerateGlobalStateFunctionDeclarations(printer);
|
||||
GenerateMessageForwardDeclarations(printer);
|
||||
|
||||
printer->Print("\n");
|
||||
|
||||
GenerateEnumDefinitions(printer);
|
||||
|
||||
printer->Print(kThickSeparator);
|
||||
printer->Print("\n");
|
||||
|
||||
GenerateMessageDefinitions(printer);
|
||||
|
||||
printer->Print("\n");
|
||||
printer->Print(kThickSeparator);
|
||||
printer->Print("\n");
|
||||
|
||||
GenerateServiceDefinitions(printer);
|
||||
|
||||
GenerateExtensionIdentifiers(printer);
|
||||
|
||||
printer->Print("\n");
|
||||
printer->Print(kThickSeparator);
|
||||
printer->Print("\n");
|
||||
|
||||
GenerateInlineFunctionDefinitions(printer);
|
||||
}
|
||||
|
||||
printer->Print(
|
||||
"\n"
|
||||
"// @@protoc_insertion_point(namespace_scope)\n");
|
||||
|
||||
// Close up namespace.
|
||||
GenerateNamespaceClosers(printer);
|
||||
|
||||
if (!options_.proto_h) {
|
||||
// We need to specialize some templates in the ::google::protobuf namespace:
|
||||
GenerateProto2NamespaceEnumSpecializations(printer);
|
||||
}
|
||||
|
||||
printer->Print(
|
||||
"\n"
|
||||
"// @@protoc_insertion_point(global_scope)\n"
|
||||
"\n");
|
||||
|
||||
GenerateBottomHeaderGuard(printer, filename_identifier);
|
||||
}
|
||||
|
||||
void FileGenerator::GenerateSource(io::Printer* printer) {
|
||||
string header =
|
||||
StripProto(file_->name()) + (options_.proto_h ? ".proto.h" : ".pb.h");
|
||||
printer->Print(
|
||||
"// Generated by the protocol buffer compiler. DO NOT EDIT!\n"
|
||||
"// source: $filename$\n"
|
||||
"\n"
|
||||
|
||||
// The generated code calls accessors that might be deprecated. We don't
|
||||
// want the compiler to warn in generated code.
|
||||
"#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION\n"
|
||||
"#include \"$basename$.pb.h\"\n"
|
||||
"#include \"$header$\"\n"
|
||||
"\n"
|
||||
"#include <algorithm>\n" // for swap()
|
||||
"\n"
|
||||
@ -165,7 +255,7 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
|
||||
"#include <google/protobuf/io/coded_stream.h>\n"
|
||||
"#include <google/protobuf/wire_format_lite_inl.h>\n",
|
||||
"filename", file_->name(),
|
||||
"basename", StripProto(file_->name()));
|
||||
"header", header);
|
||||
|
||||
// Unknown fields implementation in lite mode uses StringOutputStream
|
||||
if (!UseUnknownFieldSet(file_) && file_->message_type_count() > 0) {
|
||||
@ -181,6 +271,18 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
|
||||
"#include <google/protobuf/wire_format.h>\n");
|
||||
}
|
||||
|
||||
if (options_.proto_h) {
|
||||
// Use the smaller .proto.h files.
|
||||
for (int i = 0; i < file_->dependency_count(); i++) {
|
||||
const FileDescriptor* dep = file_->dependency(i);
|
||||
const char* extension = ".proto.h";
|
||||
string dependency = StripProto(dep->name()) + extension;
|
||||
printer->Print(
|
||||
"#include \"$dependency$\"\n",
|
||||
"dependency", dependency);
|
||||
}
|
||||
}
|
||||
|
||||
printer->Print(
|
||||
"// @@protoc_insertion_point(includes)\n");
|
||||
|
||||
@ -276,6 +378,59 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
|
||||
"// @@protoc_insertion_point(global_scope)\n");
|
||||
}
|
||||
|
||||
class FileGenerator::ForwardDeclarations {
|
||||
public:
|
||||
~ForwardDeclarations() {
|
||||
for (map<string, ForwardDeclarations *>::iterator it = namespaces_.begin(),
|
||||
end = namespaces_.end();
|
||||
it != end; ++it) {
|
||||
delete it->second;
|
||||
}
|
||||
namespaces_.clear();
|
||||
}
|
||||
|
||||
ForwardDeclarations* AddOrGetNamespace(const string& ns_name) {
|
||||
ForwardDeclarations*& ns = namespaces_[ns_name];
|
||||
if (ns == NULL) {
|
||||
ns = new ForwardDeclarations;
|
||||
}
|
||||
return ns;
|
||||
}
|
||||
|
||||
set<string>& classes() { return classes_; }
|
||||
set<string>& enums() { return enums_; }
|
||||
|
||||
void Print(io::Printer* printer) const {
|
||||
for (set<string>::const_iterator it = enums_.begin(), end = enums_.end();
|
||||
it != end; ++it) {
|
||||
printer->Print("enum $enumname$ : int;\n"
|
||||
"bool $enumname$_IsValid(int value);\n",
|
||||
"enumname", it->c_str());
|
||||
}
|
||||
for (set<string>::const_iterator it = classes_.begin(),
|
||||
end = classes_.end();
|
||||
it != end; ++it) {
|
||||
printer->Print("class $classname$;\n", "classname", it->c_str());
|
||||
}
|
||||
for (map<string, ForwardDeclarations *>::const_iterator
|
||||
it = namespaces_.begin(),
|
||||
end = namespaces_.end();
|
||||
it != end; ++it) {
|
||||
printer->Print("namespace $nsname$ {\n",
|
||||
"nsname", it->first);
|
||||
it->second->Print(printer);
|
||||
printer->Print("} // namespace $nsname$\n",
|
||||
"nsname", it->first);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private:
|
||||
map<string, ForwardDeclarations*> namespaces_;
|
||||
set<string> classes_;
|
||||
set<string> enums_;
|
||||
};
|
||||
|
||||
void FileGenerator::GenerateBuildDescriptors(io::Printer* printer) {
|
||||
// AddDescriptors() is a file-level procedure which adds the encoded
|
||||
// FileDescriptorProto for this .proto file to the global DescriptorPool for
|
||||
@ -434,12 +589,17 @@ void FileGenerator::GenerateBuildDescriptors(io::Printer* printer) {
|
||||
string file_data;
|
||||
file_proto.SerializeToString(&file_data);
|
||||
|
||||
#ifdef _MSC_VER
|
||||
bool breakdown_large_file = true;
|
||||
#else
|
||||
bool breakdown_large_file = false;
|
||||
#endif
|
||||
// Workaround for MSVC: "Error C1091: compiler limit: string exceeds 65535
|
||||
// bytes in length". Declare a static array of characters rather than use a
|
||||
// string literal.
|
||||
if (file_data.size() > 65535) {
|
||||
if (breakdown_large_file && file_data.size() > 65535) {
|
||||
printer->Print(
|
||||
"static const char descriptor[] = {\n");
|
||||
"static const char descriptor[] = {\n");
|
||||
printer->Indent();
|
||||
|
||||
// Only write 25 bytes per line.
|
||||
@ -447,26 +607,25 @@ void FileGenerator::GenerateBuildDescriptors(io::Printer* printer) {
|
||||
for (int i = 0; i < file_data.size();) {
|
||||
for (int j = 0; j < kBytesPerLine && i < file_data.size(); ++i, ++j) {
|
||||
printer->Print(
|
||||
"$char$, ",
|
||||
"char", SimpleItoa(file_data[i]));
|
||||
"$char$, ",
|
||||
"char", SimpleItoa(file_data[i]));
|
||||
}
|
||||
printer->Print(
|
||||
"\n");
|
||||
"\n");
|
||||
}
|
||||
|
||||
printer->Outdent();
|
||||
printer->Print(
|
||||
"};\n");
|
||||
"};\n");
|
||||
|
||||
printer->Print(
|
||||
"::google::protobuf::DescriptorPool::InternalAddGeneratedFile(descriptor, $size$);\n",
|
||||
"size", SimpleItoa(file_data.size()));
|
||||
"::google::protobuf::DescriptorPool::InternalAddGeneratedFile(descriptor, $size$);\n",
|
||||
"size", SimpleItoa(file_data.size()));
|
||||
|
||||
} else {
|
||||
|
||||
printer->Print(
|
||||
"::google::protobuf::DescriptorPool::InternalAddGeneratedFile(");
|
||||
|
||||
|
||||
// Only write 40 bytes per line.
|
||||
static const int kBytesPerLine = 40;
|
||||
for (int i = 0; i < file_data.size(); i += kBytesPerLine) {
|
||||
@ -474,11 +633,10 @@ void FileGenerator::GenerateBuildDescriptors(io::Printer* printer) {
|
||||
"data",
|
||||
EscapeTrigraphs(
|
||||
CEscape(file_data.substr(i, kBytesPerLine))));
|
||||
}
|
||||
printer->Print(
|
||||
", $size$);\n",
|
||||
}
|
||||
printer->Print(
|
||||
", $size$);\n",
|
||||
"size", SimpleItoa(file_data.size()));
|
||||
|
||||
}
|
||||
|
||||
// Call MessageFactory::InternalRegisterGeneratedFile().
|
||||
@ -548,8 +706,40 @@ void FileGenerator::GenerateNamespaceClosers(io::Printer* printer) {
|
||||
}
|
||||
}
|
||||
|
||||
void FileGenerator::GenerateTopHeaderGuard(io::Printer* printer) {
|
||||
string filename_identifier = FilenameIdentifier(file_->name());
|
||||
void FileGenerator::GenerateForwardDeclarations(io::Printer* printer) {
|
||||
ForwardDeclarations decls;
|
||||
for (int i = 0; i < file_->dependency_count(); i++) {
|
||||
FileGenerator dependency(file_->dependency(i), options_);
|
||||
dependency.FillForwardDeclarations(&decls);
|
||||
}
|
||||
FillForwardDeclarations(&decls);
|
||||
decls.Print(printer);
|
||||
}
|
||||
|
||||
void FileGenerator::FillForwardDeclarations(ForwardDeclarations* decls) {
|
||||
for (int i = 0; i < file_->public_dependency_count(); i++) {
|
||||
FileGenerator dependency(file_->public_dependency(i), options_);
|
||||
dependency.FillForwardDeclarations(decls);
|
||||
}
|
||||
for (int i = 0; i < package_parts_.size(); i++) {
|
||||
decls = decls->AddOrGetNamespace(package_parts_[i]);
|
||||
}
|
||||
// Generate enum definitions.
|
||||
for (int i = 0; i < file_->message_type_count(); i++) {
|
||||
message_generators_[i]->FillEnumForwardDeclarations(&decls->enums());
|
||||
}
|
||||
for (int i = 0; i < file_->enum_type_count(); i++) {
|
||||
enum_generators_[i]->FillForwardDeclaration(&decls->enums());
|
||||
}
|
||||
// Generate forward declarations of classes.
|
||||
for (int i = 0; i < file_->message_type_count(); i++) {
|
||||
message_generators_[i]->FillMessageForwardDeclarations(
|
||||
&decls->classes());
|
||||
}
|
||||
}
|
||||
|
||||
void FileGenerator::GenerateTopHeaderGuard(io::Printer* printer,
|
||||
const string& filename_identifier) {
|
||||
// Generate top of header.
|
||||
printer->Print(
|
||||
"// Generated by the protocol buffer compiler. DO NOT EDIT!\n"
|
||||
@ -564,8 +754,8 @@ void FileGenerator::GenerateTopHeaderGuard(io::Printer* printer) {
|
||||
"filename_identifier", filename_identifier);
|
||||
}
|
||||
|
||||
void FileGenerator::GenerateBottomHeaderGuard(io::Printer* printer) {
|
||||
string filename_identifier = FilenameIdentifier(file_->name());
|
||||
void FileGenerator::GenerateBottomHeaderGuard(
|
||||
io::Printer* printer, const string& filename_identifier) {
|
||||
printer->Print(
|
||||
"#endif // PROTOBUF_$filename_identifier$__INCLUDED\n",
|
||||
"filename_identifier", filename_identifier);
|
||||
@ -696,9 +886,13 @@ void FileGenerator::GenerateGlobalStateFunctionDeclarations(
|
||||
}
|
||||
|
||||
void FileGenerator::GenerateMessageForwardDeclarations(io::Printer* printer) {
|
||||
// Generate forward declarations of classes.
|
||||
set<string> classes;
|
||||
for (int i = 0; i < file_->message_type_count(); i++) {
|
||||
message_generators_[i]->GenerateMessageForwardDeclaration(printer);
|
||||
message_generators_[i]->FillMessageForwardDeclarations(&classes);
|
||||
}
|
||||
for (set<string>::const_iterator it = classes.begin(), end = classes.end();
|
||||
it != end; ++it) {
|
||||
printer->Print("class $classname$;\n", "classname", it->c_str());
|
||||
}
|
||||
}
|
||||
|
||||
@ -804,10 +998,6 @@ void FileGenerator::GenerateInlineFunctionDefinitions(io::Printer* printer) {
|
||||
// Methods of the dependent base class must always be inline in the header.
|
||||
message_generators_[i]->GenerateDependentInlineMethods(printer);
|
||||
}
|
||||
|
||||
printer->Print(
|
||||
"\n"
|
||||
"// @@protoc_insertion_point(namespace_scope)\n");
|
||||
}
|
||||
|
||||
void FileGenerator::GenerateProto2NamespaceEnumSpecializations(
|
||||
|
@ -69,10 +69,14 @@ class FileGenerator {
|
||||
const Options& options);
|
||||
~FileGenerator();
|
||||
|
||||
void GenerateHeader(io::Printer* printer);
|
||||
void GenerateProtoHeader(io::Printer* printer);
|
||||
void GeneratePBHeader(io::Printer* printer);
|
||||
void GenerateSource(io::Printer* printer);
|
||||
|
||||
private:
|
||||
// Internal type used by GenerateForwardDeclarations (defined in file.cc).
|
||||
class ForwardDeclarations;
|
||||
|
||||
// Generate the BuildDescriptors() procedure, which builds all descriptors
|
||||
// for types defined in the file.
|
||||
void GenerateBuildDescriptors(io::Printer* printer);
|
||||
@ -80,9 +84,19 @@ class FileGenerator {
|
||||
void GenerateNamespaceOpeners(io::Printer* printer);
|
||||
void GenerateNamespaceClosers(io::Printer* printer);
|
||||
|
||||
// For other imports, generates their forward-declarations.
|
||||
void GenerateForwardDeclarations(io::Printer* printer);
|
||||
|
||||
// Internal helper used by GenerateForwardDeclarations: fills 'decls'
// with all necessary forward-declarations for this file and its
// transitive dependencies.
|
||||
void FillForwardDeclarations(ForwardDeclarations* decls);
|
||||
|
||||
// Generates top or bottom of a header file.
|
||||
void GenerateTopHeaderGuard(io::Printer* printer);
|
||||
void GenerateBottomHeaderGuard(io::Printer* printer);
|
||||
void GenerateTopHeaderGuard(io::Printer* printer,
|
||||
const string& filename_identifier);
|
||||
void GenerateBottomHeaderGuard(io::Printer* printer,
|
||||
const string& filename_identifier);
|
||||
|
||||
// Generates #include directives.
|
||||
void GenerateLibraryIncludes(io::Printer* printer);
|
||||
@ -92,10 +106,20 @@ class FileGenerator {
|
||||
void GenerateGlobalStateFunctionDeclarations(io::Printer* printer);
|
||||
|
||||
// Generates types for classes.
|
||||
void GenerateMessageForwardDeclarations(io::Printer* printer);
|
||||
void GenerateMessageDefinitions(io::Printer* printer);
|
||||
|
||||
// Generates forward-declarations for just this file's classes. This is
|
||||
// used for .pb.h headers, but not in proto_h mode.
|
||||
void GenerateMessageForwardDeclarations(io::Printer* printer);
|
||||
|
||||
// Fills in types for forward declarations. This is used internally, and
|
||||
// also by other FileGenerators to determine imports' declarations.
|
||||
void FillMessageForwardDeclarations(ForwardDeclarations* decls);
|
||||
void FillMessageDefinitions(ForwardDeclarations* decls);
|
||||
|
||||
// Generates enum definitions.
|
||||
void GenerateEnumForwardDeclarations(io::Printer* printer);
|
||||
void FillEnumForwardDeclarations(ForwardDeclarations* decls);
|
||||
void GenerateEnumDefinitions(io::Printer* printer);
|
||||
|
||||
// Generates generic service definitions.
|
||||
|
@ -100,16 +100,23 @@ bool CppGenerator::Generate(const FileDescriptor* file,
|
||||
|
||||
|
||||
string basename = StripProto(file->name());
|
||||
basename.append(".pb");
|
||||
|
||||
FileGenerator file_generator(file, file_options);
|
||||
|
||||
// Generate header.
|
||||
// Generate header(s).
|
||||
if (file_options.proto_h) {
|
||||
google::protobuf::scoped_ptr<io::ZeroCopyOutputStream> output(
|
||||
generator_context->Open(basename + ".proto.h"));
|
||||
io::Printer printer(output.get(), '$');
|
||||
file_generator.GenerateProtoHeader(&printer);
|
||||
}
|
||||
|
||||
basename.append(".pb");
|
||||
{
|
||||
google::protobuf::scoped_ptr<io::ZeroCopyOutputStream> output(
|
||||
generator_context->Open(basename + ".h"));
|
||||
io::Printer printer(output.get(), '$');
|
||||
file_generator.GenerateHeader(&printer);
|
||||
file_generator.GeneratePBHeader(&printer);
|
||||
}
|
||||
|
||||
// Generate cc file.
|
||||
|
@ -39,6 +39,7 @@
|
||||
|
||||
#include <google/protobuf/compiler/cpp/cpp_helpers.h>
|
||||
#include <google/protobuf/io/printer.h>
|
||||
#include <google/protobuf/stubs/logging.h>
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/strutil.h>
|
||||
#include <google/protobuf/stubs/substitute.h>
|
||||
@ -68,7 +69,7 @@ const char* const kKeywordList[] = {
|
||||
"constexpr", "const_cast", "continue", "decltype", "default", "delete", "do",
|
||||
"double", "dynamic_cast", "else", "enum", "explicit", "extern", "false",
|
||||
"float", "for", "friend", "goto", "if", "inline", "int", "long", "mutable",
|
||||
"namespace", "new", "noexcept", "not", "not_eq", "nullptr", "operator", "or",
|
||||
"namespace", "new", "noexcept", "not", "not_eq", "NULL", "operator", "or",
|
||||
"or_eq", "private", "protected", "public", "register", "reinterpret_cast",
|
||||
"return", "short", "signed", "sizeof", "static", "static_assert",
|
||||
"static_cast", "struct", "switch", "template", "this", "thread_local",
|
||||
@ -174,6 +175,14 @@ string SuperClassName(const Descriptor* descriptor) {
|
||||
"::google::protobuf::Message" : "::google::protobuf::MessageLite";
|
||||
}
|
||||
|
||||
string DependentBaseDownCast() {
|
||||
return "reinterpret_cast<T*>(this)->";
|
||||
}
|
||||
|
||||
string DependentBaseConstDownCast() {
|
||||
return "reinterpret_cast<const T*>(this)->";
|
||||
}
|
||||
|
||||
string FieldName(const FieldDescriptor* field) {
|
||||
string result = field->name();
|
||||
LowerString(&result);
|
||||
@ -208,6 +217,19 @@ string FieldConstantName(const FieldDescriptor *field) {
|
||||
}
|
||||
|
||||
bool IsFieldDependent(const FieldDescriptor* field) {
|
||||
if (field->containing_oneof() != NULL &&
|
||||
field->cpp_type() == FieldDescriptor::CPPTYPE_STRING) {
|
||||
return true;
|
||||
}
|
||||
if (field->is_map()) {
|
||||
const Descriptor* map_descriptor = field->message_type();
|
||||
for (int i = 0; i < map_descriptor->field_count(); i++) {
|
||||
if (IsFieldDependent(map_descriptor->field(i))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
if (field->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) {
|
||||
return false;
|
||||
}
|
||||
@ -578,6 +600,94 @@ bool IsAnyMessage(const Descriptor* descriptor) {
|
||||
descriptor->file()->name() == kAnyProtoFile;
|
||||
}
|
||||
|
||||
enum Utf8CheckMode {
|
||||
STRICT = 0, // Parsing will fail if non UTF-8 data is in string fields.
|
||||
VERIFY = 1, // Only log an error but parsing will succeed.
|
||||
NONE = 2, // No UTF-8 check.
|
||||
};
|
||||
|
||||
// Which level of UTF-8 enforcement is placed on this file.
|
||||
static Utf8CheckMode GetUtf8CheckMode(const FieldDescriptor* field) {
|
||||
if (field->file()->syntax() == FileDescriptor::SYNTAX_PROTO3) {
|
||||
return STRICT;
|
||||
} else if (field->file()->options().optimize_for() !=
|
||||
FileOptions::LITE_RUNTIME) {
|
||||
return VERIFY;
|
||||
} else {
|
||||
return NONE;
|
||||
}
|
||||
}
|
||||
|
||||
static void GenerateUtf8CheckCode(const FieldDescriptor* field,
|
||||
bool for_parse,
|
||||
const map<string, string>& variables,
|
||||
const char* parameters,
|
||||
const char* strict_function,
|
||||
const char* verify_function,
|
||||
io::Printer* printer) {
|
||||
switch (GetUtf8CheckMode(field)) {
|
||||
case STRICT: {
|
||||
if (for_parse) {
|
||||
printer->Print("DO_(");
|
||||
}
|
||||
printer->Print(
|
||||
"::google::protobuf::internal::WireFormatLite::$function$(\n",
|
||||
"function", strict_function);
|
||||
printer->Indent();
|
||||
printer->Print(variables, parameters);
|
||||
if (for_parse) {
|
||||
printer->Print("::google::protobuf::internal::WireFormatLite::PARSE,\n");
|
||||
} else {
|
||||
printer->Print("::google::protobuf::internal::WireFormatLite::SERIALIZE,\n");
|
||||
}
|
||||
printer->Print("\"$full_name$\")", "full_name", field->full_name());
|
||||
if (for_parse) {
|
||||
printer->Print(")");
|
||||
}
|
||||
printer->Print(";\n");
|
||||
printer->Outdent();
|
||||
break;
|
||||
}
|
||||
case VERIFY: {
|
||||
printer->Print(
|
||||
"::google::protobuf::internal::WireFormat::$function$(\n",
|
||||
"function", verify_function);
|
||||
printer->Indent();
|
||||
printer->Print(variables, parameters);
|
||||
if (for_parse) {
|
||||
printer->Print("::google::protobuf::internal::WireFormat::PARSE,\n");
} else {
printer->Print("::google::protobuf::internal::WireFormat::SERIALIZE,\n");
}
printer->Print("\"$full_name$\");\n", "full_name", field->full_name());
printer->Outdent();
break;
}
case NONE:
break;
}
}
void GenerateUtf8CheckCodeForString(const FieldDescriptor* field,
bool for_parse,
const map<string, string>& variables,
const char* parameters,
io::Printer* printer) {
GenerateUtf8CheckCode(field, for_parse, variables, parameters,
"VerifyUtf8String", "VerifyUTF8StringNamedField",
printer);
}
void GenerateUtf8CheckCodeForCord(const FieldDescriptor* field,
bool for_parse,
const map<string, string>& variables,
const char* parameters,
io::Printer* printer) {
GenerateUtf8CheckCode(field, for_parse, variables, parameters,
"VerifyUtf8Cord", "VerifyUTF8CordNamedField",
printer);
}
} // namespace cpp
} // namespace compiler
} // namespace protobuf
@ -70,8 +70,15 @@ string ClassName(const EnumDescriptor* enum_descriptor, bool qualified);
|
||||
// This is a class name, like "ProtoName_InternalBase".
|
||||
string DependentBaseClassTemplateName(const Descriptor* descriptor);
|
||||
|
||||
// Name of the base class: either the dependent base class (for use with
|
||||
// proto_h) or google::protobuf::Message.
|
||||
string SuperClassName(const Descriptor* descriptor);
|
||||
|
||||
// Returns a string that down-casts from the dependent base class to the
|
||||
// derived class.
|
||||
string DependentBaseDownCast();
|
||||
string DependentBaseConstDownCast();
|
||||
|
||||
// Get the (unqualified) name that should be used for this field in C++ code.
|
||||
// The name is coerced to lower-case to emulate proto1 behavior. People
|
||||
// should be using lowercase-with-underscores style for proto field names
|
||||
@ -195,11 +202,6 @@ inline bool HasGenericServices(const FileDescriptor* file) {
|
||||
file->options().cc_generic_services();
|
||||
}
|
||||
|
||||
// Should string fields in this file verify that their contents are UTF-8?
|
||||
inline bool HasUtf8Verification(const FileDescriptor* file) {
|
||||
return file->options().optimize_for() != FileOptions::LITE_RUNTIME;
|
||||
}
|
||||
|
||||
// Should we generate a separate, super-optimized code path for serializing to
|
||||
// flat arrays? We don't do this in Lite mode because we'd rather reduce code
|
||||
// size.
|
||||
@ -263,6 +265,20 @@ inline bool SupportsArenas(const FieldDescriptor* field) {
|
||||
bool IsAnyMessage(const FileDescriptor* descriptor);
|
||||
bool IsAnyMessage(const Descriptor* descriptor);
|
||||
|
||||
void GenerateUtf8CheckCodeForString(
|
||||
const FieldDescriptor* field,
|
||||
bool for_parse,
|
||||
const map<string, string>& variables,
|
||||
const char* parameters,
|
||||
io::Printer* printer);
|
||||
|
||||
void GenerateUtf8CheckCodeForCord(
|
||||
const FieldDescriptor* field,
|
||||
bool for_parse,
|
||||
const map<string, string>& variables,
|
||||
const char* parameters,
|
||||
io::Printer* printer);
|
||||
|
||||
} // namespace cpp
|
||||
} // namespace compiler
|
||||
} // namespace protobuf
|
||||
|
@ -100,8 +100,9 @@ void SetMessageVariables(const FieldDescriptor* descriptor,
|
||||
|
||||
MapFieldGenerator::
|
||||
MapFieldGenerator(const FieldDescriptor* descriptor,
|
||||
const Options& options)
|
||||
: descriptor_(descriptor) {
|
||||
const Options& options)
|
||||
: descriptor_(descriptor),
|
||||
dependent_field_(options.proto_h && IsFieldDependent(descriptor)) {
|
||||
SetMessageVariables(descriptor, &variables_, options);
|
||||
}
|
||||
|
||||
@ -152,7 +153,9 @@ GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
|
||||
void MapFieldGenerator::
|
||||
GenerateClearingCode(io::Printer* printer) const {
|
||||
printer->Print(variables_, "$name$_.Clear();\n");
|
||||
map<string, string> variables(variables_);
|
||||
variables["this_message"] = dependent_field_ ? DependentBaseDownCast() : "";
|
||||
printer->Print(variables, "$this_message$$name$_.Clear();\n");
|
||||
}
|
||||
|
||||
void MapFieldGenerator::
|
||||
@ -231,6 +234,20 @@ GenerateMergeFromCodedStream(io::Printer* printer) const {
|
||||
"}\n");
|
||||
}
|
||||
|
||||
const FieldDescriptor* key_field =
|
||||
descriptor_->message_type()->FindFieldByName("key");
|
||||
if (key_field->type() == FieldDescriptor::TYPE_STRING) {
|
||||
GenerateUtf8CheckCodeForString(
|
||||
key_field, true, variables_,
|
||||
"entry->key().data(), entry->key().length(),\n", printer);
|
||||
}
|
||||
if (value_field->type() == FieldDescriptor::TYPE_STRING) {
|
||||
GenerateUtf8CheckCodeForString(
|
||||
value_field, true, variables_,
|
||||
"entry->mutable_value()->data(),\n"
|
||||
"entry->mutable_value()->length(),\n", printer);
|
||||
}
|
||||
|
||||
// If the entry is allocated by an arena, its destructor should be avoided.
|
||||
if (SupportsArenas(descriptor_)) {
|
||||
printer->Print(variables_,
|
||||
@ -258,7 +275,30 @@ GenerateSerializeWithCachedSizes(io::Printer* printer) const {
|
||||
printer->Print(variables_,
|
||||
" entry.reset($name$_.New$wrapper$(it->first, it->second));\n"
|
||||
" ::google::protobuf::internal::WireFormatLite::Write$stream_writer$(\n"
|
||||
" $number$, *entry, output);\n"
|
||||
" $number$, *entry, output);\n");
|
||||
|
||||
printer->Indent();
|
||||
printer->Indent();
|
||||
|
||||
const FieldDescriptor* key_field =
|
||||
descriptor_->message_type()->FindFieldByName("key");
|
||||
const FieldDescriptor* value_field =
|
||||
descriptor_->message_type()->FindFieldByName("value");
|
||||
if (key_field->type() == FieldDescriptor::TYPE_STRING) {
|
||||
GenerateUtf8CheckCodeForString(
|
||||
key_field, false, variables_,
|
||||
"it->first.data(), it->first.length(),\n", printer);
|
||||
}
|
||||
if (value_field->type() == FieldDescriptor::TYPE_STRING) {
|
||||
GenerateUtf8CheckCodeForString(
|
||||
value_field, false, variables_,
|
||||
"it->second.data(), it->second.length(),\n", printer);
|
||||
}
|
||||
|
||||
printer->Outdent();
|
||||
printer->Outdent();
|
||||
|
||||
printer->Print(
|
||||
" }\n");
|
||||
|
||||
// If the entry is allocated by an arena, its destructor should be avoided.
|
||||
@ -293,7 +333,29 @@ GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const {
|
||||
" entry.reset($name$_.New$wrapper$(it->first, it->second));\n"
|
||||
" target = ::google::protobuf::internal::WireFormatLite::\n"
|
||||
" Write$declared_type$NoVirtualToArray(\n"
|
||||
" $number$, *entry, target);\n"
|
||||
" $number$, *entry, target);\n");
|
||||
|
||||
printer->Indent();
|
||||
printer->Indent();
|
||||
|
||||
const FieldDescriptor* key_field =
|
||||
descriptor_->message_type()->FindFieldByName("key");
|
||||
const FieldDescriptor* value_field =
|
||||
descriptor_->message_type()->FindFieldByName("value");
|
||||
if (key_field->type() == FieldDescriptor::TYPE_STRING) {
|
||||
GenerateUtf8CheckCodeForString(
|
||||
key_field, false, variables_,
|
||||
"it->first.data(), it->first.length(),\n", printer);
|
||||
}
|
||||
if (value_field->type() == FieldDescriptor::TYPE_STRING) {
|
||||
GenerateUtf8CheckCodeForString(
|
||||
value_field, false, variables_,
|
||||
"it->second.data(), it->second.length(),\n", printer);
|
||||
}
|
||||
|
||||
printer->Outdent();
|
||||
printer->Outdent();
|
||||
printer->Print(
|
||||
" }\n");
|
||||
|
||||
// If the entry is allocated by an arena, its destructor should be avoided.
|
||||
|
@ -63,6 +63,7 @@ class MapFieldGenerator : public FieldGenerator {
|
||||
|
||||
private:
|
||||
const FieldDescriptor* descriptor_;
|
||||
const bool dependent_field_;
|
||||
map<string, string> variables_;
|
||||
|
||||
GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MapFieldGenerator);
|
||||
|
@ -39,7 +39,6 @@
|
||||
#ifndef _SHARED_PTR_H
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
#endif
|
||||
#include <set>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
#include <google/protobuf/compiler/cpp/cpp_message.h>
|
||||
@ -415,31 +414,34 @@ MessageGenerator::MessageGenerator(const Descriptor* descriptor,
|
||||
use_dependent_base_ = true;
|
||||
}
|
||||
}
|
||||
if (options.proto_h && descriptor->oneof_decl_count() > 0) {
|
||||
// Always make oneofs dependent.
|
||||
use_dependent_base_ = true;
|
||||
}
|
||||
}
|
||||
|
||||
MessageGenerator::~MessageGenerator() {}
|
||||
|
||||
void MessageGenerator::
|
||||
GenerateMessageForwardDeclaration(io::Printer* printer) {
|
||||
printer->Print("class $classname$;\n",
|
||||
"classname", classname_);
|
||||
FillMessageForwardDeclarations(set<string>* class_names) {
|
||||
class_names->insert(classname_);
|
||||
|
||||
for (int i = 0; i < descriptor_->nested_type_count(); i++) {
|
||||
// A map entry message doesn't need a forward declaration. Since a map entry
|
||||
// message cannot be a top level class, we just need to avoid calling
|
||||
// GenerateForwardDeclaration here.
|
||||
if (IsMapEntryMessage(descriptor_->nested_type(i))) continue;
|
||||
nested_generators_[i]->GenerateMessageForwardDeclaration(printer);
|
||||
nested_generators_[i]->FillMessageForwardDeclarations(class_names);
|
||||
}
|
||||
}
|
||||
|
||||
void MessageGenerator::
|
||||
GenerateEnumForwardDeclaration(io::Printer* printer) {
|
||||
FillEnumForwardDeclarations(set<string>* enum_names) {
|
||||
for (int i = 0; i < descriptor_->nested_type_count(); i++) {
|
||||
nested_generators_[i]->GenerateEnumForwardDeclaration(printer);
|
||||
nested_generators_[i]->FillEnumForwardDeclarations(enum_names);
|
||||
}
|
||||
for (int i = 0; i < descriptor_->enum_type_count(); i++) {
|
||||
enum_generators_[i]->GenerateForwardDeclaration(printer);
|
||||
enum_generators_[i]->FillForwardDeclaration(enum_names);
|
||||
}
|
||||
}
|
||||
|
||||
@ -484,13 +486,6 @@ GenerateDependentFieldAccessorDeclarations(io::Printer* printer) {
|
||||
field_generators_.get(field).GenerateDependentAccessorDeclarations(printer);
|
||||
printer->Print("\n");
|
||||
}
|
||||
for (int i = 0; i < descriptor_->oneof_decl_count(); i++) {
|
||||
const OneofDescriptor* oneof = descriptor_->oneof_decl(i);
|
||||
PrintFieldComment(printer, oneof);
|
||||
printer->Print(
|
||||
"void clear_$oneof_name$();\n",
|
||||
"oneof_name", oneof->name());
|
||||
}
|
||||
}
|
||||
|
||||
void MessageGenerator::
|
||||
@ -505,7 +500,9 @@ GenerateFieldAccessorDeclarations(io::Printer* printer) {
|
||||
vars["constant_name"] = FieldConstantName(field);
|
||||
|
||||
bool dependent_field = use_dependent_base_ && IsFieldDependent(field);
|
||||
if (dependent_field) {
|
||||
if (dependent_field &&
|
||||
field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE &&
|
||||
!field->is_map()) {
|
||||
// If this field is dependent, the dependent base class determines
|
||||
// the message type from the derived class (which is a template
|
||||
// parameter). This typedef is for that:
|
||||
@ -594,8 +591,8 @@ GenerateDependentFieldAccessorDefinitions(io::Printer* printer) {
|
||||
vars["tmpl"] = "template<class T>\n";
|
||||
vars["dependent_classname"] =
|
||||
DependentBaseClassTemplateName(descriptor_) + "<T>";
|
||||
vars["this_message"] = "reinterpret_cast<T*>(this)->";
|
||||
vars["this_const_message"] = "reinterpret_cast<const T*>(this)->";
|
||||
vars["this_message"] = DependentBaseDownCast();
|
||||
vars["this_const_message"] = DependentBaseConstDownCast();
|
||||
GenerateFieldClear(field, vars, printer);
|
||||
}
|
||||
|
||||
@ -721,13 +718,15 @@ GenerateFieldClear(const FieldDescriptor* field,
|
||||
printer->Print(vars,
|
||||
"if ($this_message$has_$name$()) {\n");
|
||||
printer->Indent();
|
||||
field_generators_.get(field).GenerateClearingCode(printer);
|
||||
field_generators_.get(field)
|
||||
.GenerateClearingCode(printer);
|
||||
printer->Print(vars,
|
||||
"$this_message$clear_has_$oneof_name$();\n");
|
||||
printer->Outdent();
|
||||
printer->Print("}\n");
|
||||
} else {
|
||||
field_generators_.get(field).GenerateClearingCode(printer);
|
||||
field_generators_.get(field)
|
||||
.GenerateClearingCode(printer);
|
||||
if (HasFieldPresence(descriptor_->file())) {
|
||||
if (!field->is_repeated()) {
|
||||
printer->Print(vars,
|
||||
@ -752,6 +751,18 @@ GenerateFieldAccessorDefinitions(io::Printer* printer, bool is_inline) {
|
||||
map<string, string> vars;
|
||||
SetCommonFieldVariables(field, &vars, options_);
|
||||
vars["inline"] = is_inline ? "inline " : "";
|
||||
if (use_dependent_base_ && IsFieldDependent(field)) {
|
||||
vars["tmpl"] = "template<class T>\n";
|
||||
vars["dependent_classname"] =
|
||||
DependentBaseClassTemplateName(descriptor_) + "<T>";
|
||||
vars["this_message"] = "reinterpret_cast<T*>(this)->";
|
||||
vars["this_const_message"] = "reinterpret_cast<const T*>(this)->";
|
||||
} else {
|
||||
vars["tmpl"] = "";
|
||||
vars["dependent_classname"] = vars["classname"];
|
||||
vars["this_message"] = "";
|
||||
vars["this_const_message"] = "";
|
||||
}
|
||||
|
||||
// Generate has_$name$() or $name$_size().
|
||||
if (field->is_repeated()) {
|
||||
@ -775,10 +786,6 @@ GenerateFieldAccessorDefinitions(io::Printer* printer, bool is_inline) {
|
||||
}
|
||||
|
||||
if (!use_dependent_base_ || !IsFieldDependent(field)) {
|
||||
vars["tmpl"] = "";
|
||||
vars["dependent_classname"] = vars["classname"];
|
||||
vars["this_message"] = "";
|
||||
vars["this_const_message"] = "";
|
||||
GenerateFieldClear(field, vars, printer);
|
||||
}
|
||||
|
||||
@ -915,15 +922,32 @@ GenerateClassDefinition(io::Printer* printer) {
|
||||
"}\n"
|
||||
"\n");
|
||||
} else {
|
||||
printer->Print(
|
||||
"inline const ::std::string& unknown_fields() const {\n"
|
||||
" return _unknown_fields_;\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"inline ::std::string* mutable_unknown_fields() {\n"
|
||||
" return &_unknown_fields_;\n"
|
||||
"}\n"
|
||||
"\n");
|
||||
if (SupportsArenas(descriptor_)) {
|
||||
printer->Print(
|
||||
"inline const ::std::string& unknown_fields() const {\n"
|
||||
" return _unknown_fields_.Get(\n"
|
||||
" &::google::protobuf::internal::GetEmptyStringAlreadyInited());\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"inline ::std::string* mutable_unknown_fields() {\n"
|
||||
" return _unknown_fields_.Mutable(\n"
|
||||
" &::google::protobuf::internal::GetEmptyStringAlreadyInited(),\n"
|
||||
" GetArenaNoVirtual());\n"
|
||||
"}\n"
|
||||
"\n");
|
||||
} else {
|
||||
printer->Print(
|
||||
"inline const ::std::string& unknown_fields() const {\n"
|
||||
" return _unknown_fields_.GetNoArena(\n"
|
||||
" &::google::protobuf::internal::GetEmptyStringAlreadyInited());\n"
|
||||
"}\n"
|
||||
"\n"
|
||||
"inline ::std::string* mutable_unknown_fields() {\n"
|
||||
" return _unknown_fields_.MutableNoArena(\n"
|
||||
" &::google::protobuf::internal::GetEmptyStringAlreadyInited());\n"
|
||||
"}\n"
|
||||
"\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1068,6 +1092,10 @@ GenerateClassDefinition(io::Printer* printer) {
|
||||
}
|
||||
}
|
||||
uses_string_ = false;
|
||||
if (PreserveUnknownFields(descriptor_) &&
|
||||
!UseUnknownFieldSet(descriptor_->file())) {
|
||||
uses_string_ = true;
|
||||
}
|
||||
for (int i = 0; i < descriptors.size(); i++) {
|
||||
const FieldDescriptor* field = descriptors[i];
|
||||
if (field->cpp_type() == FieldDescriptor::CPPTYPE_STRING) {
|
||||
@ -1201,18 +1229,11 @@ GenerateClassDefinition(io::Printer* printer) {
|
||||
|
||||
// Generate oneof function declarations
|
||||
for (int i = 0; i < descriptor_->oneof_decl_count(); i++) {
|
||||
if (use_dependent_base_) {
|
||||
printer->Print(
|
||||
"inline bool has_$oneof_name$() const;\n"
|
||||
"inline void clear_has_$oneof_name$();\n\n",
|
||||
"oneof_name", descriptor_->oneof_decl(i)->name());
|
||||
} else {
|
||||
printer->Print(
|
||||
"inline bool has_$oneof_name$() const;\n"
|
||||
"void clear_$oneof_name$();\n"
|
||||
"inline void clear_has_$oneof_name$();\n\n",
|
||||
"oneof_name", descriptor_->oneof_decl(i)->name());
|
||||
}
|
||||
printer->Print(
|
||||
"inline bool has_$oneof_name$() const;\n"
|
||||
"void clear_$oneof_name$();\n"
|
||||
"inline void clear_has_$oneof_name$();\n\n",
|
||||
"oneof_name", descriptor_->oneof_decl(i)->name());
|
||||
}
|
||||
|
||||
if (HasGeneratedMethods(descriptor_->file()) &&
|
||||
@ -1262,7 +1283,7 @@ GenerateClassDefinition(io::Printer* printer) {
|
||||
"::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;\n");
|
||||
} else {
|
||||
printer->Print(
|
||||
"::std::string _unknown_fields_;\n"
|
||||
"::google::protobuf::internal::ArenaStringPtr _unknown_fields_;\n"
|
||||
"::google::protobuf::Arena* _arena_ptr_;\n"
|
||||
"\n");
|
||||
}
|
||||
@ -1919,6 +1940,13 @@ GenerateSharedConstructorCode(io::Printer* printer) {
|
||||
uses_string_ ? "::google::protobuf::internal::GetEmptyString();\n" : "",
|
||||
"_cached_size_ = 0;\n").c_str());
|
||||
|
||||
if (PreserveUnknownFields(descriptor_) &&
|
||||
!UseUnknownFieldSet(descriptor_->file())) {
|
||||
printer->Print(
|
||||
"_unknown_fields_.UnsafeSetDefault(\n"
|
||||
" &::google::protobuf::internal::GetEmptyStringAlreadyInited());\n");
|
||||
}
|
||||
|
||||
for (int i = 0; i < descriptor_->field_count(); i++) {
|
||||
if (!descriptor_->field(i)->containing_oneof()) {
|
||||
field_generators_.get(descriptor_->field(i))
|
||||
@ -1955,6 +1983,22 @@ GenerateSharedDestructorCode(io::Printer* printer) {
|
||||
"}\n"
|
||||
"\n");
|
||||
}
|
||||
|
||||
// Write the destructor for _unknown_fields_ in the lite runtime.
|
||||
if (PreserveUnknownFields(descriptor_) &&
|
||||
!UseUnknownFieldSet(descriptor_->file())) {
|
||||
if (SupportsArenas(descriptor_)) {
|
||||
printer->Print(
|
||||
"_unknown_fields_.Destroy(\n"
|
||||
" &::google::protobuf::internal::GetEmptyStringAlreadyInited(),\n"
|
||||
" GetArenaNoVirtual());\n");
|
||||
} else {
|
||||
printer->Print(
|
||||
"_unknown_fields_.DestroyNoArena(\n"
|
||||
" &::google::protobuf::internal::GetEmptyStringAlreadyInited());\n");
|
||||
}
|
||||
}
|
||||
|
||||
// Write the destructors for each field except oneof members.
|
||||
for (int i = 0; i < descriptor_->field_count(); i++) {
|
||||
if (!descriptor_->field(i)->containing_oneof()) {
|
||||
@ -2463,8 +2507,16 @@ GenerateClear(io::Printer* printer) {
|
||||
" mutable_unknown_fields()->Clear();\n"
|
||||
"}\n");
|
||||
} else {
|
||||
printer->Print(
|
||||
"mutable_unknown_fields()->clear();\n");
|
||||
if (SupportsArenas(descriptor_)) {
|
||||
printer->Print(
|
||||
"_unknown_fields_.ClearToEmpty(\n"
|
||||
" &::google::protobuf::internal::GetEmptyStringAlreadyInited(),\n"
|
||||
" GetArenaNoVirtual());\n");
|
||||
} else {
|
||||
printer->Print(
|
||||
"_unknown_fields_.ClearToEmptyNoArena(\n"
|
||||
" &::google::protobuf::internal::GetEmptyStringAlreadyInited());\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2481,33 +2533,22 @@ GenerateOneofClear(io::Printer* printer) {
|
||||
oneof_vars["oneofname"] = descriptor_->oneof_decl(i)->name();
|
||||
string message_class;
|
||||
|
||||
if (use_dependent_base_) {
|
||||
oneof_vars["tmpl"] = "template<class T>\n";
|
||||
oneof_vars["inline"] = "inline ";
|
||||
oneof_vars["dependent_classname"] =
|
||||
DependentBaseClassTemplateName(descriptor_) + "<T>";
|
||||
oneof_vars["this_message"] = "reinterpret_cast<T*>(this)->";
|
||||
message_class = "T::";
|
||||
} else {
|
||||
oneof_vars["tmpl"] = "";
|
||||
oneof_vars["inline"] = "";
|
||||
oneof_vars["dependent_classname"] = classname_;
|
||||
oneof_vars["this_message"] = "";
|
||||
}
|
||||
|
||||
printer->Print(oneof_vars,
|
||||
"$tmpl$"
|
||||
"$inline$"
|
||||
"void $dependent_classname$::clear_$oneofname$() {\n");
|
||||
"void $classname$::clear_$oneofname$() {\n");
|
||||
printer->Indent();
|
||||
// In .proto.h mode, fields with a dependent type will generate
|
||||
// clearing code that down casts from the dependent base class.
|
||||
// However, clear_oneof() methods are always in the .cc file, and thus
|
||||
// must remain in the derived base. So, to make the clearing code work,
|
||||
// we add a typedef so that the down cast works (it will be a no-op).
|
||||
printer->Print(oneof_vars,
|
||||
"switch($this_message$$oneofname$_case()) {\n");
|
||||
"typedef $classname$ T;\n"
|
||||
"switch($oneofname$_case()) {\n");
|
||||
printer->Indent();
|
||||
for (int j = 0; j < descriptor_->oneof_decl(i)->field_count(); j++) {
|
||||
const FieldDescriptor* field = descriptor_->oneof_decl(i)->field(j);
|
||||
printer->Print(
|
||||
"case $message_class$k$field_name$: {\n",
|
||||
"message_class", message_class,
|
||||
"case k$field_name$: {\n",
|
||||
"field_name", UnderscoresToCamelCase(field->name(), true));
|
||||
printer->Indent();
|
||||
// We clear only allocated objects in oneofs
|
||||
@ -2524,20 +2565,16 @@ GenerateOneofClear(io::Printer* printer) {
|
||||
"}\n");
|
||||
}
|
||||
printer->Print(
|
||||
"case $message_class$$cap_oneof_name$_NOT_SET: {\n"
|
||||
"case $cap_oneof_name$_NOT_SET: {\n"
|
||||
" break;\n"
|
||||
"}\n",
|
||||
"message_class", message_class,
|
||||
"cap_oneof_name",
|
||||
ToUpper(descriptor_->oneof_decl(i)->name()));
|
||||
printer->Outdent();
|
||||
printer->Print(
|
||||
"}\n"
|
||||
"$this_message$_oneof_case_[$oneof_index$] = "
|
||||
"$message_class$$cap_oneof_name$_NOT_SET;\n",
|
||||
"this_message", oneof_vars["this_message"],
|
||||
"_oneof_case_[$oneof_index$] = $cap_oneof_name$_NOT_SET;\n",
|
||||
"oneof_index", SimpleItoa(i),
|
||||
"message_class", message_class,
|
||||
"cap_oneof_name",
|
||||
ToUpper(descriptor_->oneof_decl(i)->name()));
|
||||
printer->Outdent();
|
||||
@ -2612,7 +2649,7 @@ GenerateSwap(io::Printer* printer) {
|
||||
printer->Print(
|
||||
"_internal_metadata_.Swap(&other->_internal_metadata_);\n");
|
||||
} else {
|
||||
printer->Print("_unknown_fields_.swap(other->_unknown_fields_);\n");
|
||||
printer->Print("_unknown_fields_.Swap(&other->_unknown_fields_);\n");
|
||||
}
|
||||
} else {
|
||||
// Still swap internal_metadata as it may contain more than just
|
||||
|
@ -39,8 +39,8 @@
|
||||
#ifndef _SHARED_PTR_H
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
#endif
|
||||
#include <set>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <google/protobuf/compiler/cpp/cpp_field.h>
|
||||
#include <google/protobuf/compiler/cpp/cpp_options.h>
|
||||
|
||||
@ -66,9 +66,10 @@ class MessageGenerator {
|
||||
|
||||
// Header stuff.
|
||||
|
||||
// Generate forward declarations for this class and all its nested types.
|
||||
void GenerateMessageForwardDeclaration(io::Printer* printer);
|
||||
void GenerateEnumForwardDeclaration(io::Printer* printer);
|
||||
// Return names for forward declarations of this class and all its nested
|
||||
// types.
|
||||
void FillMessageForwardDeclarations(set<string>* class_names);
|
||||
void FillEnumForwardDeclarations(set<string>* enum_names);
|
||||
|
||||
// Generate definitions of all nested enums (must come before class
|
||||
// definitions because those classes use the enums definitions).
|
||||
|
@ -63,6 +63,14 @@ void SetMessageVariables(const FieldDescriptor* descriptor,
|
||||
SafeFunctionName(descriptor->containing_type(),
|
||||
descriptor, "release_");
|
||||
(*variables)["full_name"] = descriptor->full_name();
|
||||
if (options.proto_h && IsFieldDependent(descriptor)) {
|
||||
(*variables)["dependent_type"] = "T::" + DependentTypeName(descriptor);
|
||||
(*variables)["dependent_typename"] =
|
||||
"typename T::" + DependentTypeName(descriptor);
|
||||
} else {
|
||||
(*variables)["dependent_type"] = FieldMessageTypeName(descriptor);
|
||||
(*variables)["dependent_typename"] = FieldMessageTypeName(descriptor);
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace
|
||||
@ -84,8 +92,22 @@ GeneratePrivateMembers(io::Printer* printer) const {
|
||||
printer->Print(variables_, "$type$* $name$_;\n");
|
||||
}
|
||||
|
||||
void MessageFieldGenerator::
|
||||
GenerateGetterDeclaration(io::Printer* printer) const {
|
||||
printer->Print(variables_,
|
||||
"const $type$& $name$() const$deprecation$;\n");
|
||||
}
|
||||
|
||||
void MessageFieldGenerator::
|
||||
GenerateDependentAccessorDeclarations(io::Printer* printer) const {
|
||||
if (!dependent_field_) {
|
||||
return;
|
||||
}
|
||||
// Arena manipulation code is out-of-line in the derived message class.
|
||||
printer->Print(variables_,
|
||||
"$type$* mutable_$name$()$deprecation$;\n"
|
||||
"$type$* $release_name$()$deprecation$;\n"
|
||||
"void set_allocated_$name$($type$* $name$)$deprecation$;\n");
|
||||
}
|
||||
|
||||
void MessageFieldGenerator::
|
||||
@ -103,11 +125,13 @@ GenerateAccessorDeclarations(io::Printer* printer) const {
|
||||
"$type$* _slow_$release_name$()$deprecation$;\n"
|
||||
"public:\n");
|
||||
}
|
||||
printer->Print(variables_,
|
||||
"const $type$& $name$() const$deprecation$;\n"
|
||||
"$type$* mutable_$name$()$deprecation$;\n"
|
||||
"$type$* $release_name$()$deprecation$;\n"
|
||||
"void set_allocated_$name$($type$* $name$)$deprecation$;\n");
|
||||
GenerateGetterDeclaration(printer);
|
||||
if (!dependent_field_) {
|
||||
printer->Print(variables_,
|
||||
"$type$* mutable_$name$()$deprecation$;\n"
|
||||
"$type$* $release_name$()$deprecation$;\n"
|
||||
"void set_allocated_$name$($type$* $name$)$deprecation$;\n");
|
||||
}
|
||||
if (SupportsArenas(descriptor_)) {
|
||||
printer->Print(variables_,
|
||||
"$type$* unsafe_arena_release_$name$()$deprecation$;\n"
|
||||
@ -123,12 +147,12 @@ void MessageFieldGenerator::GenerateNonInlineAccessorDefinitions(
|
||||
"void $classname$::_slow_mutable_$name$() {\n");
|
||||
if (SupportsArenas(descriptor_->message_type())) {
|
||||
printer->Print(variables_,
|
||||
" $name$_ = ::google::protobuf::Arena::CreateMessage< $type$ >(\n"
|
||||
" GetArenaNoVirtual());\n");
|
||||
" $name$_ = ::google::protobuf::Arena::CreateMessage< $type$ >(\n"
|
||||
" GetArenaNoVirtual());\n");
|
||||
} else {
|
||||
printer->Print(variables_,
|
||||
" $name$_ = ::google::protobuf::Arena::Create< $type$ >(\n"
|
||||
" GetArenaNoVirtual());\n");
|
||||
" $name$_ = ::google::protobuf::Arena::Create< $type$ >(\n"
|
||||
" GetArenaNoVirtual());\n");
|
||||
}
|
||||
printer->Print(variables_,
|
||||
"}\n"
|
||||
@ -151,7 +175,7 @@ void MessageFieldGenerator::GenerateNonInlineAccessorDefinitions(
|
||||
if (SupportsArenas(descriptor_->message_type())) {
|
||||
// NOTE: the same logic is mirrored in weak_message_field.cc. Any
|
||||
// arena-related semantics changes should be made in both places.
|
||||
printer->Print(variables_,
|
||||
printer->Print(variables_,
|
||||
"void $classname$::_slow_set_allocated_$name$(\n"
|
||||
" ::google::protobuf::Arena* message_arena, $type$** $name$) {\n"
|
||||
" if (message_arena != NULL && \n"
|
||||
@ -189,15 +213,139 @@ void MessageFieldGenerator::GenerateNonInlineAccessorDefinitions(
|
||||
|
||||
void MessageFieldGenerator::
|
||||
GenerateDependentInlineAccessorDefinitions(io::Printer* printer) const {
|
||||
if (!dependent_field_) {
|
||||
return;
|
||||
}
|
||||
|
||||
map<string, string> variables(variables_);
|
||||
// For the CRTP base class, all mutation methods are dependent, and so
|
||||
// they must be in the header.
|
||||
variables["dependent_classname"] =
|
||||
DependentBaseClassTemplateName(descriptor_->containing_type()) + "<T>";
|
||||
variables["this_message"] = DependentBaseDownCast();
|
||||
if (!variables["set_hasbit"].empty()) {
|
||||
variables["set_hasbit"] =
|
||||
variables["this_message"] + variables["set_hasbit"];
|
||||
}
|
||||
if (!variables["clear_hasbit"].empty()) {
|
||||
variables["clear_hasbit"] =
|
||||
variables["this_message"] + variables["clear_hasbit"];
|
||||
}
|
||||
|
||||
if (SupportsArenas(descriptor_)) {
|
||||
printer->Print(variables,
|
||||
"template <class T>\n"
|
||||
"inline $type$* $dependent_classname$::mutable_$name$() {\n"
|
||||
" $set_hasbit$\n"
|
||||
" $dependent_typename$*& $name$_ = $this_message$$name$_;\n"
|
||||
" if ($name$_ == NULL) {\n"
|
||||
" $this_message$_slow_mutable_$name$();\n"
|
||||
" }\n"
|
||||
" // @@protoc_insertion_point(field_mutable:$full_name$)\n"
|
||||
" return $name$_;\n"
|
||||
"}\n"
|
||||
"template <class T>\n"
|
||||
"inline $type$* $dependent_classname$::$release_name$() {\n"
|
||||
" $dependent_typename$*& $name$_ = $this_message$$name$_;\n"
|
||||
" $clear_hasbit$\n"
|
||||
" if ($this_message$GetArenaNoVirtual() != NULL) {\n"
|
||||
" return $this_message$_slow_$release_name$();\n"
|
||||
" } else {\n"
|
||||
" $dependent_typename$* temp = $name$_;\n"
|
||||
" $name$_ = NULL;\n"
|
||||
" return temp;\n"
|
||||
" }\n"
|
||||
"}\n"
|
||||
"template <class T>\n"
|
||||
"inline void $dependent_classname$::"
|
||||
"set_allocated_$name$($type$* $name$) {\n"
|
||||
" ::google::protobuf::Arena* message_arena = $this_message$GetArenaNoVirtual();\n"
|
||||
" $dependent_typename$*& $name$_ = $this_message$$name$_;\n"
|
||||
" if (message_arena == NULL) {\n"
|
||||
" delete $name$_;\n"
|
||||
" }\n"
|
||||
" if ($name$ != NULL) {\n");
|
||||
if (SupportsArenas(descriptor_->message_type())) {
|
||||
// If we're on an arena and the incoming message is not, simply Own() it
|
||||
// rather than copy to the arena -- either way we need a heap dealloc,
|
||||
// so we might as well defer it. Otherwise, if incoming message is on a
|
||||
// different ownership domain (specific arena, or the heap) than we are,
|
||||
// copy to our arena (or heap, as the case may be).
|
||||
printer->Print(variables,
|
||||
" $this_message$_slow_set_allocated_$name$(message_arena, "
|
||||
"&$name$);\n");
|
||||
} else {
|
||||
printer->Print(variables,
|
||||
" if (message_arena != NULL) {\n"
|
||||
" message_arena->Own($name$);\n"
|
||||
" }\n");
|
||||
}
|
||||
printer->Print(variables,
|
||||
" }\n"
|
||||
" $name$_ = $name$;\n"
|
||||
" if ($name$) {\n"
|
||||
" $set_hasbit$\n"
|
||||
" } else {\n"
|
||||
" $clear_hasbit$\n"
|
||||
" }\n"
|
||||
// TODO(dlj): move insertion points to message class.
|
||||
" // @@protoc_insertion_point(field_set_allocated:$full_name$)\n"
|
||||
"}\n");
|
||||
} else {
|
||||
printer->Print(variables,
|
||||
"template <class T>\n"
|
||||
"inline $type$* $dependent_classname$::mutable_$name$() {\n"
|
||||
" $set_hasbit$\n"
|
||||
" $dependent_typename$*& $name$_ = $this_message$$name$_;\n"
|
||||
" if ($name$_ == NULL) {\n"
|
||||
" $name$_ = new $dependent_typename$;\n"
|
||||
" }\n"
|
||||
" // @@protoc_insertion_point(field_mutable:$full_name$)\n"
|
||||
" return $name$_;\n"
|
||||
"}\n"
|
||||
"template <class T>\n"
|
||||
"inline $type$* $dependent_classname$::$release_name$() {\n"
|
||||
" $clear_hasbit$\n"
|
||||
" $dependent_typename$*& $name$_ = $this_message$$name$_;\n"
|
||||
" $dependent_typename$* temp = $name$_;\n"
|
||||
" $name$_ = NULL;\n"
|
||||
" return temp;\n"
|
||||
"}\n"
|
||||
"template <class T>\n"
|
||||
"inline void $dependent_classname$::"
|
||||
"set_allocated_$name$($type$* $name$) {\n"
|
||||
" $dependent_typename$*& $name$_ = $this_message$$name$_;\n"
|
||||
" delete $name$_;\n");
|
||||
|
||||
if (SupportsArenas(descriptor_->message_type())) {
|
||||
printer->Print(variables,
|
||||
" if ($name$ != NULL && static_cast< $dependent_typename$* >($name$)"
|
||||
"->GetArena() != NULL) {\n"
|
||||
" $dependent_typename$* new_$name$ = new $dependent_typename$;\n"
|
||||
" new_$name$->CopyFrom(*$name$);\n"
|
||||
" $name$ = new_$name$;\n"
|
||||
" }\n");
|
||||
}
|
||||
|
||||
printer->Print(variables,
|
||||
" $name$_ = $name$;\n"
|
||||
" if ($name$) {\n"
|
||||
" $set_hasbit$\n"
|
||||
" } else {\n"
|
||||
" $clear_hasbit$\n"
|
||||
" }\n"
|
||||
" // @@protoc_insertion_point(field_set_allocated:$full_name$)\n"
|
||||
"}\n");
|
||||
}
|
||||
}
|
||||
|
||||
void MessageFieldGenerator::
|
||||
GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
bool is_inline) const {
|
||||
map<string, string> variables(variables_);
|
||||
variables["inline"] = is_inline ? "inline" : "";
|
||||
variables["inline"] = is_inline ? "inline " : "";
|
||||
printer->Print(variables,
|
||||
"$inline$ const $type$& $classname$::$name$() const {\n"
|
||||
"$inline$const $type$& $classname$::$name$() const {\n"
|
||||
" // @@protoc_insertion_point(field_get:$full_name$)\n");
|
||||
|
||||
PrintHandlingOptionalStaticInitializers(
|
||||
@ -206,19 +354,25 @@ GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
" return $name$_ != NULL ? *$name$_ : *default_instance_->$name$_;\n",
|
||||
// Without.
|
||||
" return $name$_ != NULL ? *$name$_ : *default_instance().$name$_;\n");
|
||||
printer->Print(variables, "}\n");
|
||||
|
||||
if (dependent_field_) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (SupportsArenas(descriptor_)) {
|
||||
printer->Print(variables,
|
||||
"}\n"
|
||||
"$inline$ $type$* $classname$::mutable_$name$() {\n"
|
||||
"$inline$"
|
||||
"$type$* $classname$::mutable_$name$() {\n"
|
||||
" $set_hasbit$\n"
|
||||
" if ($name$_ == NULL) {\n"
|
||||
" _slow_mutable_$name$();"
|
||||
" _slow_mutable_$name$();\n"
|
||||
" }\n"
|
||||
" // @@protoc_insertion_point(field_mutable:$full_name$)\n"
|
||||
" return $name$_;\n"
|
||||
"}\n"
|
||||
"$inline$ $type$* $classname$::$release_name$() {\n"
|
||||
"$inline$"
|
||||
"$type$* $classname$::$release_name$() {\n"
|
||||
" $clear_hasbit$\n"
|
||||
" if (GetArenaNoVirtual() != NULL) {\n"
|
||||
" return _slow_$release_name$();\n"
|
||||
@ -228,7 +382,8 @@ GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
" return temp;\n"
|
||||
" }\n"
|
||||
"}\n"
|
||||
"$inline$ void $classname$::set_allocated_$name$($type$* $name$) {\n"
|
||||
"$inline$ "
|
||||
"void $classname$::set_allocated_$name$($type$* $name$) {\n"
|
||||
" ::google::protobuf::Arena* message_arena = GetArenaNoVirtual();\n"
|
||||
" if (message_arena == NULL) {\n"
|
||||
" delete $name$_;\n"
|
||||
@ -260,8 +415,8 @@ GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
"}\n");
|
||||
} else {
|
||||
printer->Print(variables,
|
||||
"}\n"
|
||||
"$inline$ $type$* $classname$::mutable_$name$() {\n"
|
||||
"$inline$"
|
||||
"$type$* $classname$::mutable_$name$() {\n"
|
||||
" $set_hasbit$\n"
|
||||
" if ($name$_ == NULL) {\n"
|
||||
" $name$_ = new $type$;\n"
|
||||
@ -269,13 +424,15 @@ GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
" // @@protoc_insertion_point(field_mutable:$full_name$)\n"
|
||||
" return $name$_;\n"
|
||||
"}\n"
|
||||
"$inline$ $type$* $classname$::$release_name$() {\n"
|
||||
"$inline$"
|
||||
"$type$* $classname$::$release_name$() {\n"
|
||||
" $clear_hasbit$\n"
|
||||
" $type$* temp = $name$_;\n"
|
||||
" $name$_ = NULL;\n"
|
||||
" return temp;\n"
|
||||
"}\n"
|
||||
"$inline$ void $classname$::set_allocated_$name$($type$* $name$) {\n"
|
||||
"$inline$"
|
||||
"void $classname$::set_allocated_$name$($type$* $name$) {\n"
|
||||
" delete $name$_;\n");
|
||||
|
||||
if (SupportsArenas(descriptor_->message_type())) {
|
||||
@ -301,15 +458,19 @@ GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
|
||||
void MessageFieldGenerator::
|
||||
GenerateClearingCode(io::Printer* printer) const {
|
||||
map<string, string> variables(variables_);
|
||||
variables["this_message"] = dependent_field_ ? DependentBaseDownCast() : "";
|
||||
if (!HasFieldPresence(descriptor_->file())) {
|
||||
// If we don't have has-bits, message presence is indicated only by ptr !=
|
||||
// NULL. Thus on clear, we need to delete the object.
|
||||
printer->Print(variables_,
|
||||
"if (GetArenaNoVirtual() == NULL && $name$_ != NULL) delete $name$_;\n"
|
||||
"$name$_ = NULL;\n");
|
||||
printer->Print(variables,
|
||||
"if ($this_message$GetArenaNoVirtual() == NULL && "
|
||||
"$this_message$$name$_ != NULL) delete $this_message$$name$_;\n"
|
||||
"$this_message$$name$_ = NULL;\n");
|
||||
} else {
|
||||
printer->Print(variables_,
|
||||
"if ($name$_ != NULL) $name$_->$type$::Clear();\n");
|
||||
printer->Print(variables,
|
||||
"if ($this_message$$name$_ != NULL) $this_message$$name$_->"
|
||||
"$dependent_type$::Clear();\n");
|
||||
}
|
||||
}
|
||||
|
||||
@ -370,67 +531,180 @@ GenerateByteSize(io::Printer* printer) const {
|
||||
MessageOneofFieldGenerator::
|
||||
MessageOneofFieldGenerator(const FieldDescriptor* descriptor,
|
||||
const Options& options)
|
||||
: MessageFieldGenerator(descriptor, options) {
|
||||
: MessageFieldGenerator(descriptor, options),
|
||||
dependent_base_(options.proto_h) {
|
||||
SetCommonOneofFieldVariables(descriptor, &variables_);
|
||||
}
|
||||
|
||||
MessageOneofFieldGenerator::~MessageOneofFieldGenerator() {}
|
||||
|
||||
|
||||
void MessageOneofFieldGenerator::
|
||||
GenerateDependentAccessorDeclarations(io::Printer* printer) const {
|
||||
// Oneof field getters must be dependent as they call default_instance().
|
||||
// Otherwise, the logic is the same as MessageFields.
|
||||
if (!dependent_field_) {
|
||||
return;
|
||||
}
|
||||
printer->Print(variables_,
|
||||
"const $type$& $name$() const$deprecation$;\n");
|
||||
MessageFieldGenerator::GenerateDependentAccessorDeclarations(printer);
|
||||
}
|
||||
|
||||
void MessageOneofFieldGenerator::
|
||||
GenerateGetterDeclaration(io::Printer* printer) const {
|
||||
// Oneof field getters must be dependent as they call default_instance().
|
||||
// Unlike MessageField, this means there is no (non-dependent) getter to
|
||||
// generate.
|
||||
if (dependent_field_) {
|
||||
return;
|
||||
}
|
||||
printer->Print(variables_,
|
||||
"const $type$& $name$() const$deprecation$;\n");
|
||||
}
|
||||
|
||||
void MessageOneofFieldGenerator::
|
||||
GenerateDependentInlineAccessorDefinitions(io::Printer* printer) const {
|
||||
// For the CRTP base class, all mutation methods are dependent, and so
|
||||
// they must be in the header.
|
||||
if (!dependent_base_) {
|
||||
return;
|
||||
}
|
||||
map<string, string> variables(variables_);
|
||||
variables["inline"] = "inline ";
|
||||
variables["dependent_classname"] =
|
||||
DependentBaseClassTemplateName(descriptor_->containing_type()) + "<T>";
|
||||
variables["this_message"] = "reinterpret_cast<T*>(this)->";
|
||||
// Const message access is needed for the dependent getter.
|
||||
variables["this_const_message"] = "reinterpret_cast<const T*>(this)->";
|
||||
variables["tmpl"] = "template <class T>\n";
|
||||
variables["field_member"] = variables["this_message"] +
|
||||
variables["oneof_prefix"] + variables["name"] +
|
||||
"_";
|
||||
InternalGenerateInlineAccessorDefinitions(variables, printer);
|
||||
}
|
||||
|
||||
void MessageOneofFieldGenerator::
|
||||
GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
bool is_inline) const {
|
||||
if (dependent_base_) {
|
||||
return;
|
||||
}
|
||||
map<string, string> variables(variables_);
|
||||
variables["inline"] = is_inline ? "inline" : "";
|
||||
variables["inline"] = is_inline ? "inline " : "";
|
||||
variables["dependent_classname"] = variables["classname"];
|
||||
variables["this_message"] = "";
|
||||
variables["this_const_message"] = "";
|
||||
variables["tmpl"] = "";
|
||||
variables["field_member"] =
|
||||
variables["oneof_prefix"] + variables["name"] + "_";
|
||||
variables["dependent_type"] = variables["type"];
|
||||
InternalGenerateInlineAccessorDefinitions(variables, printer);
|
||||
}
|
||||
|
||||
void MessageOneofFieldGenerator::
|
||||
GenerateNonInlineAccessorDefinitions(io::Printer* printer) const {
|
||||
map<string, string> variables(variables_);
|
||||
variables["field_member"] =
|
||||
variables["oneof_prefix"] + variables["name"] + "_";
|
||||
|
||||
//printer->Print(variables,
|
||||
}
|
||||
|
||||
void MessageOneofFieldGenerator::
|
||||
InternalGenerateInlineAccessorDefinitions(const map<string, string>& variables,
|
||||
io::Printer* printer) const {
|
||||
printer->Print(variables,
|
||||
"$tmpl$"
|
||||
"$inline$ "
|
||||
"const $type$& $dependent_classname$::$name$() const {\n"
|
||||
" // @@protoc_insertion_point(field_get:$full_name$)\n"
|
||||
" return $this_const_message$has_$name$()\n"
|
||||
" ? *$this_const_message$$oneof_prefix$$name$_\n"
|
||||
" : $dependent_type$::default_instance();\n"
|
||||
"}\n");
|
||||
|
||||
if (SupportsArenas(descriptor_)) {
|
||||
printer->Print(variables,
|
||||
"$inline$ const $type$& $classname$::$name$() const {\n"
|
||||
" // @@protoc_insertion_point(field_get:$full_name$)\n"
|
||||
" return has_$name$() ? *$oneof_prefix$$name$_\n"
|
||||
" : $type$::default_instance();\n"
|
||||
"}\n"
|
||||
"$inline$ $type$* $classname$::mutable_$name$() {\n"
|
||||
" if (!has_$name$()) {\n"
|
||||
" clear_$oneof_name$();\n"
|
||||
" set_has_$name$();\n");
|
||||
"$tmpl$"
|
||||
"$inline$"
|
||||
"$type$* $dependent_classname$::mutable_$name$() {\n"
|
||||
" if (!$this_message$has_$name$()) {\n"
|
||||
" $this_message$clear_$oneof_name$();\n"
|
||||
" $this_message$set_has_$name$();\n");
|
||||
if (SupportsArenas(descriptor_->message_type())) {
|
||||
printer->Print(variables,
|
||||
" $oneof_prefix$$name$_ = \n"
|
||||
" ::google::protobuf::Arena::CreateMessage< $type$ >(\n"
|
||||
" GetArenaNoVirtual());\n");
|
||||
" $field_member$ = \n"
|
||||
" ::google::protobuf::Arena::CreateMessage< $dependent_typename$ >(\n"
|
||||
" $this_message$GetArenaNoVirtual());\n");
|
||||
} else {
|
||||
printer->Print(variables,
|
||||
" $oneof_prefix$$name$_ = \n"
|
||||
" ::google::protobuf::Arena::Create< $type$ >(\n"
|
||||
" GetArenaNoVirtual());\n");
|
||||
" $this_message$$oneof_prefix$$name$_ = \n"
|
||||
" ::google::protobuf::Arena::Create< $dependent_typename$ >(\n"
|
||||
" $this_message$GetArenaNoVirtual());\n");
|
||||
}
|
||||
printer->Print(variables,
|
||||
" }\n"
|
||||
" // @@protoc_insertion_point(field_mutable:$full_name$)\n"
|
||||
" return $oneof_prefix$$name$_;\n"
|
||||
" return $field_member$;\n"
|
||||
"}\n"
|
||||
"$inline$ $type$* $classname$::$release_name$() {\n"
|
||||
" if (has_$name$()) {\n"
|
||||
" clear_has_$oneof_name$();\n"
|
||||
" if (GetArenaNoVirtual() != NULL) {\n"
|
||||
"$tmpl$"
|
||||
"$inline$"
|
||||
"$type$* $dependent_classname$::$release_name$() {\n"
|
||||
" if ($this_message$has_$name$()) {\n"
|
||||
" $this_message$clear_has_$oneof_name$();\n"
|
||||
" if ($this_message$GetArenaNoVirtual() != NULL) {\n"
|
||||
// N.B.: safe to use the underlying field pointer here because we are sure
|
||||
// that it is non-NULL (because has_$name$() returned true).
|
||||
" $type$* temp = new $type$;\n"
|
||||
" temp->MergeFrom(*$oneof_prefix$$name$_);\n"
|
||||
" $oneof_prefix$$name$_ = NULL;\n"
|
||||
" $dependent_typename$* temp = new $dependent_typename$;\n"
|
||||
" temp->MergeFrom(*$field_member$);\n"
|
||||
" $field_member$ = NULL;\n"
|
||||
" return temp;\n"
|
||||
" } else {\n"
|
||||
" $type$* temp = $oneof_prefix$$name$_;\n"
|
||||
" $oneof_prefix$$name$_ = NULL;\n"
|
||||
" $dependent_typename$* temp = $field_member$;\n"
|
||||
" $field_member$ = NULL;\n"
|
||||
" return temp;\n"
|
||||
" }\n"
|
||||
" } else {\n"
|
||||
" return NULL;\n"
|
||||
" }\n"
|
||||
"}\n"
|
||||
"$tmpl$"
|
||||
"$inline$"
|
||||
"void $dependent_classname$::"
|
||||
"set_allocated_$name$($type$* $name$) {\n"
|
||||
" $this_message$clear_$oneof_name$();\n"
|
||||
" if ($name$) {\n");
|
||||
|
||||
if (SupportsArenas(descriptor_->message_type())) {
|
||||
printer->Print(variables,
|
||||
// If incoming message is on the heap and we are on an arena, just Own()
|
||||
// it (see above). If it's on a different arena than we are or one of us
|
||||
// is on the heap, we make a copy to our arena/heap.
|
||||
" if ($this_message$GetArenaNoVirtual() != NULL &&\n"
|
||||
" ::google::protobuf::Arena::GetArena($name$) == NULL) {\n"
|
||||
" $this_message$GetArenaNoVirtual()->Own($name$);\n"
|
||||
" } else if ($this_message$GetArenaNoVirtual() !=\n"
|
||||
" ::google::protobuf::Arena::GetArena($name$)) {\n"
|
||||
" $dependent_typename$* new_$name$ = \n"
|
||||
" ::google::protobuf::Arena::CreateMessage< $dependent_typename$ >(\n"
|
||||
" $this_message$GetArenaNoVirtual());\n"
|
||||
" new_$name$->CopyFrom(*$name$);\n"
|
||||
" $name$ = new_$name$;\n"
|
||||
" }\n");
|
||||
} else {
|
||||
printer->Print(variables,
|
||||
" if ($this_message$GetArenaNoVirtual() != NULL) {\n"
|
||||
" $this_message$GetArenaNoVirtual()->Own($name$);\n"
|
||||
" }\n");
|
||||
}
|
||||
|
||||
printer->Print(variables,
|
||||
" $this_message$set_has_$name$();\n"
|
||||
" $field_member$ = $name$;\n"
|
||||
" }\n"
|
||||
" // @@protoc_insertion_point(field_set_allocated:$full_name$)\n"
|
||||
"}\n"
|
||||
"$inline$ $type$* $classname$::unsafe_arena_release_$name$() {\n"
|
||||
" if (has_$name$()) {\n"
|
||||
" clear_has_$oneof_name$();\n"
|
||||
@ -441,41 +715,8 @@ GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
" return NULL;\n"
|
||||
" }\n"
|
||||
"}\n"
|
||||
"$inline$ void $classname$::set_allocated_$name$($type$* $name$) {\n"
|
||||
" clear_$oneof_name$();\n"
|
||||
" if ($name$) {\n");
|
||||
|
||||
if (SupportsArenas(descriptor_->message_type())) {
|
||||
printer->Print(variables,
|
||||
// If incoming message is on the heap and we are on an arena, just Own()
|
||||
// it (see above). If it's on a different arena than we are or one of us
|
||||
// is on the heap, we make a copy to our arena/heap.
|
||||
" if (GetArenaNoVirtual() != NULL &&\n"
|
||||
" ::google::protobuf::Arena::GetArena($name$) == NULL) {\n"
|
||||
" GetArenaNoVirtual()->Own($name$);\n"
|
||||
" } else if (GetArenaNoVirtual() !=\n"
|
||||
" ::google::protobuf::Arena::GetArena($name$)) {\n"
|
||||
" $type$* new_$name$ = \n"
|
||||
" ::google::protobuf::Arena::CreateMessage< $type$ >(\n"
|
||||
" GetArenaNoVirtual());\n"
|
||||
" new_$name$->CopyFrom(*$name$);\n"
|
||||
" $name$ = new_$name$;\n"
|
||||
" }\n");
|
||||
} else {
|
||||
printer->Print(variables,
|
||||
" if (GetArenaNoVirtual() != NULL) {\n"
|
||||
" GetArenaNoVirtual()->Own($name$);\n"
|
||||
" }\n");
|
||||
}
|
||||
|
||||
printer->Print(variables,
|
||||
" set_has_$name$();\n"
|
||||
" $oneof_prefix$$name$_ = $name$;\n"
|
||||
" }\n"
|
||||
" // @@protoc_insertion_point(field_set_allocated:$full_name$)\n"
|
||||
"}\n"
|
||||
"$inline$ void $classname$::unsafe_arena_set_allocated_$name$("
|
||||
"$type$* $name$) {\n"
|
||||
"$inline$ void $classname$::unsafe_arena_set_allocated_$name$"
|
||||
"($type$* $name$) {\n"
|
||||
// We rely on the oneof clear method to free the earlier contents of this
|
||||
// oneof. We can directly use the pointer we're given to set the new
|
||||
// value.
|
||||
@ -489,44 +730,47 @@ GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
"}\n");
|
||||
} else {
|
||||
printer->Print(variables,
|
||||
"$inline$ const $type$& $classname$::$name$() const {\n"
|
||||
" // @@protoc_insertion_point(field_get:$full_name$)\n"
|
||||
" return has_$name$() ? *$oneof_prefix$$name$_\n"
|
||||
" : $type$::default_instance();\n"
|
||||
"}\n"
|
||||
"$inline$ $type$* $classname$::mutable_$name$() {\n"
|
||||
" if (!has_$name$()) {\n"
|
||||
" clear_$oneof_name$();\n"
|
||||
" set_has_$name$();\n"
|
||||
" $oneof_prefix$$name$_ = new $type$;\n"
|
||||
"$tmpl$"
|
||||
"$inline$"
|
||||
"$type$* $dependent_classname$::mutable_$name$() {\n"
|
||||
" if (!$this_message$has_$name$()) {\n"
|
||||
" $this_message$clear_$oneof_name$();\n"
|
||||
" $this_message$set_has_$name$();\n"
|
||||
" $field_member$ = new $dependent_typename$;\n"
|
||||
" }\n"
|
||||
" // @@protoc_insertion_point(field_mutable:$full_name$)\n"
|
||||
" return $oneof_prefix$$name$_;\n"
|
||||
" return $field_member$;\n"
|
||||
"}\n"
|
||||
"$inline$ $type$* $classname$::$release_name$() {\n"
|
||||
" if (has_$name$()) {\n"
|
||||
" clear_has_$oneof_name$();\n"
|
||||
" $type$* temp = $oneof_prefix$$name$_;\n"
|
||||
" $oneof_prefix$$name$_ = NULL;\n"
|
||||
"$tmpl$"
|
||||
"$inline$"
|
||||
"$type$* $dependent_classname$::$release_name$() {\n"
|
||||
" if ($this_message$has_$name$()) {\n"
|
||||
" $this_message$clear_has_$oneof_name$();\n"
|
||||
" $dependent_typename$* temp = $field_member$;\n"
|
||||
" $field_member$ = NULL;\n"
|
||||
" return temp;\n"
|
||||
" } else {\n"
|
||||
" return NULL;\n"
|
||||
" }\n"
|
||||
"}\n"
|
||||
"$inline$ void $classname$::set_allocated_$name$($type$* $name$) {\n"
|
||||
" clear_$oneof_name$();\n"
|
||||
"$tmpl$"
|
||||
"$inline$"
|
||||
"void $dependent_classname$::"
|
||||
"set_allocated_$name$($type$* $name$) {\n"
|
||||
" $this_message$clear_$oneof_name$();\n"
|
||||
" if ($name$) {\n");
|
||||
if (SupportsArenas(descriptor_->message_type())) {
|
||||
printer->Print(variables,
|
||||
" if ($name$->GetArena() != NULL) {\n"
|
||||
" $type$* new_$name$ = new $type$;\n"
|
||||
" if (static_cast< $dependent_typename$*>($name$)->"
|
||||
"GetArena() != NULL) {\n"
|
||||
" $dependent_typename$* new_$name$ = new $dependent_typename$;\n"
|
||||
" new_$name$->CopyFrom(*$name$);\n"
|
||||
" $name$ = new_$name$;\n"
|
||||
" }\n");
|
||||
}
|
||||
printer->Print(variables,
|
||||
" set_has_$name$();\n"
|
||||
" $oneof_prefix$$name$_ = $name$;\n"
|
||||
" $this_message$set_has_$name$();\n"
|
||||
" $field_member$ = $name$;\n"
|
||||
" }\n"
|
||||
" // @@protoc_insertion_point(field_set_allocated:$full_name$)\n"
|
||||
"}\n");
|
||||
@ -535,14 +779,16 @@ GenerateInlineAccessorDefinitions(io::Printer* printer,
|
||||
|
||||
void MessageOneofFieldGenerator::
|
||||
GenerateClearingCode(io::Printer* printer) const {
|
||||
map<string, string> variables(variables_);
|
||||
variables["this_message"] = dependent_field_ ? DependentBaseDownCast() : "";
|
||||
if (SupportsArenas(descriptor_)) {
|
||||
printer->Print(variables_,
|
||||
"if (GetArenaNoVirtual() == NULL) {\n"
|
||||
" delete $oneof_prefix$$name$_;\n"
|
||||
printer->Print(variables,
|
||||
"if ($this_message$GetArenaNoVirtual() == NULL) {\n"
|
||||
" delete $this_message$$oneof_prefix$$name$_;\n"
|
||||
"}\n");
|
||||
} else {
|
||||
printer->Print(variables_,
|
||||
"delete $oneof_prefix$$name$_;\n");
|
||||
printer->Print(variables,
|
||||
"delete $this_message$$oneof_prefix$$name$_;\n");
|
||||
}
|
||||
}
|
||||
|
||||
@ -562,7 +808,9 @@ GenerateConstructorCode(io::Printer* printer) const {
|
||||
RepeatedMessageFieldGenerator::
|
||||
RepeatedMessageFieldGenerator(const FieldDescriptor* descriptor,
|
||||
const Options& options)
|
||||
: descriptor_(descriptor) {
|
||||
: descriptor_(descriptor),
|
||||
dependent_field_(options.proto_h && IsFieldDependent(descriptor)),
|
||||
dependent_getter_(dependent_field_ && options.safe_boundary_check) {
|
||||
SetMessageVariables(descriptor, &variables_, options);
|
||||
}
|
||||
|
||||
@ -575,60 +823,160 @@ GeneratePrivateMembers(io::Printer* printer) const {
|
||||
}
|
||||
|
||||
void RepeatedMessageFieldGenerator::
|
||||
GenerateDependentAccessorDeclarations(io::Printer* printer) const {
|
||||
}
|
||||
|
||||
void RepeatedMessageFieldGenerator::
|
||||
GenerateAccessorDeclarations(io::Printer* printer) const {
|
||||
InternalGenerateTypeDependentAccessorDeclarations(io::Printer* printer) const {
|
||||
printer->Print(variables_,
|
||||
"const $type$& $name$(int index) const$deprecation$;\n"
|
||||
"$type$* mutable_$name$(int index)$deprecation$;\n"
|
||||
"$type$* add_$name$()$deprecation$;\n");
|
||||
if (dependent_getter_) {
|
||||
printer->Print(variables_,
|
||||
"const ::google::protobuf::RepeatedPtrField< $type$ >&\n"
|
||||
" $name$() const$deprecation$;\n");
|
||||
}
|
||||
printer->Print(variables_,
|
||||
"const ::google::protobuf::RepeatedPtrField< $type$ >&\n"
|
||||
" $name$() const$deprecation$;\n"
|
||||
"::google::protobuf::RepeatedPtrField< $type$ >*\n"
|
||||
" mutable_$name$()$deprecation$;\n");
|
||||
}
|
||||
|
||||
void RepeatedMessageFieldGenerator::
|
||||
GenerateDependentAccessorDeclarations(io::Printer* printer) const {
|
||||
if (dependent_getter_) {
|
||||
printer->Print(variables_,
|
||||
"const $type$& $name$(int index) const$deprecation$;\n");
|
||||
}
|
||||
if (dependent_field_) {
|
||||
InternalGenerateTypeDependentAccessorDeclarations(printer);
|
||||
}
|
||||
}
|
||||
|
||||
void RepeatedMessageFieldGenerator::
|
||||
GenerateAccessorDeclarations(io::Printer* printer) const {
|
||||
if (!dependent_getter_) {
|
||||
printer->Print(variables_,
|
||||
"const $type$& $name$(int index) const$deprecation$;\n");
|
||||
}
|
||||
if (!dependent_field_) {
|
||||
InternalGenerateTypeDependentAccessorDeclarations(printer);
|
||||
}
|
||||
if (!dependent_getter_) {
|
||||
printer->Print(variables_,
|
||||
"const ::google::protobuf::RepeatedPtrField< $type$ >&\n"
|
||||
" $name$() const$deprecation$;\n");
|
||||
}
|
||||
}
|
||||
|
||||
void RepeatedMessageFieldGenerator::
|
||||
GenerateDependentInlineAccessorDefinitions(io::Printer* printer) const {
|
||||
if (!dependent_field_) {
|
||||
return;
|
||||
}
|
||||
map<string, string> variables(variables_);
|
||||
// For the CRTP base class, all mutation methods are dependent, and so
|
||||
// they must be in the header.
|
||||
variables["dependent_classname"] =
|
||||
DependentBaseClassTemplateName(descriptor_->containing_type()) + "<T>";
|
||||
variables["this_message"] = DependentBaseDownCast();
|
||||
variables["this_const_message"] = DependentBaseConstDownCast();
|
||||
|
||||
if (dependent_getter_) {
|
||||
printer->Print(variables,
|
||||
"template <class T>\n"
|
||||
"inline const $type$& $dependent_classname$::$name$(int index) const {\n"
|
||||
" // @@protoc_insertion_point(field_get:$full_name$)\n"
|
||||
" return $this_const_message$$name$_.$cppget$(index);\n"
|
||||
"}\n");
|
||||
}
|
||||
|
||||
// Generate per-element accessors:
|
||||
printer->Print(variables,
|
||||
"template <class T>\n"
|
||||
"inline $type$* $dependent_classname$::mutable_$name$(int index) {\n"
|
||||
// TODO(dlj): move insertion points
|
||||
" // @@protoc_insertion_point(field_mutable:$full_name$)\n"
|
||||
" return $this_message$$name$_.Mutable(index);\n"
|
||||
"}\n"
|
||||
"template <class T>\n"
|
||||
"inline $type$* $dependent_classname$::add_$name$() {\n"
|
||||
" // @@protoc_insertion_point(field_add:$full_name$)\n"
|
||||
" return $this_message$$name$_.Add();\n"
|
||||
"}\n");
|
||||
|
||||
|
||||
if (dependent_getter_) {
|
||||
printer->Print(variables,
|
||||
"template <class T>\n"
|
||||
"inline const ::google::protobuf::RepeatedPtrField< $type$ >&\n"
|
||||
"$dependent_classname$::$name$() const {\n"
|
||||
" // @@protoc_insertion_point(field_list:$full_name$)\n"
|
||||
" return $this_const_message$$name$_;\n"
|
||||
"}\n");
|
||||
}
|
||||
|
||||
// Generate mutable access to the entire list:
|
||||
printer->Print(variables,
|
||||
"template <class T>\n"
|
||||
"inline ::google::protobuf::RepeatedPtrField< $type$ >*\n"
|
||||
"$dependent_classname$::mutable_$name$() {\n"
|
||||
" // @@protoc_insertion_point(field_mutable_list:$full_name$)\n"
|
||||
" return &$this_message$$name$_;\n"
|
||||
"}\n");
|
||||
}
|
||||
|
||||
void RepeatedMessageFieldGenerator::
GenerateInlineAccessorDefinitions(io::Printer* printer,
                                  bool is_inline) const {
  map<string, string> variables(variables_);
  variables["inline"] = is_inline ? "inline" : "";
  printer->Print(variables,
    "$inline$ const $type$& $classname$::$name$(int index) const {\n"
    "  // @@protoc_insertion_point(field_get:$full_name$)\n"
    "  return $name$_.$cppget$(index);\n"
    "}\n"
    "$inline$ $type$* $classname$::mutable_$name$(int index) {\n"
    "  // @@protoc_insertion_point(field_mutable:$full_name$)\n"
    "  return $name$_.Mutable(index);\n"
    "}\n"
    "$inline$ $type$* $classname$::add_$name$() {\n"
    "  // @@protoc_insertion_point(field_add:$full_name$)\n"
    "  return $name$_.Add();\n"
    "}\n");
  printer->Print(variables,
    "$inline$ const ::google::protobuf::RepeatedPtrField< $type$ >&\n"
    "$classname$::$name$() const {\n"
    "  // @@protoc_insertion_point(field_list:$full_name$)\n"
    "  return $name$_;\n"
    "}\n"
    "$inline$ ::google::protobuf::RepeatedPtrField< $type$ >*\n"
    "$classname$::mutable_$name$() {\n"
    "  // @@protoc_insertion_point(field_mutable_list:$full_name$)\n"
    "  return &$name$_;\n"
    "}\n");
  variables["inline"] = is_inline ? "inline " : "";

  if (!dependent_getter_) {
    printer->Print(variables,
      "$inline$"
      "const $type$& $classname$::$name$(int index) const {\n"
      "  // @@protoc_insertion_point(field_get:$full_name$)\n"
      "  return $name$_.$cppget$(index);\n"
      "}\n");
  }

  if (!dependent_field_) {
    printer->Print(variables,
      "$inline$"
      "$type$* $classname$::mutable_$name$(int index) {\n"
      // TODO(dlj): move insertion points
      "  // @@protoc_insertion_point(field_mutable:$full_name$)\n"
      "  return $name$_.Mutable(index);\n"
      "}\n"
      "$inline$"
      "$type$* $classname$::add_$name$() {\n"
      "  // @@protoc_insertion_point(field_add:$full_name$)\n"
      "  return $name$_.Add();\n"
      "}\n");
  }

  if (!dependent_field_) {
    printer->Print(variables,
      "$inline$"
      "::google::protobuf::RepeatedPtrField< $type$ >*\n"
      "$classname$::mutable_$name$() {\n"
      "  // @@protoc_insertion_point(field_mutable_list:$full_name$)\n"
      "  return &$name$_;\n"
      "}\n");
  }
  if (!dependent_getter_) {
    printer->Print(variables,
      "$inline$"
      "const ::google::protobuf::RepeatedPtrField< $type$ >&\n"
      "$classname$::$name$() const {\n"
      "  // @@protoc_insertion_point(field_list:$full_name$)\n"
      "  return $name$_;\n"
      "}\n");
  }
}
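For illustration only: with the same hypothetical field (repeated Bar bars in message example.Foo), assuming $classname$ resolves to Foo, $cppget$ to Get, and is_inline to true, the non-dependent per-element accessors printed above would expand to roughly:

inline const Bar& Foo::bars(int index) const {
  // @@protoc_insertion_point(field_get:example.Foo.bars)
  return bars_.Get(index);
}
inline Bar* Foo::mutable_bars(int index) {
  // @@protoc_insertion_point(field_mutable:example.Foo.bars)
  return bars_.Mutable(index);
}
inline Bar* Foo::add_bars() {
  // @@protoc_insertion_point(field_add:example.Foo.bars)
  return bars_.Add();
}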
void RepeatedMessageFieldGenerator::
GenerateClearingCode(io::Printer* printer) const {
  printer->Print(variables_, "$name$_.Clear();\n");
  map<string, string> variables(variables_);
  variables["this_message"] = dependent_field_ ? DependentBaseDownCast() : "";
  printer->Print(variables, "$this_message$$name$_.Clear();\n");
}
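For illustration only: when dependent_field_ is false, "this_message" is bound to the empty string, so for the hypothetical bars field the clearing statement printed above reduces to the single line below; for a dependent field it would be prefixed with the downcast expression returned by DependentBaseDownCast(), which is defined outside this hunk.

bars_.Clear();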
void RepeatedMessageFieldGenerator::

@@ -68,6 +68,11 @@ class MessageFieldGenerator : public FieldGenerator {
  void GenerateByteSize(io::Printer* printer) const;

 protected:
  void GenerateArenaManipulationCode(const map<string, string>& variables,
                                     io::Printer* printer) const;

  virtual void GenerateGetterDeclaration(io::Printer* printer) const;

  const FieldDescriptor* descriptor_;
  const bool dependent_field_;
  map<string, string> variables_;

@@ -83,15 +88,23 @@ class MessageOneofFieldGenerator : public MessageFieldGenerator {
  ~MessageOneofFieldGenerator();

  // implements FieldGenerator ---------------------------------------
  void GenerateDependentAccessorDeclarations(io::Printer* printer) const;
  void GenerateDependentInlineAccessorDefinitions(io::Printer* printer) const;
  void GenerateInlineAccessorDefinitions(io::Printer* printer,
                                         bool is_inline) const;
  void GenerateNonInlineAccessorDefinitions(io::Printer* printer) const {}
  void GenerateNonInlineAccessorDefinitions(io::Printer* printer) const;
  void GenerateClearingCode(io::Printer* printer) const;
  void GenerateSwappingCode(io::Printer* printer) const;
  void GenerateConstructorCode(io::Printer* printer) const;

 protected:
  void GenerateGetterDeclaration(io::Printer* printer) const;

 private:
  void InternalGenerateInlineAccessorDefinitions(
      const map<string, string>& variables, io::Printer* printer) const;

  const bool dependent_base_;
  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(MessageOneofFieldGenerator);
};

@@ -118,7 +131,12 @@ class RepeatedMessageFieldGenerator : public FieldGenerator {
  void GenerateByteSize(io::Printer* printer) const;

 private:
  void InternalGenerateTypeDependentAccessorDeclarations(
      io::Printer* printer) const;

  const FieldDescriptor* descriptor_;
  const bool dependent_field_;
  const bool dependent_getter_;
  map<string, string> variables_;

  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedMessageFieldGenerator);
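For context only, a hypothetical sketch of how these FieldGenerator hooks are typically driven by the message-level code generator; the loop and the field_generators_ / descriptor_ members below are assumptions, only the virtual method name comes from the declarations in this hunk:

// Hypothetical driver: walk every field of the message and emit its inline accessors.
for (int i = 0; i < descriptor_->field_count(); i++) {
  field_generators_.get(descriptor_->field(i))
      .GenerateInlineAccessorDefinitions(printer, /* is_inline */ true);
}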