Merge pull request #1145 from jskeet/conformance
JSON Conformance improvements
Commit d522479aa0
@@ -1,89 +1,16 @@
DurationProtoInputTooLarge.JsonOutput
DurationProtoInputTooSmall.JsonOutput
FieldMaskNumbersDontRoundTrip.JsonOutput
FieldMaskPathsDontRoundTrip.JsonOutput
FieldMaskTooManyUnderscore.JsonOutput
JsonInput.AnyWithValueForInteger.JsonOutput
JsonInput.AnyWithValueForJsonObject.JsonOutput
JsonInput.BoolFieldAllCapitalFalse
JsonInput.BoolFieldAllCapitalTrue
JsonInput.BoolFieldCamelCaseFalse
JsonInput.BoolFieldCamelCaseTrue
JsonInput.BoolMapFieldKeyNotQuoted
JsonInput.BytesFieldInvalidBase64Characters
JsonInput.BytesFieldNoPadding
JsonInput.DoubleFieldInfinityNotQuoted
JsonInput.DoubleFieldNanNotQuoted
JsonInput.DoubleFieldNegativeInfinityNotQuoted
JsonInput.DoubleFieldTooLarge
JsonInput.DoubleFieldTooSmall
JsonInput.DurationHas3FractionalDigits.Validator
JsonInput.DurationHas6FractionalDigits.Validator
JsonInput.DurationHas9FractionalDigits.Validator
JsonInput.DurationMaxValue.JsonOutput
JsonInput.DurationMaxValue.ProtobufOutput
JsonInput.DurationMinValue.JsonOutput
JsonInput.DurationMinValue.ProtobufOutput
JsonInput.EnumFieldNotQuoted
JsonInput.EnumFieldNumericValueNonZero.JsonOutput
JsonInput.EnumFieldNumericValueNonZero.ProtobufOutput
JsonInput.EnumFieldNumericValueZero.JsonOutput
JsonInput.EnumFieldNumericValueZero.ProtobufOutput
JsonInput.EnumFieldUnknownValue.Validator
JsonInput.FieldMaskInvalidCharacter
JsonInput.FieldNameDuplicate
JsonInput.FieldNameDuplicateDifferentCasing2
JsonInput.FieldNameInLowerCamelCase.Validator
JsonInput.FieldNameInSnakeCase.JsonOutput
JsonInput.FieldNameInSnakeCase.ProtobufOutput
JsonInput.FieldNameNotQuoted
JsonInput.FieldNameWithMixedCases.JsonOutput
JsonInput.FieldNameWithMixedCases.ProtobufOutput
JsonInput.FieldNameWithMixedCases.Validator
JsonInput.FloatFieldInfinityNotQuoted
JsonInput.FloatFieldNanNotQuoted
JsonInput.FloatFieldNegativeInfinityNotQuoted
JsonInput.Int32FieldLeadingZero
JsonInput.Int32FieldMinFloatValue.JsonOutput
JsonInput.Int32FieldMinValue.JsonOutput
JsonInput.Int32FieldNegativeWithLeadingZero
JsonInput.Int32FieldPlusSign
JsonInput.Int32MapFieldKeyNotQuoted
JsonInput.Int64FieldMaxValueNotQuoted.JsonOutput
JsonInput.Int64FieldMaxValueNotQuoted.ProtobufOutput
JsonInput.Int64MapFieldKeyNotQuoted
JsonInput.JsonWithComments
JsonInput.MapFieldKeyIsNull
JsonInput.MapFieldValueIsNull
JsonInput.OneofFieldDuplicate
JsonInput.OriginalProtoFieldName.JsonOutput
JsonInput.OriginalProtoFieldName.ProtobufOutput
JsonInput.RepeatedBoolWrapper.ProtobufOutput
JsonInput.RepeatedDoubleWrapper.ProtobufOutput
JsonInput.RepeatedFieldMessageElementIsNull
JsonInput.RepeatedFieldPrimitiveElementIsNull
JsonInput.RepeatedFieldTrailingComma
JsonInput.RepeatedFloatWrapper.ProtobufOutput
JsonInput.RepeatedInt32Wrapper.ProtobufOutput
JsonInput.RepeatedInt64Wrapper.ProtobufOutput
JsonInput.RepeatedUint32Wrapper.ProtobufOutput
JsonInput.RepeatedUint64Wrapper.ProtobufOutput
JsonInput.StringFieldInvalidEscape
JsonInput.StringFieldSurrogateInWrongOrder
JsonInput.StringFieldSurrogatePair.JsonOutput
JsonInput.StringFieldUnpairedHighSurrogate
JsonInput.StringFieldUnpairedLowSurrogate
JsonInput.StringFieldUnterminatedEscape
JsonInput.StringFieldUppercaseEscapeLetter
JsonInput.TimestampHas3FractionalDigits.Validator
JsonInput.TimestampHas6FractionalDigits.Validator
JsonInput.TimestampHas9FractionalDigits.Validator
JsonInput.TrailingCommaInAnObject
JsonInput.Uint32MapFieldKeyNotQuoted
JsonInput.Uint64FieldMaxValueNotQuoted.JsonOutput
JsonInput.Uint64FieldMaxValueNotQuoted.ProtobufOutput
JsonInput.Uint64MapFieldKeyNotQuoted
JsonInput.ValueAcceptNull.JsonOutput
JsonInput.ValueAcceptNull.ProtobufOutput
TimestampProtoInputTooLarge.JsonOutput
TimestampProtoInputTooSmall.JsonOutput
@@ -101,15 +101,26 @@ namespace Google.Protobuf.Conformance
{
return new ConformanceResponse { ParseError = e.Message };
}
switch (request.RequestedOutputFormat)
catch (InvalidJsonException e)
{
case global::Conformance.WireFormat.JSON:
var formatter = new JsonFormatter(new JsonFormatter.Settings(false, typeRegistry));
return new ConformanceResponse { JsonPayload = formatter.Format(message) };
case global::Conformance.WireFormat.PROTOBUF:
return new ConformanceResponse { ProtobufPayload = message.ToByteString() };
default:
throw new Exception("Unsupported request output format: " + request.PayloadCase);
return new ConformanceResponse { ParseError = e.Message };
}
try
{
switch (request.RequestedOutputFormat)
{
case global::Conformance.WireFormat.JSON:
var formatter = new JsonFormatter(new JsonFormatter.Settings(false, typeRegistry));
return new ConformanceResponse { JsonPayload = formatter.Format(message) };
case global::Conformance.WireFormat.PROTOBUF:
return new ConformanceResponse { ProtobufPayload = message.ToByteString() };
default:
throw new Exception("Unsupported request output format: " + request.PayloadCase);
}
}
catch (InvalidOperationException e)
{
return new ConformanceResponse { SerializeError = e.Message };
}
}

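The interleaved old and new lines above are hard to read in this view; here is a minimal sketch of the resulting control flow of the conformance runner after the change. `ParsePayload` is a hypothetical stand-in for the parsing code that precedes this hunk; everything else mirrors the diff.

```csharp
// Sketch only: the shape of the runner after this change (ParsePayload is hypothetical).
IMessage message;
try
{
    message = ParsePayload(request);
}
catch (InvalidProtocolBufferException e)
{
    return new ConformanceResponse { ParseError = e.Message };
}
catch (InvalidJsonException e)
{
    return new ConformanceResponse { ParseError = e.Message };
}
try
{
    switch (request.RequestedOutputFormat)
    {
        case global::Conformance.WireFormat.JSON:
            var formatter = new JsonFormatter(new JsonFormatter.Settings(false, typeRegistry));
            return new ConformanceResponse { JsonPayload = formatter.Format(message) };
        case global::Conformance.WireFormat.PROTOBUF:
            return new ConformanceResponse { ProtobufPayload = message.ToByteString() };
        default:
            throw new Exception("Unsupported request output format: " + request.PayloadCase);
    }
}
catch (InvalidOperationException e)
{
    // Serialization failures (e.g. invalid field masks or non-normalized durations)
    // are reported as SerializeError rather than crashing the runner.
    return new ConformanceResponse { SerializeError = e.Message };
}
```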
@@ -165,34 +165,31 @@ namespace Google.Protobuf
}

[Test]
public void UnknownEnumValueOmitted_SingleField()
public void UnknownEnumValueNumeric_SingleField()
{
var message = new TestAllTypes { SingleForeignEnum = (ForeignEnum) 100 };
AssertJson("{ }", JsonFormatter.Default.Format(message));
AssertJson("{ 'singleForeignEnum': 100 }", JsonFormatter.Default.Format(message));
}

[Test]
public void UnknownEnumValueOmitted_RepeatedField()
public void UnknownEnumValueNumeric_RepeatedField()
{
var message = new TestAllTypes { RepeatedForeignEnum = { ForeignEnum.FOREIGN_BAZ, (ForeignEnum) 100, ForeignEnum.FOREIGN_FOO } };
AssertJson("{ 'repeatedForeignEnum': [ 'FOREIGN_BAZ', 'FOREIGN_FOO' ] }", JsonFormatter.Default.Format(message));
AssertJson("{ 'repeatedForeignEnum': [ 'FOREIGN_BAZ', 100, 'FOREIGN_FOO' ] }", JsonFormatter.Default.Format(message));
}

[Test]
public void UnknownEnumValueOmitted_MapField()
public void UnknownEnumValueNumeric_MapField()
{
// This matches the C++ behaviour.
var message = new TestMap { MapInt32Enum = { { 1, MapEnum.MAP_ENUM_FOO }, { 2, (MapEnum) 100 }, { 3, MapEnum.MAP_ENUM_BAR } } };
AssertJson("{ 'mapInt32Enum': { '1': 'MAP_ENUM_FOO', '3': 'MAP_ENUM_BAR' } }", JsonFormatter.Default.Format(message));
AssertJson("{ 'mapInt32Enum': { '1': 'MAP_ENUM_FOO', '2': 100, '3': 'MAP_ENUM_BAR' } }", JsonFormatter.Default.Format(message));
}

[Test]
public void UnknownEnumValueOmitted_RepeatedField_AllEntriesUnknown()
public void UnknownEnumValue_RepeatedField_AllEntriesUnknown()
{
// *Maybe* we should hold off on writing the "[" until we find that we've got at least one value to write...
// but this is what happens at the moment, and it doesn't seem too awful.
var message = new TestAllTypes { RepeatedForeignEnum = { (ForeignEnum) 200, (ForeignEnum) 100 } };
AssertJson("{ 'repeatedForeignEnum': [ ] }", JsonFormatter.Default.Format(message));
AssertJson("{ 'repeatedForeignEnum': [ 200, 100 ] }", JsonFormatter.Default.Format(message));
}

[Test]
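These renamed tests capture the behavioural change in this PR: unknown enum values are now written to JSON as their numeric value instead of being silently omitted. A small sketch of what that looks like through the public API, using the same test-proto types as above:

```csharp
// Previously this formatted as "{ }"; with this change the unknown value survives as a number.
var message = new TestAllTypes { SingleForeignEnum = (ForeignEnum) 100 };
string json = JsonFormatter.Default.Format(message);
// json is now: { "singleForeignEnum": 100 }
```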
@@ -315,6 +312,15 @@ namespace Google.Protobuf

[Test]
[TestCase("1970-01-01T00:00:00Z", 0)]
[TestCase("1970-01-01T00:00:00.000000001Z", 1)]
[TestCase("1970-01-01T00:00:00.000000010Z", 10)]
[TestCase("1970-01-01T00:00:00.000000100Z", 100)]
[TestCase("1970-01-01T00:00:00.000001Z", 1000)]
[TestCase("1970-01-01T00:00:00.000010Z", 10000)]
[TestCase("1970-01-01T00:00:00.000100Z", 100000)]
[TestCase("1970-01-01T00:00:00.001Z", 1000000)]
[TestCase("1970-01-01T00:00:00.010Z", 10000000)]
[TestCase("1970-01-01T00:00:00.100Z", 100000000)]
[TestCase("1970-01-01T00:00:00.120Z", 120000000)]
[TestCase("1970-01-01T00:00:00.123Z", 123000000)]
@@ -350,6 +356,14 @@ namespace Google.Protobuf
[TestCase(0, 0, "0s")]
[TestCase(1, 0, "1s")]
[TestCase(-1, 0, "-1s")]
[TestCase(0, 1, "0.000000001s")]
[TestCase(0, 10, "0.000000010s")]
[TestCase(0, 100, "0.000000100s")]
[TestCase(0, 1000, "0.000001s")]
[TestCase(0, 10000, "0.000010s")]
[TestCase(0, 100000, "0.000100s")]
[TestCase(0, 1000000, "0.001s")]
[TestCase(0, 10000000, "0.010s")]
[TestCase(0, 100000000, "0.100s")]
[TestCase(0, 120000000, "0.120s")]
[TestCase(0, 123000000, "0.123s")]
@@ -362,14 +376,19 @@ namespace Google.Protobuf
[TestCase(0, -100000000, "-0.100s")]
[TestCase(1, 100000000, "1.100s")]
[TestCase(-1, -100000000, "-1.100s")]
// Non-normalized examples
[TestCase(1, 2123456789, "3.123456789s")]
[TestCase(1, -100000000, "0.900s")]
public void DurationStandalone(long seconds, int nanoseconds, string expected)
{
Assert.AreEqual(WrapInQuotes(expected), new Duration { Seconds = seconds, Nanos = nanoseconds }.ToString());
}

[Test]
[TestCase(1, 2123456789)]
[TestCase(1, -100000000)]
public void DurationStandalone_NonNormalized(long seconds, int nanoseconds)
{
Assert.Throws<InvalidOperationException>(() => new Duration { Seconds = seconds, Nanos = nanoseconds }.ToString());
}

[Test]
public void DurationField()
{
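The split between DurationStandalone and DurationStandalone_NonNormalized reflects another behaviour change: the formatter no longer normalizes duration values on the fly, it throws instead. A short illustration mirroring the test cases above (note that Duration.ToString() returns the JSON representation, quotes included):

```csharp
// A normalized duration still formats as before...
var ok = new Duration { Seconds = 1, Nanos = 100000000 };
Console.WriteLine(ok.ToString());   // prints the JSON string: "1.100s" (quotes included)

// ...but a non-normalized value (mixed signs here) now throws instead of printing "0.900s".
var bad = new Duration { Seconds = 1, Nanos = -100000000 };
// bad.ToString() throws InvalidOperationException
```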
@@ -395,6 +414,16 @@ namespace Google.Protobuf
AssertJson("{ 'a': null, 'b': false, 'c': 10.5, 'd': 'text', 'e': [ 't1', 5 ], 'f': { 'nested': 'value' } }", message.ToString());
}

[Test]
[TestCase("foo__bar")]
[TestCase("foo_3_ar")]
[TestCase("fooBar")]
public void FieldMaskInvalid(string input)
{
var mask = new FieldMask { Paths = { input } };
Assert.Throws<InvalidOperationException>(() => JsonFormatter.Default.Format(mask));
}

[Test]
public void FieldMaskStandalone()
{
@@ -71,6 +71,14 @@ namespace Google.Protobuf
Assert.Throws<InvalidProtocolBufferException>(() => JsonParser.Default.Parse<TestMap>(json));
}

[Test]
public void OriginalFieldNameAccepted()
{
var json = "{ \"single_int32\": 10 }";
var expected = new TestAllTypes { SingleInt32 = 10 };
Assert.AreEqual(expected, TestAllTypes.Parser.ParseJson(json));
}

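OriginalFieldNameAccepted documents that the parser now accepts the original proto field name as well as the lowerCamelCase JSON name (see the CreateJsonFieldMap change in MessageDescriptor further down). A brief sketch:

```csharp
// Both spellings parse to the same message.
var fromJsonName  = TestAllTypes.Parser.ParseJson("{ \"singleInt32\": 10 }");
var fromProtoName = TestAllTypes.Parser.ParseJson("{ \"single_int32\": 10 }");
Assert.AreEqual(fromJsonName, fromProtoName); // both have SingleInt32 == 10
```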
[Test]
public void SourceContextRoundtrip()
{
@@ -116,7 +124,9 @@ namespace Google.Protobuf
[Test]
public void SingularWrappers_ExplicitNulls()
{
var message = new TestWellKnownTypes();
// When we parse the "valueField": null part, we remember it... basically, it's one case
// where explicit default values don't fully roundtrip.
var message = new TestWellKnownTypes { ValueField = Value.ForNull() };
var json = new JsonFormatter(new JsonFormatter.Settings(true)).Format(message);
var parsed = JsonParser.Default.Parse<TestWellKnownTypes>(json);
Assert.AreEqual(message, parsed);
@@ -142,6 +152,14 @@ namespace Google.Protobuf
Assert.AreEqual(expected, parsed);
}

[Test]
public void ExplicitNullValue()
{
string json = "{\"valueField\": null}";
var message = JsonParser.Default.Parse<TestWellKnownTypes>(json);
Assert.AreEqual(new TestWellKnownTypes { ValueField = Value.ForNull() }, message);
}

[Test]
public void BytesWrapper_Standalone()
{
@@ -170,6 +188,36 @@ namespace Google.Protobuf
AssertRoundtrip(message);
}

[Test]
public void RepeatedField_NullElementProhibited()
{
string json = "{ \"repeated_foreign_message\": [null] }";
Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
}

[Test]
public void RepeatedField_NullOverallValueAllowed()
{
string json = "{ \"repeated_foreign_message\": null }";
Assert.AreEqual(new TestAllTypes(), TestAllTypes.Parser.ParseJson(json));
}

[Test]
[TestCase("{ \"mapInt32Int32\": { \"10\": null }")]
[TestCase("{ \"mapStringString\": { \"abc\": null }")]
[TestCase("{ \"mapInt32ForeignMessage\": { \"10\": null }")]
public void MapField_NullValueProhibited(string json)
{
Assert.Throws<InvalidProtocolBufferException>(() => TestMap.Parser.ParseJson(json));
}

[Test]
public void MapField_NullOverallValueAllowed()
{
string json = "{ \"mapInt32Int32\": null }";
Assert.AreEqual(new TestMap(), TestMap.Parser.ParseJson(json));
}
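Taken together, the null-handling tests above pin down the rules the parser now follows. A compact summary, expressed against the same test protos:

```csharp
// null for a whole repeated or map field: accepted, treated as empty.
TestAllTypes.Parser.ParseJson("{ \"repeated_foreign_message\": null }"); // equals new TestAllTypes()
TestMap.Parser.ParseJson("{ \"mapInt32Int32\": null }");                 // equals new TestMap()

// null for a repeated element or a map value: rejected.
// "{ \"repeated_foreign_message\": [null] }"  -> InvalidProtocolBufferException
// "{ \"mapInt32Int32\": { \"10\": null } }"   -> InvalidProtocolBufferException

// null for a singular google.protobuf.Value field: remembered as Value.ForNull().
// "{ \"valueField\": null }"                  -> ValueField == Value.ForNull()
```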

[Test]
public void IndividualWrapperTypes()
{
@@ -715,7 +763,6 @@ namespace Google.Protobuf
[TestCase("--0.123456789s", Description = "Double minus sign")]
// Violate upper/lower bounds in various ways
[TestCase("315576000001s", Description = "Integer part too large")]
[TestCase("315576000000.000000001s", Description = "Integer part is upper bound; non-zero fraction")]
[TestCase("3155760000000s", Description = "Integer part too long (positive)")]
[TestCase("-3155760000000s", Description = "Integer part too long (negative)")]
public void Duration_Invalid(string jsonValue)
@@ -741,6 +788,14 @@ namespace Google.Protobuf
CollectionAssert.AreEqual(expectedPaths, parsed.Paths);
}

[Test]
[TestCase("foo_bar")]
public void FieldMask_Invalid(string jsonValue)
{
string json = WrapInQuotes(jsonValue);
Assert.Throws<InvalidProtocolBufferException>(() => FieldMask.Parser.ParseJson(json));
}

[Test]
public void Any_RegularMessage()
{
@@ -762,6 +817,13 @@ namespace Google.Protobuf
Assert.Throws<InvalidOperationException>(() => Any.Parser.ParseJson(json));
}

[Test]
public void Any_NoTypeUrl()
{
string json = "{ \"foo\": \"bar\" }";
Assert.Throws<InvalidProtocolBufferException>(() => Any.Parser.ParseJson(json));
}

[Test]
public void Any_WellKnownType()
{
@@ -814,6 +876,42 @@ namespace Google.Protobuf
Assert.Throws<InvalidProtocolBufferException>(() => parser63.Parse<TestRecursiveMessage>(data64));
}

[Test]
[TestCase("AQI")]
[TestCase("_-==")]
public void Bytes_InvalidBase64(string badBase64)
{
string json = "{ \"singleBytes\": \"" + badBase64 + "\" }";
Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
}

[Test]
[TestCase("\"FOREIGN_BAR\"", ForeignEnum.FOREIGN_BAR)]
[TestCase("5", ForeignEnum.FOREIGN_BAR)]
[TestCase("100", (ForeignEnum) 100)]
public void EnumValid(string value, ForeignEnum expectedValue)
{
string json = "{ \"singleForeignEnum\": " + value + " }";
var parsed = TestAllTypes.Parser.ParseJson(json);
Assert.AreEqual(new TestAllTypes { SingleForeignEnum = expectedValue }, parsed);
}

[Test]
[TestCase("\"NOT_A_VALID_VALUE\"")]
[TestCase("5.5")]
public void Enum_Invalid(string value)
{
string json = "{ \"singleForeignEnum\": " + value + " }";
Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
}

[Test]
public void OneofDuplicate_Invalid()
{
string json = "{ \"oneofString\": \"x\", \"oneofUint32\": 10 }";
Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
}

/// <summary>
/// Various tests use strings which have quotes round them for parsing or as the result
/// of formatting, but without those quotes being specified in the tests (for the sake of readability).
@@ -31,7 +31,7 @@ namespace Google.Protobuf.TestProtos {
"L3Byb3RvYnVmL3NvdXJjZV9jb250ZXh0LnByb3RvGhxnb29nbGUvcHJvdG9i",
"dWYvc3RydWN0LnByb3RvGh9nb29nbGUvcHJvdG9idWYvdGltZXN0YW1wLnBy",
"b3RvGhpnb29nbGUvcHJvdG9idWYvdHlwZS5wcm90bxoeZ29vZ2xlL3Byb3Rv",
"YnVmL3dyYXBwZXJzLnByb3RvIpEHChJUZXN0V2VsbEtub3duVHlwZXMSJwoJ",
"YnVmL3dyYXBwZXJzLnByb3RvIr4HChJUZXN0V2VsbEtub3duVHlwZXMSJwoJ",
"YW55X2ZpZWxkGAEgASgLMhQuZ29vZ2xlLnByb3RvYnVmLkFueRInCglhcGlf",
"ZmllbGQYAiABKAsyFC5nb29nbGUucHJvdG9idWYuQXBpEjEKDmR1cmF0aW9u",
"X2ZpZWxkGAMgASgLMhkuZ29vZ2xlLnByb3RvYnVmLkR1cmF0aW9uEisKC2Vt",
@@ -51,7 +51,8 @@ namespace Google.Protobuf.TestProtos {
"cm90b2J1Zi5VSW50MzJWYWx1ZRIuCgpib29sX2ZpZWxkGBAgASgLMhouZ29v",
"Z2xlLnByb3RvYnVmLkJvb2xWYWx1ZRIyCgxzdHJpbmdfZmllbGQYESABKAsy",
"HC5nb29nbGUucHJvdG9idWYuU3RyaW5nVmFsdWUSMAoLYnl0ZXNfZmllbGQY",
"EiABKAsyGy5nb29nbGUucHJvdG9idWYuQnl0ZXNWYWx1ZSKVBwoWUmVwZWF0",
"EiABKAsyGy5nb29nbGUucHJvdG9idWYuQnl0ZXNWYWx1ZRIrCgt2YWx1ZV9m",
"aWVsZBgTIAEoCzIWLmdvb2dsZS5wcm90b2J1Zi5WYWx1ZSKVBwoWUmVwZWF0",
"ZWRXZWxsS25vd25UeXBlcxInCglhbnlfZmllbGQYASADKAsyFC5nb29nbGUu",
"cHJvdG9idWYuQW55EicKCWFwaV9maWVsZBgCIAMoCzIULmdvb2dsZS5wcm90",
"b2J1Zi5BcGkSMQoOZHVyYXRpb25fZmllbGQYAyADKAsyGS5nb29nbGUucHJv",
@@ -162,7 +163,7 @@ namespace Google.Protobuf.TestProtos {
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::Google.Protobuf.WellKnownTypes.AnyReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.ApiReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.DurationReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.EmptyReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.FieldMaskReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.SourceContextReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.StructReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TimestampReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TypeReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor, },
new pbr::GeneratedCodeInfo(null, new pbr::GeneratedCodeInfo[] {
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.TestWellKnownTypes), global::Google.Protobuf.TestProtos.TestWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.TestWellKnownTypes), global::Google.Protobuf.TestProtos.TestWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField", "ValueField" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.RepeatedWellKnownTypes), global::Google.Protobuf.TestProtos.RepeatedWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, null, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.OneofWellKnownTypes), global::Google.Protobuf.TestProtos.OneofWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, new[]{ "OneofField" }, null, null),
new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.MapWellKnownTypes), global::Google.Protobuf.TestProtos.MapWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, null, null, new pbr::GeneratedCodeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, })
@@ -215,6 +216,7 @@ namespace Google.Protobuf.TestProtos {
BoolField = other.BoolField;
StringField = other.StringField;
BytesField = other.BytesField;
ValueField = other.valueField_ != null ? other.ValueField.Clone() : null;
}

public TestWellKnownTypes Clone() {
@@ -410,6 +412,19 @@ namespace Google.Protobuf.TestProtos {
}
}

/// <summary>Field number for the "value_field" field.</summary>
public const int ValueFieldFieldNumber = 19;
private global::Google.Protobuf.WellKnownTypes.Value valueField_;
/// <summary>
/// Part of struct, but useful to be able to test separately
/// </summary>
public global::Google.Protobuf.WellKnownTypes.Value ValueField {
get { return valueField_; }
set {
valueField_ = value;
}
}

public override bool Equals(object other) {
return Equals(other as TestWellKnownTypes);
}
@@ -439,6 +454,7 @@ namespace Google.Protobuf.TestProtos {
if (BoolField != other.BoolField) return false;
if (StringField != other.StringField) return false;
if (BytesField != other.BytesField) return false;
if (!object.Equals(ValueField, other.ValueField)) return false;
return true;
}

@@ -462,6 +478,7 @@ namespace Google.Protobuf.TestProtos {
if (boolField_ != null) hash ^= BoolField.GetHashCode();
if (stringField_ != null) hash ^= StringField.GetHashCode();
if (bytesField_ != null) hash ^= BytesField.GetHashCode();
if (valueField_ != null) hash ^= ValueField.GetHashCode();
return hash;
}

@@ -533,6 +550,10 @@ namespace Google.Protobuf.TestProtos {
if (bytesField_ != null) {
_single_bytesField_codec.WriteTagAndValue(output, BytesField);
}
if (valueField_ != null) {
output.WriteRawTag(154, 1);
output.WriteMessage(ValueField);
}
}

public int CalculateSize() {
@@ -591,6 +612,9 @@ namespace Google.Protobuf.TestProtos {
if (bytesField_ != null) {
size += _single_bytesField_codec.CalculateSizeWithTag(BytesField);
}
if (valueField_ != null) {
size += 2 + pb::CodedOutputStream.ComputeMessageSize(ValueField);
}
return size;
}

@@ -697,6 +721,12 @@ namespace Google.Protobuf.TestProtos {
BytesField = other.BytesField;
}
}
if (other.valueField_ != null) {
if (valueField_ == null) {
valueField_ = new global::Google.Protobuf.WellKnownTypes.Value();
}
ValueField.MergeFrom(other.ValueField);
}
}

public void MergeFrom(pb::CodedInputStream input) {
@@ -832,6 +862,13 @@ namespace Google.Protobuf.TestProtos {
}
break;
}
case 154: {
if (valueField_ == null) {
valueField_ = new global::Google.Protobuf.WellKnownTypes.Value();
}
input.ReadMessage(valueField_);
break;
}
}
}
}
@@ -50,11 +50,6 @@ namespace Google.Protobuf.WellKnownTypes
// Rounding is towards 0
Assert.AreEqual(TimeSpan.FromTicks(2), new Duration { Nanos = 250 }.ToTimeSpan());
Assert.AreEqual(TimeSpan.FromTicks(-2), new Duration { Nanos = -250 }.ToTimeSpan());

// Non-normalized durations
Assert.AreEqual(TimeSpan.FromSeconds(3), new Duration { Seconds = 1, Nanos = 2 * Duration.NanosecondsPerSecond }.ToTimeSpan());
Assert.AreEqual(TimeSpan.FromSeconds(1), new Duration { Seconds = 3, Nanos = -2 * Duration.NanosecondsPerSecond }.ToTimeSpan());
Assert.AreEqual(TimeSpan.FromSeconds(-1), new Duration { Seconds = 1, Nanos = -2 * Duration.NanosecondsPerSecond }.ToTimeSpan());
}

[Test]
@@ -100,5 +95,30 @@ namespace Google.Protobuf.WellKnownTypes
Assert.AreEqual(new Duration { Seconds = 1 }, Duration.FromTimeSpan(TimeSpan.FromSeconds(1)));
Assert.AreEqual(new Duration { Nanos = Duration.NanosecondsPerTick }, Duration.FromTimeSpan(TimeSpan.FromTicks(1)));
}

[Test]
[TestCase(0, Duration.MaxNanoseconds + 1)]
[TestCase(0, Duration.MinNanoseconds - 1)]
[TestCase(Duration.MinSeconds - 1, 0)]
[TestCase(Duration.MaxSeconds + 1, 0)]
[TestCase(1, -1)]
[TestCase(-1, 1)]
public void ToTimeSpan_Invalid(long seconds, int nanoseconds)
{
var duration = new Duration { Seconds = seconds, Nanos = nanoseconds };
Assert.Throws<InvalidOperationException>(() => duration.ToTimeSpan());
}

[Test]
[TestCase(0, Duration.MaxNanoseconds)]
[TestCase(0, Duration.MinNanoseconds)]
[TestCase(Duration.MinSeconds, Duration.MinNanoseconds)]
[TestCase(Duration.MaxSeconds, Duration.MaxNanoseconds)]
public void ToTimeSpan_Valid(long seconds, int nanoseconds)
{
// Only testing that these values don't throw, unlike their similar tests in ToTimeSpan_Invalid
var duration = new Duration { Seconds = seconds, Nanos = nanoseconds };
duration.ToTimeSpan();
}
}
}
@@ -61,6 +61,29 @@ namespace Google.Protobuf.WellKnownTypes
Assert.AreEqual(new DateTime(1969, 12, 31, 23, 59, 59).AddMilliseconds(1), t2.ToDateTime());
}

[Test]
[TestCase(Timestamp.UnixSecondsAtBclMinValue - 1, Timestamp.MaxNanos)]
[TestCase(Timestamp.UnixSecondsAtBclMaxValue + 1, 0)]
[TestCase(0, -1)]
[TestCase(0, Timestamp.MaxNanos + 1)]
public void ToDateTime_OutOfRange(long seconds, int nanoseconds)
{
var value = new Timestamp { Seconds = seconds, Nanos = nanoseconds };
Assert.Throws<InvalidOperationException>(() => value.ToDateTime());
}

// 1ns larger or smaller than the above values
[Test]
[TestCase(Timestamp.UnixSecondsAtBclMinValue, 0)]
[TestCase(Timestamp.UnixSecondsAtBclMaxValue, Timestamp.MaxNanos)]
[TestCase(0, 0)]
[TestCase(0, Timestamp.MaxNanos)]
public void ToDateTime_ValidBoundaries(long seconds, int nanoseconds)
{
var value = new Timestamp { Seconds = seconds, Nanos = nanoseconds };
value.ToDateTime();
}

private static void AssertRoundtrip(Timestamp timestamp, DateTime dateTime)
{
Assert.AreEqual(timestamp, Timestamp.FromDateTime(dateTime));
@@ -148,6 +148,30 @@ namespace Google.Protobuf.WellKnownTypes
Assert.AreEqual("Second", message.StringField[1]);
}

[Test]
public void RepeatedWrappersBinaryFormat()
{
// At one point we accidentally used a packed format for repeated wrappers, which is wrong (and weird).
// This test is just to prove that we use the right format.

var rawOutput = new MemoryStream();
var output = new CodedOutputStream(rawOutput);
// Write a value of 5
output.WriteTag(RepeatedWellKnownTypes.Int32FieldFieldNumber, WireFormat.WireType.LengthDelimited);
output.WriteLength(2);
output.WriteTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint);
output.WriteInt32(5);
// Write a value of 0 (empty message)
output.WriteTag(RepeatedWellKnownTypes.Int32FieldFieldNumber, WireFormat.WireType.LengthDelimited);
output.WriteLength(0);
output.Flush();
var expectedBytes = rawOutput.ToArray();

var message = new RepeatedWellKnownTypes { Int32Field = { 5, 0 } };
var actualBytes = message.ToByteArray();
Assert.AreEqual(expectedBytes, actualBytes);
}

[Test]
public void MapWrappersSerializeDeserialize()
{
@@ -34,7 +34,6 @@ using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using Google.Protobuf.Compatibility;

namespace Google.Protobuf.Collections
{
@@ -96,8 +95,8 @@ namespace Google.Protobuf.Collections
// iteration.
uint tag = input.LastTag;
var reader = codec.ValueReader;
// Value types can be packed or not.
if (typeof(T).IsValueType() && WireFormat.GetTagWireType(tag) == WireFormat.WireType.LengthDelimited)
// Non-nullable value types can be packed or not.
if (FieldCodec<T>.IsPackedRepeatedField(tag))
{
int length = input.ReadLength();
if (length > 0)
@@ -134,7 +133,7 @@ namespace Google.Protobuf.Collections
return 0;
}
uint tag = codec.Tag;
if (typeof(T).IsValueType() && WireFormat.GetTagWireType(tag) == WireFormat.WireType.LengthDelimited)
if (codec.PackedRepeatedField)
{
int dataSize = CalculatePackedDataSize(codec);
return CodedOutputStream.ComputeRawVarint32Size(tag) +
@@ -186,7 +185,7 @@ namespace Google.Protobuf.Collections
}
var writer = codec.ValueWriter;
var tag = codec.Tag;
if (typeof(T).IsValueType() && WireFormat.GetTagWireType(tag) == WireFormat.WireType.LengthDelimited)
if (codec.PackedRepeatedField)
{
// Packed primitive type
uint size = (uint)CalculatePackedDataSize(codec);
@@ -30,6 +30,7 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion

using Google.Protobuf.Compatibility;
using Google.Protobuf.WellKnownTypes;
using System;
using System.Collections.Generic;
@@ -329,17 +330,24 @@ namespace Google.Protobuf
}

/// <summary>
/// <para>
/// An encode/decode pair for a single field. This effectively encapsulates
/// all the information needed to read or write the field value from/to a coded
/// stream.
/// </para>
/// <para>
/// This class is public and has to be as it is used by generated code, but its public
/// API is very limited - just what the generated code needs to call directly.
/// </para>
/// </summary>
/// <remarks>
/// This never writes default values to the stream, and is not currently designed
/// to play well with packed arrays.
/// This never writes default values to the stream, and does not address "packedness"
/// in repeated fields itself, other than to know whether or not the field *should* be packed.
/// </remarks>
public sealed class FieldCodec<T>
{
private static readonly T DefaultDefault;
private static readonly bool TypeSupportsPacking = typeof(T).IsValueType() && Nullable.GetUnderlyingType(typeof(T)) == null;

static FieldCodec()
{
@@ -354,16 +362,60 @@ namespace Google.Protobuf
// Otherwise it's the default value of the CLR type
}

private readonly Func<CodedInputStream, T> reader;
private readonly Action<CodedOutputStream, T> writer;
private readonly Func<T, int> sizeCalculator;
private readonly uint tag;
internal static bool IsPackedRepeatedField(uint tag) =>
TypeSupportsPacking && WireFormat.GetTagWireType(tag) == WireFormat.WireType.LengthDelimited;

internal bool PackedRepeatedField { get; }

/// <summary>
/// Returns a delegate to write a value (unconditionally) to a coded output stream.
/// </summary>
internal Action<CodedOutputStream, T> ValueWriter { get; }

/// <summary>
/// Returns the size calculator for just a value.
/// </summary>
internal Func<T, int> ValueSizeCalculator { get; }

/// <summary>
/// Returns a delegate to read a value from a coded input stream. It is assumed that
/// the stream is already positioned on the appropriate tag.
/// </summary>
internal Func<CodedInputStream, T> ValueReader { get; }

/// <summary>
/// Returns the fixed size for an entry, or 0 if sizes vary.
/// </summary>
internal int FixedSize { get; }

/// <summary>
/// Gets the tag of the codec.
/// </summary>
/// <value>
/// The tag of the codec.
/// </value>
internal uint Tag { get; }

/// <summary>
/// Default value for this codec. Usually the same for every instance of the same type, but
/// for string/ByteString wrapper fields the codec's default value is null, whereas for
/// other string/ByteString fields it's "" or ByteString.Empty.
/// </summary>
/// <value>
/// The default value of the codec's type.
/// </value>
internal T DefaultValue { get; }

private readonly int tagSize;
private readonly int fixedSize;
// Default value for this codec. Usually the same for every instance of the same type, but
// for string/ByteString wrapper fields the codec's default value is null, whereas for
// other string/ByteString fields it's "" or ByteString.Empty.
private readonly T defaultValue;

internal FieldCodec(
Func<CodedInputStream, T> reader,
Action<CodedOutputStream, T> writer,
int fixedSize,
uint tag) : this(reader, writer, _ => fixedSize, tag)
{
FixedSize = fixedSize;
}

internal FieldCodec(
Func<CodedInputStream, T> reader,
@@ -380,66 +432,17 @@ namespace Google.Protobuf
uint tag,
T defaultValue)
{
this.reader = reader;
this.writer = writer;
this.sizeCalculator = sizeCalculator;
this.fixedSize = 0;
this.tag = tag;
this.defaultValue = defaultValue;
ValueReader = reader;
ValueWriter = writer;
ValueSizeCalculator = sizeCalculator;
FixedSize = 0;
Tag = tag;
DefaultValue = defaultValue;
tagSize = CodedOutputStream.ComputeRawVarint32Size(tag);
// Detect packed-ness once, so we can check for it within RepeatedField<T>.
PackedRepeatedField = IsPackedRepeatedField(tag);
}

internal FieldCodec(
Func<CodedInputStream, T> reader,
Action<CodedOutputStream, T> writer,
int fixedSize,
uint tag)
{
this.reader = reader;
this.writer = writer;
this.sizeCalculator = _ => fixedSize;
this.fixedSize = fixedSize;
this.tag = tag;
tagSize = CodedOutputStream.ComputeRawVarint32Size(tag);
}

/// <summary>
/// Returns the size calculator for just a value.
/// </summary>
internal Func<T, int> ValueSizeCalculator { get { return sizeCalculator; } }

/// <summary>
/// Returns a delegate to write a value (unconditionally) to a coded output stream.
/// </summary>
internal Action<CodedOutputStream, T> ValueWriter { get { return writer; } }

/// <summary>
/// Returns a delegate to read a value from a coded input stream. It is assumed that
/// the stream is already positioned on the appropriate tag.
/// </summary>
internal Func<CodedInputStream, T> ValueReader { get { return reader; } }

/// <summary>
/// Returns the fixed size for an entry, or 0 if sizes vary.
/// </summary>
internal int FixedSize { get { return fixedSize; } }

/// <summary>
/// Gets the tag of the codec.
/// </summary>
/// <value>
/// The tag of the codec.
/// </value>
public uint Tag { get { return tag; } }

/// <summary>
/// Gets the default value of the codec's type.
/// </summary>
/// <value>
/// The default value of the codec's type.
/// </value>
public T DefaultValue { get { return defaultValue; } }

/// <summary>
/// Write a tag and the given value, *if* the value is not the default.
/// </summary>
@@ -447,8 +450,8 @@ namespace Google.Protobuf
{
if (!IsDefault(value))
{
output.WriteTag(tag);
writer(output, value);
output.WriteTag(Tag);
ValueWriter(output, value);
}
}

@@ -457,23 +460,14 @@ namespace Google.Protobuf
/// </summary>
/// <param name="input">The input stream to read from.</param>
/// <returns>The value read from the stream.</returns>
public T Read(CodedInputStream input)
{
return reader(input);
}
public T Read(CodedInputStream input) => ValueReader(input);

/// <summary>
/// Calculates the size required to write the given value, with a tag,
/// if the value is not the default.
/// </summary>
public int CalculateSizeWithTag(T value)
{
return IsDefault(value) ? 0 : sizeCalculator(value) + tagSize;
}
public int CalculateSizeWithTag(T value) => IsDefault(value) ? 0 : ValueSizeCalculator(value) + tagSize;

private bool IsDefault(T value)
{
return EqualityComparer<T>.Default.Equals(value, defaultValue);
}
private bool IsDefault(T value) => EqualityComparer<T>.Default.Equals(value, DefaultValue);
}
}
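The key structural change in FieldCodec&lt;T&gt; is that packed-ness is computed once from the tag and exposed to RepeatedField&lt;T&gt;. As a standalone restatement of the rule (not the library's internal API; it relies only on the standard protobuf tag encoding, where the low three bits of a tag are the wire type and 2 means length-delimited):

```csharp
// A repeated field can use the packed encoding only when the element type is a
// non-nullable value type and the field's tag uses the length-delimited wire type.
static bool IsPackedRepeatedField<T>(uint tag) =>
    typeof(T).IsValueType &&
    Nullable.GetUnderlyingType(typeof(T)) == null &&
    (tag & 7) == 2; // wire type 2 = length-delimited
```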
@@ -30,6 +30,7 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion

using System;
using System.IO;

namespace Google.Protobuf
@@ -45,6 +46,11 @@ namespace Google.Protobuf
{
}

internal InvalidProtocolBufferException(string message, Exception innerException)
: base(message, innerException)
{
}

internal static InvalidProtocolBufferException MoreDataAvailable()
{
return new InvalidProtocolBufferException(
@@ -82,6 +88,11 @@ namespace Google.Protobuf
"Protocol message contained an invalid tag (zero).");
}

internal static InvalidProtocolBufferException InvalidBase64(Exception innerException)
{
return new InvalidProtocolBufferException("Invalid base64 data", innerException);
}

internal static InvalidProtocolBufferException InvalidEndTag()
{
return new InvalidProtocolBufferException(
@@ -205,11 +205,6 @@ namespace Google.Protobuf
{
continue;
}
// Omit awkward (single) values such as unknown enum values
if (!field.IsRepeated && !field.IsMap && !CanWriteSingleValue(value))
{
continue;
}

// Okay, all tests complete: let's write the field value...
if (!first)
@@ -224,6 +219,31 @@ namespace Google.Protobuf
return !first;
}

/// <summary>
/// Camel-case converter with added strictness for field mask formatting.
/// </summary>
/// <exception cref="InvalidOperationException">The field mask is invalid for JSON representation</exception>
private static string ToCamelCaseForFieldMask(string input)
{
for (int i = 0; i < input.Length; i++)
{
char c = input[i];
if (c >= 'A' && c <= 'Z')
{
throw new InvalidOperationException($"Invalid field mask to be converted to JSON: {input}");
}
if (c == '_' && i < input.Length - 1)
{
char next = input[i + 1];
if (next < 'a' || next > 'z')
{
throw new InvalidOperationException($"Invalid field mask to be converted to JSON: {input}");
}
}
}
return ToCamelCase(input);
}

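The stricter conversion is observable through the formatter; these are the kinds of inputs the FieldMaskInvalid test above feeds in:

```csharp
var ok = new FieldMask { Paths = { "foo_bar" } };
Console.WriteLine(JsonFormatter.Default.Format(ok)); // "fooBar" (formatted as a JSON string)

var bad = new FieldMask { Paths = { "foo__bar" } };   // double underscore
// JsonFormatter.Default.Format(bad) now throws InvalidOperationException,
// as do paths containing upper-case characters such as "fooBar".
```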
// Converted from src/google/protobuf/util/internal/utility.cc ToCamelCase
// TODO: Use the new field in FieldDescriptor.
internal static string ToCamelCase(string input)
@@ -372,7 +392,14 @@ namespace Google.Protobuf
}
else if (value is System.Enum)
{
WriteString(builder, value.ToString());
if (System.Enum.IsDefined(value.GetType(), value))
{
WriteString(builder, value.ToString());
}
else
{
WriteValue(builder, (int) value);
}
}
else if (value is float || value is double)
{
@@ -485,13 +512,14 @@ namespace Google.Protobuf
int nanos = (int) value.Descriptor.Fields[Timestamp.NanosFieldNumber].Accessor.GetValue(value);
long seconds = (long) value.Descriptor.Fields[Timestamp.SecondsFieldNumber].Accessor.GetValue(value);

// Even if the original message isn't using the built-in classes, we can still build one... and then
// rely on it being normalized.
Timestamp normalized = Timestamp.Normalize(seconds, nanos);
// Even if the original message isn't using the built-in classes, we can still build one... and its
// conversion will check whether or not it's normalized.
// TODO: Perhaps the diagnostic-only formatter should not throw for non-normalized values?
Timestamp ts = new Timestamp { Seconds = seconds, Nanos = nanos };
// Use .NET's formatting for the value down to the second, including an opening double quote (as it's a string value)
DateTime dateTime = normalized.ToDateTime();
DateTime dateTime = ts.ToDateTime();
builder.Append(dateTime.ToString("yyyy'-'MM'-'dd'T'HH:mm:ss", CultureInfo.InvariantCulture));
AppendNanoseconds(builder, Math.Abs(normalized.Nanos));
AppendNanoseconds(builder, Math.Abs(ts.Nanos));
builder.Append("Z\"");
}

@@ -502,25 +530,29 @@ namespace Google.Protobuf
int nanos = (int) value.Descriptor.Fields[Duration.NanosFieldNumber].Accessor.GetValue(value);
long seconds = (long) value.Descriptor.Fields[Duration.SecondsFieldNumber].Accessor.GetValue(value);

// TODO: Perhaps the diagnostic-only formatter should not throw for non-normalized values?
// Even if the original message isn't using the built-in classes, we can still build one... and then
// rely on it being normalized.
Duration normalized = Duration.Normalize(seconds, nanos);
if (!Duration.IsNormalized(seconds, nanos))
{
throw new InvalidOperationException("Non-normalized duration value");
}

// The seconds part will normally provide the minus sign if we need it, but not if it's 0...
if (normalized.Seconds == 0 && normalized.Nanos < 0)
if (seconds == 0 && nanos < 0)
{
builder.Append('-');
}

builder.Append(normalized.Seconds.ToString("d", CultureInfo.InvariantCulture));
AppendNanoseconds(builder, Math.Abs(normalized.Nanos));
builder.Append(seconds.ToString("d", CultureInfo.InvariantCulture));
AppendNanoseconds(builder, Math.Abs(nanos));
builder.Append("s\"");
}

private void WriteFieldMask(StringBuilder builder, IMessage value)
{
IList paths = (IList) value.Descriptor.Fields[FieldMask.PathsFieldNumber].Accessor.GetValue(value);
WriteString(builder, string.Join(",", paths.Cast<string>().Select(ToCamelCase)));
WriteString(builder, string.Join(",", paths.Cast<string>().Select(ToCamelCaseForFieldMask)));
}

private void WriteAny(StringBuilder builder, IMessage value)
@@ -598,15 +630,15 @@ namespace Google.Protobuf
// Output to 3, 6 or 9 digits.
if (nanos % 1000000 == 0)
{
builder.Append((nanos / 1000000).ToString("d", CultureInfo.InvariantCulture));
builder.Append((nanos / 1000000).ToString("d3", CultureInfo.InvariantCulture));
}
else if (nanos % 1000 == 0)
{
builder.Append((nanos / 1000).ToString("d", CultureInfo.InvariantCulture));
builder.Append((nanos / 1000).ToString("d6", CultureInfo.InvariantCulture));
}
else
{
builder.Append(nanos.ToString("d", CultureInfo.InvariantCulture));
builder.Append(nanos.ToString("d9", CultureInfo.InvariantCulture));
}
}
}
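The change from "d" to "d3"/"d6"/"d9" fixes missing leading zeros in the fractional part. A tiny worked example of why it matters:

```csharp
// 10 milliseconds is 10000000 nanoseconds; dividing by 1000000 gives 10.
Console.WriteLine((10000000 / 1000000).ToString("d", CultureInfo.InvariantCulture));  // "10"  -> would render ".10Z" (wrong)
Console.WriteLine((10000000 / 1000000).ToString("d3", CultureInfo.InvariantCulture)); // "010" -> renders ".010Z" (correct)
```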
@@ -674,10 +706,6 @@ namespace Google.Protobuf
bool first = true;
foreach (var value in list)
{
if (!CanWriteSingleValue(value))
{
continue;
}
if (!first)
{
builder.Append(PropertySeparator);
@@ -695,10 +723,6 @@ namespace Google.Protobuf
// This will box each pair. Could use IDictionaryEnumerator, but that's ugly in terms of disposal.
foreach (DictionaryEntry pair in dictionary)
{
if (!CanWriteSingleValue(pair.Value))
{
continue;
}
if (!first)
{
builder.Append(PropertySeparator);
@@ -168,6 +168,10 @@ namespace Google.Protobuf
}
var descriptor = message.Descriptor;
var jsonFieldMap = descriptor.Fields.ByJsonName();
// All the oneof fields we've already accounted for - we can only see each of them once.
// The set is created lazily to avoid the overhead of creating a set for every message
// we parsed, when oneofs are relatively rare.
HashSet<OneofDescriptor> seenOneofs = null;
while (true)
{
token = tokenizer.Next();
@@ -183,6 +187,17 @@ namespace Google.Protobuf
FieldDescriptor field;
if (jsonFieldMap.TryGetValue(name, out field))
{
if (field.ContainingOneof != null)
{
if (seenOneofs == null)
{
seenOneofs = new HashSet<OneofDescriptor>();
}
if (!seenOneofs.Add(field.ContainingOneof))
{
throw new InvalidProtocolBufferException($"Multiple values specified for oneof {field.ContainingOneof.Name}");
}
}
MergeField(message, field, tokenizer);
}
else
@@ -200,10 +215,15 @@ namespace Google.Protobuf
var token = tokenizer.Next();
if (token.Type == JsonToken.TokenType.Null)
{
// Clear the field if we see a null token, unless it's for a singular field of type
// google.protobuf.Value.
// Note: different from Java API, which just ignores it.
// TODO: Bring it more in line? Discuss...
field.Accessor.Clear(message);
return;
if (field.IsMap || field.IsRepeated || !IsGoogleProtobufValueField(field))
{
field.Accessor.Clear(message);
return;
}
}
tokenizer.PushBack(token);

@@ -239,6 +259,10 @@ namespace Google.Protobuf
return;
}
tokenizer.PushBack(token);
if (token.Type == JsonToken.TokenType.Null)
{
throw new InvalidProtocolBufferException("Repeated field elements cannot be null");
}
list.Add(ParseSingleValue(field, tokenizer));
}
}
@@ -270,19 +294,30 @@ namespace Google.Protobuf
}
object key = ParseMapKey(keyField, token.StringValue);
object value = ParseSingleValue(valueField, tokenizer);
// TODO: Null handling
if (value == null)
{
throw new InvalidProtocolBufferException("Map values must not be null");
}
dictionary[key] = value;
}
}

private static bool IsGoogleProtobufValueField(FieldDescriptor field)
{
return field.FieldType == FieldType.Message &&
field.MessageType.FullName == Value.Descriptor.FullName;
}

private object ParseSingleValue(FieldDescriptor field, JsonTokenizer tokenizer)
{
var token = tokenizer.Next();
if (token.Type == JsonToken.TokenType.Null)
{
if (field.FieldType == FieldType.Message && field.MessageType.FullName == Value.Descriptor.FullName)
// TODO: In order to support dynamic messages, we should really build this up
// dynamically.
if (IsGoogleProtobufValueField(field))
{
return new Value { NullValue = NullValue.NULL_VALUE };
return Value.ForNull();
}
return null;
}
@@ -464,6 +499,11 @@ namespace Google.Protobuf
{
tokens.Add(token);
token = tokenizer.Next();

if (tokenizer.ObjectDepth < typeUrlObjectDepth)
{
throw new InvalidProtocolBufferException("Any message with no @type");
}
}

// Don't add the @type property or its value to the recorded token list
@@ -603,6 +643,11 @@ namespace Google.Protobuf
throw new InvalidProtocolBufferException($"Value out of range: {value}");
}
return (float) value;
case FieldType.Enum:
CheckInteger(value);
// Just return it as an int, and let the CLR convert it.
// Note that we deliberately don't check that it's a known value.
return (int) value;
default:
throw new InvalidProtocolBufferException($"Unsupported conversion from JSON number for field type {field.FieldType}");
}
@@ -633,7 +678,14 @@ namespace Google.Protobuf
case FieldType.String:
return text;
case FieldType.Bytes:
return ByteString.FromBase64(text);
try
{
return ByteString.FromBase64(text);
}
catch (FormatException e)
{
throw InvalidProtocolBufferException.InvalidBase64(e);
}
case FieldType.Int32:
case FieldType.SInt32:
case FieldType.SFixed32:
@@ -826,28 +878,24 @@ namespace Google.Protobuf

try
{
long seconds = long.Parse(secondsText, CultureInfo.InvariantCulture);
long seconds = long.Parse(secondsText, CultureInfo.InvariantCulture) * multiplier;
int nanos = 0;
if (subseconds != "")
{
// This should always work, as we've got 1-9 digits.
int parsedFraction = int.Parse(subseconds.Substring(1));
nanos = parsedFraction * SubsecondScalingFactors[subseconds.Length];
nanos = parsedFraction * SubsecondScalingFactors[subseconds.Length] * multiplier;
}
if (seconds >= Duration.MaxSeconds)
if (!Duration.IsNormalized(seconds, nanos))
{
// Allow precisely 315576000000 seconds, but prohibit even 1ns more.
if (seconds > Duration.MaxSeconds || nanos > 0)
{
throw new InvalidProtocolBufferException("Invalid Duration value: " + token.StringValue);
}
throw new InvalidProtocolBufferException($"Invalid Duration value: {token.StringValue}");
}
message.Descriptor.Fields[Duration.SecondsFieldNumber].Accessor.SetValue(message, seconds * multiplier);
message.Descriptor.Fields[Duration.NanosFieldNumber].Accessor.SetValue(message, nanos * multiplier);
message.Descriptor.Fields[Duration.SecondsFieldNumber].Accessor.SetValue(message, seconds);
message.Descriptor.Fields[Duration.NanosFieldNumber].Accessor.SetValue(message, nanos);
}
catch (FormatException)
{
throw new InvalidProtocolBufferException("Invalid Duration value: " + token.StringValue);
throw new InvalidProtocolBufferException($"Invalid Duration value: {token.StringValue}");
}
}
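Applying the sign (the multiplier) to both parts before validating means mixed-sign results can no longer slip through, and the stored value follows the proto3 convention that seconds and nanos share a sign. A sketch using the public parser, consistent with the "-1.100s" test case above:

```csharp
var d = Duration.Parser.ParseJson("\"-1.100s\"");
// Both components carry the sign.
Assert.AreEqual(-1, d.Seconds);
Assert.AreEqual(-100000000, d.Nanos);
```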
@@ -870,6 +918,8 @@ namespace Google.Protobuf
private static string ToSnakeCase(string text)
{
var builder = new StringBuilder(text.Length * 2);
// Note: this is probably unnecessary now, but currently retained to be as close as possible to the
// C++, whilst still throwing an exception on underscores.
bool wasNotUnderscore = false; // Initialize to false for case 1 (below)
bool wasNotCap = false;

@@ -903,7 +953,11 @@ namespace Google.Protobuf
else
{
builder.Append(c);
wasNotUnderscore = c != '_';
if (c == '_')
{
throw new InvalidProtocolBufferException($"Invalid field mask: {text}");
}
wasNotUnderscore = true;
wasNotCap = true;
}
}
@@ -92,11 +92,22 @@ namespace Google.Protobuf.Reflection
new FieldDescriptor(field, file, this, index, generatedCodeInfo?.PropertyNames[index]));
fieldsInNumberOrder = new ReadOnlyCollection<FieldDescriptor>(fieldsInDeclarationOrder.OrderBy(field => field.FieldNumber).ToArray());
// TODO: Use field => field.Proto.JsonName when we're confident it's appropriate. (And then use it in the formatter, too.)
jsonFieldMap = new ReadOnlyDictionary<string, FieldDescriptor>(fieldsInNumberOrder.ToDictionary(field => JsonFormatter.ToCamelCase(field.Name)));
jsonFieldMap = CreateJsonFieldMap(fieldsInNumberOrder);
file.DescriptorPool.AddSymbol(this);
Fields = new FieldCollection(this);
}

private static ReadOnlyDictionary<string, FieldDescriptor> CreateJsonFieldMap(IList<FieldDescriptor> fields)
{
var map = new Dictionary<string, FieldDescriptor>();
foreach (var field in fields)
{
map[JsonFormatter.ToCamelCase(field.Name)] = field;
map[field.Name] = field;
}
return new ReadOnlyDictionary<string, FieldDescriptor>(map);
}

/// <summary>
/// The brief name of the descriptor's target.
/// </summary>
@@ -255,9 +266,10 @@ namespace Google.Protobuf.Reflection
// TODO: consider making this public in the future. (Being conservative for now...)

/// <value>
/// Returns a read-only dictionary mapping the field names in this message as they're used
/// Returns a read-only dictionary mapping the field names in this message as they're available
/// in the JSON representation to the field descriptors. For example, a field <c>foo_bar</c>
/// in the message would result in an entry with a key <c>fooBar</c>.
/// in the message would result in two entries, one with a key <c>fooBar</c> and one with a key
/// <c>foo_bar</c>, both referring to the same field.
/// </value>
internal IDictionary<string, FieldDescriptor> ByJsonName() => messageDescriptor.jsonFieldMap;
@@ -57,15 +57,38 @@ namespace Google.Protobuf.WellKnownTypes
/// </summary>
public const long MinSeconds = -315576000000L;

internal const int MaxNanoseconds = NanosecondsPerSecond - 1;
internal const int MinNanoseconds = -NanosecondsPerSecond + 1;

internal static bool IsNormalized(long seconds, int nanoseconds)
{
// Simple boundaries
if (seconds < MinSeconds || seconds > MaxSeconds ||
nanoseconds < MinNanoseconds || nanoseconds > MaxNanoseconds)
{
return false;
}
// We only have a problem if one is strictly negative and the other is
// strictly positive.
return Math.Sign(seconds) * Math.Sign(nanoseconds) != -1;
}

/// <summary>
/// Converts this <see cref="Duration"/> to a <see cref="TimeSpan"/>.
/// </summary>
/// <remarks>If the duration is not a precise number of ticks, it is truncated towards 0.</remarks>
/// <returns>The value of this duration, as a <c>TimeSpan</c>.</returns>
/// <exception cref="InvalidOperationException">This value isn't a valid normalized duration, as
/// described in the documentation.</exception>
public TimeSpan ToTimeSpan()
{
checked
{
if (!IsNormalized(Seconds, Nanos))
{
throw new InvalidOperationException("Duration was not a valid normalized duration");
}
long ticks = Seconds * TimeSpan.TicksPerSecond + Nanos / NanosecondsPerTick;
return TimeSpan.FromTicks(ticks);
}
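For orientation, a few concrete inputs for the new check. The real IsNormalized method is internal, so this is a self-contained restatement of the same rule (MaxSeconds is 315576000000, the nanos bounds are ±999999999) with the expected results as assertions; `Debug.Assert` requires `using System.Diagnostics;`.

```csharp
// Standalone restatement of the normalization rule, for illustration only.
static bool IsNormalized(long seconds, int nanos) =>
    seconds >= -315576000000L && seconds <= 315576000000L &&
    nanos >= -999999999 && nanos <= 999999999 &&
    Math.Sign(seconds) * Math.Sign(nanos) != -1;

Debug.Assert(IsNormalized(0, 999999999));      // in range, signs agree
Debug.Assert(IsNormalized(-1, -100000000));    // both negative is fine
Debug.Assert(!IsNormalized(315576000001, 0));  // seconds above MaxSeconds
Debug.Assert(!IsNormalized(1, -1));            // mixed signs
```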
@@ -37,9 +37,17 @@ namespace Google.Protobuf.WellKnownTypes
public partial class Timestamp
{
private static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private static readonly long BclSecondsAtUnixEpoch = UnixEpoch.Ticks / TimeSpan.TicksPerSecond;
internal static readonly long UnixSecondsAtBclMinValue = -BclSecondsAtUnixEpoch;
internal static readonly long UnixSecondsAtBclMaxValue = (DateTime.MaxValue.Ticks / TimeSpan.TicksPerSecond) - BclSecondsAtUnixEpoch;
// Constants determined programmatically, but then hard-coded so they can be constant expressions.
private const long BclSecondsAtUnixEpoch = 62135596800;
internal const long UnixSecondsAtBclMaxValue = 253402300799;
internal const long UnixSecondsAtBclMinValue = -BclSecondsAtUnixEpoch;
internal const int MaxNanos = Duration.NanosecondsPerSecond - 1;

private bool IsNormalized =>
Nanos >= 0 &&
Nanos <= MaxNanos &&
Seconds >= UnixSecondsAtBclMinValue &&
Seconds <= UnixSecondsAtBclMaxValue;

/// <summary>
/// Returns the difference between one <see cref="Timestamp"/> and another, as a <see cref="Duration"/>.
@@ -99,8 +107,14 @@ namespace Google.Protobuf.WellKnownTypes
/// <see cref="DateTime"/> value precisely on a second.
/// </remarks>
/// <returns>This timestamp as a <c>DateTime</c>.</returns>
/// <exception cref="InvalidOperationException">The timestamp contains invalid values; either it is
/// incorrectly normalized or is outside the valid range.</exception>
public DateTime ToDateTime()
{
if (!IsNormalized)
{
throw new InvalidOperationException($"Timestamp contains invalid values: Seconds={Seconds}; Nanos={Nanos}");
}
return UnixEpoch.AddSeconds(Seconds).AddTicks(Nanos / Duration.NanosecondsPerTick);
}

@@ -114,6 +128,8 @@ namespace Google.Protobuf.WellKnownTypes
/// <see cref="DateTimeOffset"/> value precisely on a second.
/// </remarks>
/// <returns>This timestamp as a <c>DateTimeOffset</c>.</returns>
/// <exception cref="InvalidOperationException">The timestamp contains invalid values; either it is
/// incorrectly normalized or is outside the valid range.</exception>
public DateTimeOffset ToDateTimeOffset()
{
return new DateTimeOffset(ToDateTime(), TimeSpan.Zero);
@@ -39,6 +39,8 @@ message TestWellKnownTypes {
google.protobuf.BoolValue bool_field = 16;
google.protobuf.StringValue string_field = 17;
google.protobuf.BytesValue bytes_field = 18;
// Part of struct, but useful to be able to test separately
google.protobuf.Value value_field = 19;
}

// A repeated field for each well-known type.