Diffstat (limited to 'csharp')
-rw-r--r--  csharp/src/Google.Protobuf.Test/JsonParserTest.cs     | 21
-rw-r--r--  csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs  | 16
-rw-r--r--  csharp/src/Google.Protobuf/JsonParser.cs              | 37
-rw-r--r--  csharp/src/Google.Protobuf/JsonTokenizer.cs           | 28
4 files changed, 95 insertions(+), 7 deletions(-)
diff --git a/csharp/src/Google.Protobuf.Test/JsonParserTest.cs b/csharp/src/Google.Protobuf.Test/JsonParserTest.cs
index f595455a..2c515433 100644
--- a/csharp/src/Google.Protobuf.Test/JsonParserTest.cs
+++ b/csharp/src/Google.Protobuf.Test/JsonParserTest.cs
@@ -926,6 +926,27 @@ namespace Google.Protobuf
Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
}
+ [Test]
+ public void UnknownField_NotIgnored()
+ {
+ string json = "{ \"unknownField\": 10, \"singleString\": \"x\" }";
+ Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+ }
+
+ [Test]
+ [TestCase("5")]
+ [TestCase("\"text\"")]
+ [TestCase("[0, 1, 2]")]
+ [TestCase("{ \"a\": { \"b\": 10 } }")]
+ public void UnknownField_Ignored(string value)
+ {
+ var parser = new JsonParser(JsonParser.Settings.Default.WithIgnoreUnknownFields(true));
+ string json = "{ \"unknownField\": " + value + ", \"singleString\": \"x\" }";
+ var actual = parser.Parse<TestAllTypes>(json);
+ var expected = new TestAllTypes { SingleString = "x" };
+ Assert.AreEqual(expected, actual);
+ }
+
/// <summary>
/// Various tests use strings which have quotes round them for parsing or as the result
/// of formatting, but without those quotes being specified in the tests (for the sake of readability).
diff --git a/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs b/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs
index 527ab336..33d35036 100644
--- a/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs
+++ b/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs
@@ -349,6 +349,22 @@ namespace Google.Protobuf
Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
Assert.Throws<InvalidOperationException>(() => tokenizer.Next());
}
+
+ [Test]
+ [TestCase("{ 'skip': 0, 'next': 1")]
+ [TestCase("{ 'skip': [0, 1, 2], 'next': 1")]
+ [TestCase("{ 'skip': 'x', 'next': 1")]
+ [TestCase("{ 'skip': ['x', 'y'], 'next': 1")]
+ [TestCase("{ 'skip': {'a': 0}, 'next': 1")]
+ [TestCase("{ 'skip': {'a': [0, {'b':[]}]}, 'next': 1")]
+ public void SkipValue(string json)
+ {
+ var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json.Replace('\'', '"')));
+ Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
+ Assert.AreEqual("skip", tokenizer.Next().StringValue);
+ tokenizer.SkipValue();
+ Assert.AreEqual("next", tokenizer.Next().StringValue);
+ }
/// <summary>
/// Asserts that the specified JSON is tokenized into the given sequence of tokens.
diff --git a/csharp/src/Google.Protobuf/JsonParser.cs b/csharp/src/Google.Protobuf/JsonParser.cs
index 6b6f2d9a..2ab4a860 100644
--- a/csharp/src/Google.Protobuf/JsonParser.cs
+++ b/csharp/src/Google.Protobuf/JsonParser.cs
@@ -203,10 +203,14 @@ namespace Google.Protobuf
}
else
{
- // TODO: Is this what we want to do? If not, we'll need to skip the value,
- // which may be an object or array. (We might want to put code in the tokenizer
- // to do that.)
- throw new InvalidProtocolBufferException("Unknown field: " + name);
+ if (settings.IgnoreUnknownFields)
+ {
+ tokenizer.SkipValue();
+ }
+ else
+ {
+ throw new InvalidProtocolBufferException("Unknown field: " + name);
+ }
}
}
}
@@ -997,6 +1001,19 @@ namespace Google.Protobuf
public TypeRegistry TypeRegistry { get; }
/// <summary>
+ /// Whether the parser should ignore unknown fields (<c>true</c>) or throw an exception when
+ /// they are encountered (<c>false</c>).
+ /// </summary>
+ public bool IgnoreUnknownFields { get; }
+
+ private Settings(int recursionLimit, TypeRegistry typeRegistry, bool ignoreUnknownFields)
+ {
+ RecursionLimit = recursionLimit;
+ TypeRegistry = ProtoPreconditions.CheckNotNull(typeRegistry, nameof(typeRegistry));
+ IgnoreUnknownFields = ignoreUnknownFields;
+ }
+
+ /// <summary>
/// Creates a new <see cref="Settings"/> object with the specified recursion limit.
/// </summary>
/// <param name="recursionLimit">The maximum depth of messages to parse</param>
@@ -1009,11 +1026,17 @@ namespace Google.Protobuf
/// </summary>
/// <param name="recursionLimit">The maximum depth of messages to parse</param>
/// <param name="typeRegistry">The type registry used to parse <see cref="Any"/> messages</param>
- public Settings(int recursionLimit, TypeRegistry typeRegistry)
+ public Settings(int recursionLimit, TypeRegistry typeRegistry) : this(recursionLimit, typeRegistry, false)
{
- RecursionLimit = recursionLimit;
- TypeRegistry = ProtoPreconditions.CheckNotNull(typeRegistry, nameof(typeRegistry));
}
+
+ /// <summary>
+ /// Creates a new <see cref="Settings"/> object set to either ignore unknown fields, or throw an exception
+ /// when unknown fields are encountered.
+ /// </summary>
+ /// <param name="ignoreUnknownFields"><c>true</c> if unknown fields should be ignored when parsing; <c>false</c> to throw an exception.</param>
+ public Settings WithIgnoreUnknownFields(bool ignoreUnknownFields) =>
+ new Settings(RecursionLimit, TypeRegistry, ignoreUnknownFields);
}
}
}
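
Usage note (not part of the diff): the new behaviour is opt-in via the immutable Settings type, which now exposes WithIgnoreUnknownFields rather than a new public constructor. A minimal sketch based on the tests above, assuming TestAllTypes comes from the test project's Google.Protobuf.TestProtos namespace:

    using Google.Protobuf;
    using Google.Protobuf.TestProtos;   // assumed test namespace for TestAllTypes

    // Default settings still throw InvalidProtocolBufferException on unknown fields;
    // WithIgnoreUnknownFields(true) returns a copy of the settings that skips them instead.
    var parser = new JsonParser(JsonParser.Settings.Default.WithIgnoreUnknownFields(true));

    string json = "{ \"unknownField\": 10, \"singleString\": \"x\" }";
    TestAllTypes message = parser.Parse<TestAllTypes>(json);
    // message.SingleString == "x"; "unknownField" (including object or array values) is skipped.

Keeping Settings immutable and adding a With* method leaves the existing constructors untouched while making the relaxed parsing an explicit choice.
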
diff --git a/csharp/src/Google.Protobuf/JsonTokenizer.cs b/csharp/src/Google.Protobuf/JsonTokenizer.cs
index 09a6d43b..2fc1e7c3 100644
--- a/csharp/src/Google.Protobuf/JsonTokenizer.cs
+++ b/csharp/src/Google.Protobuf/JsonTokenizer.cs
@@ -138,6 +138,34 @@ namespace Google.Protobuf
protected abstract JsonToken NextImpl();
/// <summary>
+ /// Skips the value we're about to read. This must only be called immediately after reading a property name.
+ /// If the value is an object or an array, the complete object/array is skipped.
+ /// </summary>
+ internal void SkipValue()
+ {
+ // We'll assume that Next() makes sure that the end objects and end arrays are all valid.
+ // All we care about is the total nesting depth we need to close.
+ int depth = 0;
+
+ // do/while rather than while loop so that we read at least one token.
+ do
+ {
+ var token = Next();
+ switch (token.Type)
+ {
+ case JsonToken.TokenType.EndArray:
+ case JsonToken.TokenType.EndObject:
+ depth--;
+ break;
+ case JsonToken.TokenType.StartArray:
+ case JsonToken.TokenType.StartObject:
+ depth++;
+ break;
+ }
+ } while (depth != 0);
+ }
+
+ /// <summary>
/// Tokenizer which first exhausts a list of tokens, then consults another tokenizer.
/// </summary>
private class JsonReplayTokenizer : JsonTokenizer
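
Usage note (not part of the diff): SkipValue is internal, so it is reachable only from within the library and its test assembly (as in the new JsonTokenizerTest cases). A minimal sketch mirroring those tests:

    using System.IO;
    using Google.Protobuf;

    // SkipValue() consumes exactly one value after a property name, counting
    // StartObject/StartArray up and EndObject/EndArray down until the nesting
    // depth returns to zero, so nested objects and arrays are skipped whole.
    var tokenizer = JsonTokenizer.FromTextReader(
        new StringReader("{ \"skip\": { \"a\": [0, { \"b\": [] }] }, \"next\": 1 }"));

    tokenizer.Next();                  // StartObject
    tokenizer.Next();                  // property name "skip"
    tokenizer.SkipValue();             // skips the whole nested object value
    var next = tokenizer.Next();       // property name "next"

This is what lets JsonParser simply call tokenizer.SkipValue() for an unknown field when IgnoreUnknownFields is set, regardless of whether the value is a scalar, an array, or a nested object.
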