+21 -7 lines changed
@@ -841,6 +841,16 @@ def testParseUnknownField(self, message_module):
     self.assertEqual(message.optional_nested_message.bb, 45)
     self.assertEqual(message.optional_int32, 123)
 
+  def testParseUnknownRepeatedMessage(self, message_module):
+    message = message_module.TestAllTypes()
+    text = (
+        'unknown_repeated: [{}]\n'
+        'unknown_repeated2: [<>, {}]\n'
+        'optional_nested_message { bb: 45 }'
+    )
+    text_format.Parse(text, message, allow_unknown_field=True)
+    self.assertEqual(message.optional_nested_message.bb, 45)
+
   def testParseBadEnumValue(self, message_module):
     message = message_module.TestAllTypes()
     text = 'optional_nested_enum: BARR'
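For reference, a minimal sketch of the behavior the new test exercises, run outside the test harness. It assumes protobuf's generated unittest_pb2.TestAllTypes is importable (the test above gets the same class via message_module); any message with a singular submessage field would do. With allow_unknown_field=True, the parser now skips unknown list-valued fields whose elements are messages instead of raising ParseError:

from google.protobuf import text_format
from google.protobuf import unittest_pb2  # assumption: protobuf's test protos are available

message = unittest_pb2.TestAllTypes()
text = (
    'unknown_repeated: [{}]\n'       # unknown field whose list elements are messages
    'unknown_repeated2: [<>, {}]\n'  # both <> and {} delimiters inside the list
    'optional_nested_message { bb: 45 }'
)
# Previously the '{' inside the bracketed list raised ParseError even with
# allow_unknown_field=True; with this change the whole list is skipped and
# the known field is still parsed.
text_format.Parse(text, message, allow_unknown_field=True)
assert message.optional_nested_message.bb == 45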
@@ -1196,7 +1196,7 @@ def _SkipFieldContents(self, tokenizer, field_name, immediate_message_type):
         ':') and not tokenizer.LookingAt('{') and not tokenizer.LookingAt('<'):
       self._DetectSilentMarker(tokenizer, immediate_message_type, field_name)
       if tokenizer.LookingAt('['):
-        self._SkipRepeatedFieldValue(tokenizer)
+        self._SkipRepeatedFieldValue(tokenizer, immediate_message_type)
       else:
         self._SkipFieldValue(tokenizer)
     else:
@@ -1271,18 +1271,22 @@ def _SkipFieldValue(self, tokenizer):
         not tokenizer.TryConsumeFloat()):
       raise ParseError('Invalid field value: ' + tokenizer.token)
 
-  def _SkipRepeatedFieldValue(self, tokenizer):
+  def _SkipRepeatedFieldValue(self, tokenizer, immediate_message_type):
     """Skips over a repeated field value.
 
     Args:
       tokenizer: A tokenizer to parse the field value.
     """
     tokenizer.Consume('[')
-    if not tokenizer.LookingAt(']'):
-      self._SkipFieldValue(tokenizer)
-      while tokenizer.TryConsume(','):
-        self._SkipFieldValue(tokenizer)
-    tokenizer.Consume(']')
+    if not tokenizer.TryConsume(']'):
+      while True:
+        if tokenizer.LookingAt('<') or tokenizer.LookingAt('{'):
+          self._SkipFieldMessage(tokenizer, immediate_message_type)
+        else:
+          self._SkipFieldValue(tokenizer)
+        if tokenizer.TryConsume(']'):
+          break
+        tokenizer.Consume(',')
 
 
 class Tokenizer(object):
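The shape of the rewritten loop, shown below as a standalone sketch over a pre-split token list (skip_repeated_value is a hypothetical helper used only for illustration, not protobuf code): after '[', each element is either a scalar token or a '{'/'<'-delimited message, and each element must be followed by ',' or the closing ']'. The real implementation delegates message elements to _SkipFieldMessage, which is why _SkipRepeatedFieldValue now takes immediate_message_type.

# Standalone sketch of the element loop in the rewritten _SkipRepeatedFieldValue.
def skip_repeated_value(tokens, i):
  """Skips a '[' ... ']' list starting at tokens[i]; returns the index past ']'."""
  assert tokens[i] == '['
  i += 1
  if tokens[i] == ']':             # empty list, e.g. "unknown: []"
    return i + 1
  while True:
    if tokens[i] in ('{', '<'):    # message element: skip to its matching close
      open_tok = tokens[i]
      close_tok = '}' if open_tok == '{' else '>'
      depth, i = 1, i + 1
      while depth:                 # the real code recurses via _SkipFieldMessage
        if tokens[i] == open_tok:
          depth += 1
        elif tokens[i] == close_tok:
          depth -= 1
        i += 1
    else:                          # scalar element: a single token
      i += 1
    if tokens[i] == ']':           # closing bracket ends the list
      return i + 1
    assert tokens[i] == ',', 'expected "," between list elements'
    i += 1

# Token stream for the value part of "unknown_repeated2: [<>, {}]"
print(skip_repeated_value(['[', '<', '>', ',', '{', '}', ']'], 0))  # -> 7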