Python google.protobuf.text_format module: ParseError() code examples
The following code examples, collected from open-source Python projects, illustrate how to use google.protobuf.text_format.ParseError().
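Before the project snippets, here is a minimal, self-contained sketch of the shared pattern: text_format.Parse() raises text_format.ParseError when the text-format input is malformed. The sketch uses the well-known Int32Value wrapper type purely so that no project-specific generated _pb2 module is needed; the error text shown in the comment is only indicative.

from google.protobuf import text_format
from google.protobuf.wrappers_pb2 import Int32Value

message = Int32Value()
try:
  # 'bork' is not a valid int32, so text-format parsing fails.
  text_format.Parse('value: bork', message)
except text_format.ParseError as err:
  # err carries the line/column and a description,
  # e.g. "1:8 : Couldn't parse integer: bork".
  print('Could not parse text proto: %s' % err)
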
def load_protobuf_from_file(container, filename):
  with open(filename, 'rb') as fin:
    file_content = fin.read()

  # First try to read it as a binary file.
  try:
    container.ParseFromString(file_content)
    print("Parse file [%s] with binary format successfully." % (filename))
    return container
  except Exception as e:  # pylint: disable=broad-except
    print("Info: Trying to parse file [%s] with binary format but failed with error [%s]." % (filename, str(e)))

  # Next try to read it as a text file.
  try:
    from google.protobuf import text_format
    text_format.Parse(file_content.decode('UTF-8'), container, allow_unknown_extension=True)
    print("Parse file [%s] with text format successfully." % (filename))
  except text_format.ParseError as e:
    raise IOError("Cannot parse file %s: %s." % (filename, str(e)))

  return container

def testConsumeByteString(self):
  text = '"string1\''
  tokenizer = text_format.Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = 'string1"'
  tokenizer = text_format.Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = '\n"\\xt"'
  tokenizer = text_format.Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = '\n"\\"'
  tokenizer = text_format.Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = '\n"\\x"'
  tokenizer = text_format.Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

def testParseBadEnumValue(self):
  message = unittest_pb2.TestAllTypes()
  text = 'optional_nested_enum: BARR'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:23 : Enum type "protobuf_unittest.TestAllTypes.NestedEnum" '
       'has no value named BARR.'),
      text_format.Parse, text, message)

  message = unittest_pb2.TestAllTypes()
  text = 'optional_nested_enum: 100'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:23 : Enum type "protobuf_unittest.TestAllTypes.NestedEnum" '
       'has no value with number 100.'),
      text_format.Parse, text, message)

def testConsumeByteString(self):
  text = '"string1\''
  tokenizer = text_format._Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = 'string1"'
  tokenizer = text_format._Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = '\n"\\xt"'
  tokenizer = text_format._Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = '\n"\\"'
  tokenizer = text_format._Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = '\n"\\x"'
  tokenizer = text_format._Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

def testConsumeByteString(self):
  text = '"string1\''
  tokenizer = text_format._Tokenizer(text)
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = 'string1"'
  tokenizer = text_format._Tokenizer(text)
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = '\n"\\xt"'
  tokenizer = text_format._Tokenizer(text)
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = '\n"\\"'
  tokenizer = text_format._Tokenizer(text)
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  text = '\n"\\x"'
  tokenizer = text_format._Tokenizer(text)
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

def testConsumeAndCheckTrailingComment(self):
  text = 'some_number: 4 # some comment'  # trailing comment on the same line
  tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
  self.assertRaises(text_format.ParseError,
                    tokenizer.ConsumeCommentOrTrailingComment)

  self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
  self.assertEqual(tokenizer.token, ':')
  tokenizer.NextToken()
  self.assertRaises(text_format.ParseError,
                    tokenizer.ConsumeCommentOrTrailingComment)
  self.assertEqual(4, tokenizer.ConsumeInteger())
  self.assertFalse(tokenizer.AtEnd())

  self.assertEqual((True, '# some comment'),
                   tokenizer.ConsumeCommentOrTrailingComment())
  self.assertTrue(tokenizer.AtEnd())

def testParseBadEnumValue(self, message_module):
  message = message_module.TestAllTypes()
  text = 'optional_nested_enum: BARR'
  six.assertRaisesRegex(self,
                        text_format.ParseError,
                        (r'1:23 : Enum type "\w+.TestAllTypes.NestedEnum" '
                         r'has no value named BARR.'),
                        text_format.Parse, text, message)

  message = message_module.TestAllTypes()
  text = 'optional_nested_enum: 100'
  six.assertRaisesRegex(self,
                        text_format.ParseError,
                        (r'1:23 : Enum type "\w+.TestAllTypes.NestedEnum" '
                         r'has no value with number 100.'),
                        text_format.Parse, text, message)

def load_labelmap(path):
  """Loads label map proto.

  Args:
    path: path to StringIntLabelMap proto text file.
  Returns:
    a StringIntLabelMapProto
  """
  with tf.gfile.GFile(path, 'r') as fid:
    label_map_string = fid.read()
    label_map = string_int_label_map_pb2.StringIntLabelMap()
    try:
      text_format.Merge(label_map_string, label_map)
    except text_format.ParseError:
      label_map.ParseFromString(label_map_string)
  _validate_label_map(label_map)
  return label_map
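
A hypothetical usage sketch for the helper above, assuming the TensorFlow Object Detection API layout it comes from; the path is a placeholder:

# Placeholder path; StringIntLabelMap exposes a repeated `item` field
# whose entries carry `id` and `name`.
label_map = load_labelmap('path/to/label_map.pbtxt')
for item in label_map.item:
  print(item.id, item.name)
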
def testParseInvalidUtf8(self, message_module):
  message = message_module.TestAllTypes()
  text = 'repeated_string: "\\xc3\\xc3"'
  self.assertRaises(text_format.ParseError, text_format.Parse, text, message)

def testParseSingleWord(self, message_module):
  message = message_module.TestAllTypes()
  text = 'foo'
  six.assertRaisesRegex(self, text_format.ParseError, (
      r'1:1 : Message type "\w+.TestAllTypes" has no field named '
      r'"foo".'), text_format.Parse, text, message)

def testParseUnknownField(self, message_module):
  message = message_module.TestAllTypes()
  text = 'unknown_field: 8\n'
  six.assertRaisesRegex(self, text_format.ParseError, (
      r'1:1 : Message type "\w+.TestAllTypes" has no field named '
      r'"unknown_field".'), text_format.Parse, text, message)

def testParseBadEnumValue(self, message_module):
  message = message_module.TestAllTypes()
  text = 'optional_nested_enum: BARR'
  six.assertRaisesRegex(self, text_format.ParseError,
                        (r'1:23 : Enum type "\w+.TestAllTypes.NestedEnum" '
                         r'has no value named BARR.'), text_format.Parse,
                        text, message)

  message = message_module.TestAllTypes()
  text = 'optional_nested_enum: 100'
  six.assertRaisesRegex(self, text_format.ParseError,
                        (r'1:23 : Enum type "\w+.TestAllTypes.NestedEnum" '
                         r'has no value with number 100.'), text_format.Parse,
                        text, message)

def testParseBadIntValue(self, message_module):
  message = message_module.TestAllTypes()
  text = 'optional_int32: bork'
  six.assertRaisesRegex(self, text_format.ParseError,
                        ('1:17 : Couldn\'t parse integer: bork'),
                        text_format.Parse, text, message)

def testParseMessageByFieldNumber(self):
  message = unittest_pb2.TestAllTypes()
  text = ('34: 1\n' 'repeated_uint64: 2\n')
  text_format.Parse(text, message, allow_field_number=True)
  self.assertEqual(1, message.repeated_uint64[0])
  self.assertEqual(2, message.repeated_uint64[1])

  message = unittest_mset_pb2.TestMessageSetContainer()
  text = ('1 {\n'
          '  1545008 {\n'
          '    15: 23\n'
          '  }\n'
          '  1547769 {\n'
          '    25: \"foo\"\n'
          '  }\n'
          '}\n')
  text_format.Parse(text, message, allow_field_number=True)
  ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
  ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
  self.assertEqual(23, message.message_set.Extensions[ext1].i)
  self.assertEqual('foo', message.message_set.Extensions[ext2].str)

  # Can't parse a field number without setting allow_field_number=True.
  message = unittest_pb2.TestAllTypes()
  text = '34:1\n'
  six.assertRaisesRegex(self, text_format.ParseError, (
      r'1:1 : Message type "\w+.TestAllTypes" has no field named '
      r'"34".'), text_format.Parse, text, message)

  # Can't parse if the field number is not found.
  text = '1234:1\n'
  six.assertRaisesRegex(
      self,
      text_format.ParseError,
      (r'1:1 : Message type "\w+.TestAllTypes" has no field named '
       r'"1234".'),
      text_format.Parse,
      text,
      message,
      allow_field_number=True)

def testParseBadExtension(self):
  message = unittest_pb2.TestAllExtensions()
  text = '[unknown_extension]: 8\n'
  six.assertRaisesRegex(self, text_format.ParseError,
                        '1:2 : Extension "unknown_extension" not registered.',
                        text_format.Parse, text, message)

  message = unittest_pb2.TestAllTypes()
  six.assertRaisesRegex(self, text_format.ParseError, (
      '1:2 : Message type "protobuf_unittest.TestAllTypes" does not have '
      'extensions.'), text_format.Parse, text, message)

def testParseDuplicateExtensionScalars(self):
  message = unittest_pb2.TestAllExtensions()
  text = ('[protobuf_unittest.optional_int32_extension]: 42 '
          '[protobuf_unittest.optional_int32_extension]: 67')
  six.assertRaisesRegex(self, text_format.ParseError, (
      '1:96 : Message type "protobuf_unittest.TestAllExtensions" '
      'should not have multiple '
      '"protobuf_unittest.optional_int32_extension" extensions.'),
                        text_format.Parse, text, message)

def testParseDuplicateNestedMessageScalars(self):
  message = unittest_pb2.TestAllTypes()
  text = ('optional_nested_message { bb: 1 } '
          'optional_nested_message { bb: 2 }')
  six.assertRaisesRegex(self, text_format.ParseError, (
      '1:65 : Message type "protobuf_unittest.TestAllTypes.NestedMessage" '
      'should not have multiple "bb" fields.'), text_format.Parse, text,
                        message)

def testParseDuplicateScalars(self):
  message = unittest_pb2.TestAllTypes()
  text = ('optional_int32: 42 ' 'optional_int32: 67')
  six.assertRaisesRegex(self, text_format.ParseError, (
      '1:36 : Message type "protobuf_unittest.TestAllTypes" should not '
      'have multiple "optional_int32" fields.'), text_format.Parse, text,
                        message)

def testMergeExpandedAnyNoDescriptorPool(self):
  message = any_test_pb2.TestAny()
  text = ('any_value {\n'
          '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
          '    data: "string"\n'
          '  }\n'
          '}\n')
  with self.assertRaises(text_format.ParseError) as e:
    text_format.Merge(text, message, descriptor_pool=None)
  self.assertEqual(str(e.exception),
                   'Descriptor pool required to parse expanded Any field')

def testMergeExpandedAnyDescriptorPoolMissingType(self):
  message = any_test_pb2.TestAny()
  text = ('any_value {\n'
          '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
          '    data: "string"\n'
          '  }\n'
          '}\n')
  with self.assertRaises(text_format.ParseError) as e:
    empty_pool = descriptor_pool.DescriptorPool()
    text_format.Merge(text, message, descriptor_pool=empty_pool)
  self.assertEqual(
      str(e.exception),
      'Type protobuf_unittest.OneString not found in descriptor pool')

def testConsumeIntegers(self):
  # This test only tests the failures in the integer parsing methods as well
  # as the '0' special cases.
  int64_max = (1 << 63) - 1
  uint32_max = (1 << 32) - 1
  text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
  tokenizer = text_format.Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError,
                    text_format._ConsumeUint32, tokenizer)
  self.assertRaises(text_format.ParseError,
                    text_format._ConsumeUint64, tokenizer)
  self.assertEqual(-1, text_format._ConsumeInt32(tokenizer))
  self.assertRaises(text_format.ParseError,
                    text_format._ConsumeUint32, tokenizer)
  self.assertRaises(text_format.ParseError,
                    text_format._ConsumeInt32, tokenizer)
  self.assertEqual(uint32_max + 1, text_format._ConsumeInt64(tokenizer))
  self.assertRaises(text_format.ParseError,
                    text_format._ConsumeInt64, tokenizer)
  self.assertEqual(int64_max + 1, text_format._ConsumeUint64(tokenizer))
  self.assertTrue(tokenizer.AtEnd())

  text = '-0 -0 0 0'
  tokenizer = text_format.Tokenizer(text.splitlines())
  self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
  self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
  self.assertEqual(0, text_format._ConsumeUint32(tokenizer))
  self.assertEqual(0, text_format._ConsumeUint64(tokenizer))
  self.assertTrue(tokenizer.AtEnd())

def testConsumeBool(self):
  text = 'not-a-bool'
  tokenizer = text_format.Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool)

def testConsumeTrailingComment(self):
  text = 'some_number: 4\n# some comment'
  tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)

  self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
  self.assertEqual(tokenizer.token, ':')
  tokenizer.NextToken()
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeComment)
  self.assertEqual(4, tokenizer.ConsumeInteger())
  self.assertFalse(tokenizer.AtEnd())

  self.assertEqual('# some comment', tokenizer.ConsumeComment())
  self.assertTrue(tokenizer.AtEnd())

def testParseInvalidUtf8(self):
  message = unittest_pb2.TestAllTypes()
  text = 'repeated_string: "\\xc3\\xc3"'
  self.assertRaises(text_format.ParseError, text_format.Parse, text, message)

def testParseSingleWord(self):
  message = unittest_pb2.TestAllTypes()
  text = 'foo'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:1 : Message type "protobuf_unittest.TestAllTypes" has no field named '
       '"foo".'),
      text_format.Parse, text, message)

def testParseUnknownField(self):
  message = unittest_pb2.TestAllTypes()
  text = 'unknown_field: 8\n'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:1 : Message type "protobuf_unittest.TestAllTypes" has no field named '
       '"unknown_field".'),
      text_format.Parse, text, message)

def testParseBadExtension(self):
  message = unittest_pb2.TestAllExtensions()
  text = '[unknown_extension]: 8\n'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      '1:2 : Extension "unknown_extension" not registered.',
      text_format.Parse, text, message)

  message = unittest_pb2.TestAllTypes()
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:2 : Message type "protobuf_unittest.TestAllTypes" does not have '
       'extensions.'),
      text_format.Parse, text, message)

def testParseGroupNotClosed(self):
  message = unittest_pb2.TestAllTypes()
  text = 'RepeatedGroup: <'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError, '1:16 : Expected ">".',
      text_format.Parse, text, message)

  text = 'RepeatedGroup: {'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError, '1:16 : Expected "}".',
      text_format.Parse, text, message)

def testParseBadIntValue(self):
  message = unittest_pb2.TestAllTypes()
  text = 'optional_int32: bork'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:17 : Couldn\'t parse integer: bork'),
      text_format.Parse, text, message)

def testParseRepeatedScalars(self):
  message = unittest_pb2.TestAllTypes()
  text = ('optional_int32: 42 '
          'optional_int32: 67')
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:36 : Message type "protobuf_unittest.TestAllTypes" should not '
       'have multiple "optional_int32" fields.'),
      text_format.Parse, text, message)

def testParseRepeatedNestedMessageScalars(self):
  message = unittest_pb2.TestAllTypes()
  text = ('optional_nested_message { bb: 1 } '
          'optional_nested_message { bb: 2 }')
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:65 : Message type "protobuf_unittest.TestAllTypes.NestedMessage" '
       'should not have multiple "bb" fields.'),
      text_format.Parse, text, message)

def testParseRepeatedExtensionScalars(self):
  message = unittest_pb2.TestAllExtensions()
  text = ('[protobuf_unittest.optional_int32_extension]: 42 '
          '[protobuf_unittest.optional_int32_extension]: 67')
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:96 : Message type "protobuf_unittest.TestAllExtensions" '
       'should not have multiple '
       '"protobuf_unittest.optional_int32_extension" extensions.'),
      text_format.Parse, text, message)

def testConsumeIntegers(self):
  # This test only tests the failures in the integer parsing methods as well
  # as the '0' special cases.
  int64_max = (1 << 63) - 1
  uint32_max = (1 << 32) - 1
  text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
  tokenizer = text_format._Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint32)
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint64)
  self.assertEqual(-1, tokenizer.ConsumeInt32())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint32)
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeInt32)
  self.assertEqual(uint32_max + 1, tokenizer.ConsumeInt64())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeInt64)
  self.assertEqual(int64_max + 1, tokenizer.ConsumeUint64())
  self.assertTrue(tokenizer.AtEnd())

  text = '-0 -0 0 0'
  tokenizer = text_format._Tokenizer(text.splitlines())
  self.assertEqual(0, tokenizer.ConsumeUint32())
  self.assertEqual(0, tokenizer.ConsumeUint64())
  self.assertEqual(0, tokenizer.ConsumeUint32())
  self.assertEqual(0, tokenizer.ConsumeUint64())
  self.assertTrue(tokenizer.AtEnd())

def testConsumeBool(self):
  text = 'not-a-bool'
  tokenizer = text_format._Tokenizer(text.splitlines())
  self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool)

def testMergeInvalidUtf8(self):
  message = unittest_pb2.TestAllTypes()
  text = 'repeated_string: "\\xc3\\xc3"'
  self.assertRaises(text_format.ParseError, text_format.Merge, text, message)

def testMergeSingleWord(self):
  message = unittest_pb2.TestAllTypes()
  text = 'foo'
  self.assertRaisesWithMessage(
      text_format.ParseError,
      ('1:1 : Message type "protobuf_unittest.TestAllTypes" has no field named '
       '"foo".'),
      text_format.Merge, text, message)

def testMergeUnknownField(self):
  message = unittest_pb2.TestAllTypes()
  text = 'unknown_field: 8\n'
  self.assertRaisesWithMessage(
      text_format.ParseError,
      ('1:1 : Message type "protobuf_unittest.TestAllTypes" has no field named '
       '"unknown_field".'),
      text_format.Merge, text, message)

def testMergeBadExtension(self):
  message = unittest_pb2.TestAllExtensions()
  text = '[unknown_extension]: 8\n'
  self.assertRaisesWithMessage(
      text_format.ParseError,
      '1:2 : Extension "unknown_extension" not registered.',
      text_format.Merge, text, message)

  message = unittest_pb2.TestAllTypes()
  self.assertRaisesWithMessage(
      text_format.ParseError,
      ('1:2 : Message type "protobuf_unittest.TestAllTypes" does not have '
       'extensions.'),
      text_format.Merge, text, message)