Diffstat (limited to 'python/google/protobuf/internal')
-rwxr-xr-x  python/google/protobuf/internal/containers.py             |  2
-rw-r--r--  python/google/protobuf/internal/descriptor_pool_test.py   | 36
-rwxr-xr-x  python/google/protobuf/internal/encoder.py                |  7
-rwxr-xr-x  python/google/protobuf/internal/message_test.py           | 27
-rwxr-xr-x  python/google/protobuf/internal/python_message.py         |  5
-rw-r--r--  python/google/protobuf/internal/python_protobuf.cc        | 63
-rwxr-xr-x  python/google/protobuf/internal/reflection_test.py        |  9
-rw-r--r--  python/google/protobuf/internal/symbol_database_test.py   |  9
-rwxr-xr-x  python/google/protobuf/internal/text_format_test.py       | 51
9 files changed, 201 insertions, 8 deletions
diff --git a/python/google/protobuf/internal/containers.py b/python/google/protobuf/internal/containers.py
index de13018e..68be9e54 100755
--- a/python/google/protobuf/internal/containers.py
+++ b/python/google/protobuf/internal/containers.py
@@ -275,7 +275,7 @@ class RepeatedScalarFieldContainer(BaseContainer):
     new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
     if new_values:
       self._values.extend(new_values)
-      self._message_listener.Modified()
+    self._message_listener.Modified()
def MergeFrom(self, other):
"""Appends the contents of another repeated field of the same type to this
diff --git a/python/google/protobuf/internal/descriptor_pool_test.py b/python/google/protobuf/internal/descriptor_pool_test.py
index 1e710dcf..2ba1d285 100644
--- a/python/google/protobuf/internal/descriptor_pool_test.py
+++ b/python/google/protobuf/internal/descriptor_pool_test.py
@@ -71,6 +71,13 @@ class DescriptorPoolTest(unittest.TestCase):
self.pool.Add(self.factory_test1_fd)
self.pool.Add(self.factory_test2_fd)
+ self.pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ unittest_import_public_pb2.DESCRIPTOR.serialized_pb))
+ self.pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ unittest_import_pb2.DESCRIPTOR.serialized_pb))
+ self.pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ unittest_pb2.DESCRIPTOR.serialized_pb))
+
def testFindFileByName(self):
name1 = 'google/protobuf/internal/factory_test1.proto'
file_desc1 = self.pool.FindFileByName(name1)
@@ -107,6 +114,20 @@ class DescriptorPoolTest(unittest.TestCase):
self.assertEqual('google.protobuf.python.internal', file_desc2.package)
self.assertIn('Factory2Message', file_desc2.message_types_by_name)
+ # Tests top level extension.
+ file_desc3 = self.pool.FindFileContainingSymbol(
+ 'google.protobuf.python.internal.another_field')
+ self.assertIsInstance(file_desc3, descriptor.FileDescriptor)
+ self.assertEqual('google/protobuf/internal/factory_test2.proto',
+ file_desc3.name)
+
+ # Tests nested extension inside a message.
+ file_desc4 = self.pool.FindFileContainingSymbol(
+ 'google.protobuf.python.internal.Factory2Message.one_more_field')
+ self.assertIsInstance(file_desc4, descriptor.FileDescriptor)
+ self.assertEqual('google/protobuf/internal/factory_test2.proto',
+ file_desc4.name)
+
def testFindFileContainingSymbolFailure(self):
with self.assertRaises(KeyError):
self.pool.FindFileContainingSymbol('Does not exist')
@@ -311,6 +332,10 @@ class DescriptorPoolTest(unittest.TestCase):
self.pool.FindExtensionByName(
'google.protobuf.python.internal.Factory1Message.list_value')
+ def testFindService(self):
+ service = self.pool.FindServiceByName('protobuf_unittest.TestService')
+ self.assertEqual(service.full_name, 'protobuf_unittest.TestService')
+
def testUserDefinedDB(self):
db = descriptor_database.DescriptorDatabase()
self.pool = descriptor_pool.DescriptorPool(db)
@@ -645,6 +670,17 @@ class AddDescriptorTest(unittest.TestCase):
@unittest.skipIf(api_implementation.Type() == 'cpp',
'With the cpp implementation, Add() must be called first')
+ def testService(self):
+ pool = descriptor_pool.DescriptorPool()
+ with self.assertRaises(KeyError):
+ pool.FindServiceByName('protobuf_unittest.TestService')
+ pool.AddServiceDescriptor(unittest_pb2._TESTSERVICE)
+ self.assertEqual(
+ 'protobuf_unittest.TestService',
+ pool.FindServiceByName('protobuf_unittest.TestService').full_name)
+
+ @unittest.skipIf(api_implementation.Type() == 'cpp',
+ 'With the cpp implementation, Add() must be called first')
def testFile(self):
pool = descriptor_pool.DescriptorPool()
pool.AddFileDescriptor(unittest_pb2.DESCRIPTOR)
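
The new testFindService and testService cases above exercise service lookup in a descriptor pool. A minimal usage sketch of the pure-Python path (with the C++ implementation, descriptors have to be added through Add() first, as the skip decorator notes):

    from google.protobuf import descriptor_pool
    from google.protobuf import unittest_pb2

    pool = descriptor_pool.DescriptorPool()
    pool.AddServiceDescriptor(unittest_pb2._TESTSERVICE)  # pure-Python implementation only
    service = pool.FindServiceByName('protobuf_unittest.TestService')
    assert service.full_name == 'protobuf_unittest.TestService'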
diff --git a/python/google/protobuf/internal/encoder.py b/python/google/protobuf/internal/encoder.py
index 48ef2df3..80e59cab 100755
--- a/python/google/protobuf/internal/encoder.py
+++ b/python/google/protobuf/internal/encoder.py
@@ -340,7 +340,7 @@ def MessageSetItemSizer(field_number):
# Map is special: it needs custom logic to compute its size properly.
-def MapSizer(field_descriptor):
+def MapSizer(field_descriptor, is_message_map):
"""Returns a sizer for a map field."""
# Can't look at field_descriptor.message_type._concrete_class because it may
@@ -355,9 +355,12 @@ def MapSizer(field_descriptor):
# It's wasteful to create the messages and throw them away one second
# later since we'll do the same for the actual encode. But there's not an
# obvious way to avoid this within the current design without tons of code
- # duplication.
+ # duplication. For message map, value.ByteSize() should be called to
+ # update the status.
entry_msg = message_type._concrete_class(key=key, value=value)
total += message_sizer(entry_msg)
+ if is_message_map:
+ value.ByteSize()
return total
return FieldSize
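
The extra is_message_map argument lets the map sizer call ByteSize() on message-typed map values while sizing the parent, refreshing each value's cached size. The observable effect, sketched with the map_unittest_pb2 module used by message_test.py below:

    from google.protobuf import map_unittest_pb2

    msg = map_unittest_pb2.TestMap()
    msg.map_int32_foreign_message[19].c = 1
    size = msg.ByteSize()
    msg.map_int32_foreign_message[19].c = 128  # varint for c grows by one byte
    assert msg.ByteSize() == size + 1          # a stale cached size would miss this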
diff --git a/python/google/protobuf/internal/message_test.py b/python/google/protobuf/internal/message_test.py
index 9986c0d9..e8b794f0 100755
--- a/python/google/protobuf/internal/message_test.py
+++ b/python/google/protobuf/internal/message_test.py
@@ -564,6 +564,11 @@ class MessageTest(BaseTestCase):
self.assertIsInstance(m.repeated_nested_message,
collections.MutableSequence)
+ def testRepeatedFieldInsideNestedMessage(self, message_module):
+ m = message_module.NestedTestAllTypes()
+ m.payload.repeated_int32.extend([])
+ self.assertTrue(m.HasField('payload'))
+
def ensureNestedMessageExists(self, msg, attribute):
"""Make sure that a nested message object exists.
@@ -1432,6 +1437,18 @@ class Proto3Test(BaseTestCase):
self.assertIn(-456, msg2.map_int32_foreign_message)
self.assertEqual(2, len(msg2.map_int32_foreign_message))
+ def testMapByteSize(self):
+ msg = map_unittest_pb2.TestMap()
+ msg.map_int32_int32[1] = 1
+ size = msg.ByteSize()
+ msg.map_int32_int32[1] = 128
+ self.assertEqual(msg.ByteSize(), size + 1)
+
+ msg.map_int32_foreign_message[19].c = 1
+ size = msg.ByteSize()
+ msg.map_int32_foreign_message[19].c = 128
+ self.assertEqual(msg.ByteSize(), size + 1)
+
def testMergeFrom(self):
msg = map_unittest_pb2.TestMap()
msg.map_int32_int32[12] = 34
@@ -1456,7 +1473,15 @@ class Proto3Test(BaseTestCase):
self.assertEqual(5, msg2.map_int32_foreign_message[111].c)
self.assertEqual(10, msg2.map_int32_foreign_message[222].c)
self.assertFalse(msg2.map_int32_foreign_message[222].HasField('d'))
- self.assertEqual(15, old_map_value.c)
+ if api_implementation.Type() != 'cpp':
+ # During the call to MergeFrom(), the C++ implementation will have
+ # deallocated the underlying message, but this is very difficult to detect
+ # properly. The line below is likely to cause a segmentation fault.
+ # With the Python implementation, old_map_value is just 'detached' from
+ # the main message. Using it will not crash, of course, but since it still
+ # has a reference to the parent message, I'm sure we can find interesting
+ # ways to cause inconsistencies.
+ self.assertEqual(15, old_map_value.c)
# Verify that there is only one entry per key, even though the MergeFrom
# may have internally created multiple entries for a single key in the
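
The guarded assertion above reflects the map MergeFrom() semantics: message-typed map values are replaced rather than merged in place, so a reference taken before the merge is detached (pure-Python implementation) or may be deallocated (C++ implementation). A sketch of that replacement behaviour under the pure-Python implementation, using the same map_unittest_pb2 module (key and values here are illustrative):

    from google.protobuf import map_unittest_pb2

    src = map_unittest_pb2.TestMap()
    src.map_int32_foreign_message[111].c = 5
    dst = map_unittest_pb2.TestMap()
    dst.map_int32_foreign_message[111].c = 15
    old_value = dst.map_int32_foreign_message[111]
    dst.MergeFrom(src)
    assert dst.map_int32_foreign_message[111].c == 5  # entry replaced by the source value
    assert old_value.c == 15  # detached old value keeps its fields (pure Python only)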
diff --git a/python/google/protobuf/internal/python_message.py b/python/google/protobuf/internal/python_message.py
index 4b701039..cb97cb28 100755
--- a/python/google/protobuf/internal/python_message.py
+++ b/python/google/protobuf/internal/python_message.py
@@ -288,7 +288,8 @@ def _AttachFieldHelpers(cls, field_descriptor):
if is_map_entry:
field_encoder = encoder.MapEncoder(field_descriptor)
- sizer = encoder.MapSizer(field_descriptor)
+ sizer = encoder.MapSizer(field_descriptor,
+ _IsMessageMapField(field_descriptor))
elif _IsMessageSetExtension(field_descriptor):
field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
sizer = encoder.MessageSetItemSizer(field_descriptor.number)
@@ -891,7 +892,7 @@ def _AddHasExtensionMethod(cls):
def _InternalUnpackAny(msg):
"""Unpacks Any message and returns the unpacked message.
- This internal method is differnt from public Any Unpack method which takes
+ This internal method is different from public Any Unpack method which takes
the target message as argument. _InternalUnpackAny method does not have
target message type and need to find the message type in descriptor pool.
diff --git a/python/google/protobuf/internal/python_protobuf.cc b/python/google/protobuf/internal/python_protobuf.cc
new file mode 100644
index 00000000..f90cc438
--- /dev/null
+++ b/python/google/protobuf/internal/python_protobuf.cc
@@ -0,0 +1,63 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: qrczak@google.com (Marcin Kowalczyk)
+
+#include <google/protobuf/python/python_protobuf.h>
+
+namespace google {
+namespace protobuf {
+namespace python {
+
+static const Message* GetCProtoInsidePyProtoStub(PyObject* msg) {
+ return NULL;
+}
+static Message* MutableCProtoInsidePyProtoStub(PyObject* msg) {
+ return NULL;
+}
+
+// This is initialized with a default, stub implementation.
+// If python-google.protobuf.cc is loaded, the function pointer is overridden
+// with a full implementation.
+const Message* (*GetCProtoInsidePyProtoPtr)(PyObject* msg) =
+ GetCProtoInsidePyProtoStub;
+Message* (*MutableCProtoInsidePyProtoPtr)(PyObject* msg) =
+ MutableCProtoInsidePyProtoStub;
+
+const Message* GetCProtoInsidePyProto(PyObject* msg) {
+ return GetCProtoInsidePyProtoPtr(msg);
+}
+Message* MutableCProtoInsidePyProto(PyObject* msg) {
+ return MutableCProtoInsidePyProtoPtr(msg);
+}
+
+} // namespace python
+} // namespace protobuf
+} // namespace google
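
python_protobuf.cc only supplies the stub side of the hook: both accessors return NULL until the C++-backed runtime overrides the function pointers with real implementations. From Python, whether that runtime is active can be checked the same way the guarded tests in this diff do:

    from google.protobuf.internal import api_implementation

    print(api_implementation.Type())  # 'cpp' when the C++ backend is loaded, otherwise 'python'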
diff --git a/python/google/protobuf/internal/reflection_test.py b/python/google/protobuf/internal/reflection_test.py
index 0e881015..55b0d72e 100755
--- a/python/google/protobuf/internal/reflection_test.py
+++ b/python/google/protobuf/internal/reflection_test.py
@@ -1551,7 +1551,14 @@ class ReflectionTest(BaseTestCase):
container = copy.deepcopy(proto1.repeated_int32)
self.assertEqual([2, 3], container)
- # TODO(anuraag): Implement deepcopy for repeated composite / extension dict
+ message1 = proto1.repeated_nested_message.add()
+ message1.bb = 1
+ messages = copy.deepcopy(proto1.repeated_nested_message)
+ self.assertEqual(proto1.repeated_nested_message, messages)
+ message1.bb = 2
+ self.assertNotEqual(proto1.repeated_nested_message, messages)
+
+ # TODO(anuraag): Implement deepcopy for extension dict
def testClear(self):
proto = unittest_pb2.TestAllTypes()
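
The replaced TODO above records that deepcopy now works for repeated composite containers; only the extension dict remains outstanding. A minimal sketch of what the new assertions rely on:

    import copy
    from google.protobuf import unittest_pb2

    proto = unittest_pb2.TestAllTypes()
    proto.repeated_nested_message.add().bb = 1
    snapshot = copy.deepcopy(proto.repeated_nested_message)
    proto.repeated_nested_message[0].bb = 2
    assert snapshot[0].bb == 1  # the deep copy no longer tracks the original container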
diff --git a/python/google/protobuf/internal/symbol_database_test.py b/python/google/protobuf/internal/symbol_database_test.py
index 4f5173b2..af42681a 100644
--- a/python/google/protobuf/internal/symbol_database_test.py
+++ b/python/google/protobuf/internal/symbol_database_test.py
@@ -60,6 +60,7 @@ class SymbolDatabaseTest(unittest.TestCase):
db.RegisterMessage(unittest_pb2.TestAllTypes.RepeatedGroup)
db.RegisterEnumDescriptor(unittest_pb2.ForeignEnum.DESCRIPTOR)
db.RegisterEnumDescriptor(unittest_pb2.TestAllTypes.NestedEnum.DESCRIPTOR)
+ db.RegisterServiceDescriptor(unittest_pb2._TESTSERVICE)
return db
def testGetPrototype(self):
@@ -109,7 +110,13 @@ class SymbolDatabaseTest(unittest.TestCase):
self._Database().pool.FindMessageTypeByName(
'protobuf_unittest.TestAllTypes.NestedMessage').full_name)
- def testFindFindContainingSymbol(self):
+ def testFindServiceByName(self):
+ self.assertEqual(
+ 'protobuf_unittest.TestService',
+ self._Database().pool.FindServiceByName(
+ 'protobuf_unittest.TestService').full_name)
+
+ def testFindFileContainingSymbol(self):
# Lookup based on either enum or message.
self.assertEqual(
'google/protobuf/unittest.proto',
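
RegisterServiceDescriptor() is the new registration entry point used in the setup above; it makes the service reachable through the database's pool. A sketch mirroring testFindServiceByName (pure-Python implementation, as in the guarded tests elsewhere in this diff):

    from google.protobuf import symbol_database
    from google.protobuf import unittest_pb2

    db = symbol_database.SymbolDatabase()
    db.RegisterServiceDescriptor(unittest_pb2._TESTSERVICE)
    service = db.pool.FindServiceByName('protobuf_unittest.TestService')
    assert service.full_name == 'protobuf_unittest.TestService'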
diff --git a/python/google/protobuf/internal/text_format_test.py b/python/google/protobuf/internal/text_format_test.py
index 176cbd15..188310b2 100755
--- a/python/google/protobuf/internal/text_format_test.py
+++ b/python/google/protobuf/internal/text_format_test.py
@@ -1119,6 +1119,11 @@ class Proto3Tests(unittest.TestCase):
packed_message = unittest_pb2.OneString()
message.any_value.Unpack(packed_message)
self.assertEqual('string', packed_message.data)
+ message.Clear()
+ text_format.Parse(text, message, descriptor_pool=descriptor_pool.Default())
+ packed_message = unittest_pb2.OneString()
+ message.any_value.Unpack(packed_message)
+ self.assertEqual('string', packed_message.data)
def testMergeExpandedAnyRepeated(self):
message = any_test_pb2.TestAny()
@@ -1373,6 +1378,52 @@ class TokenizerTest(unittest.TestCase):
self.assertEqual('# some comment', tokenizer.ConsumeComment())
self.assertTrue(tokenizer.AtEnd())
+ def testConsumeLineComment(self):
+ tokenizer = text_format.Tokenizer('# some comment'.splitlines(),
+ skip_comments=False)
+ self.assertFalse(tokenizer.AtEnd())
+ self.assertEqual((False, '# some comment'),
+ tokenizer.ConsumeCommentOrTrailingComment())
+ self.assertTrue(tokenizer.AtEnd())
+
+ def testConsumeTwoLineComments(self):
+ text = '# some comment\n# another comment'
+ tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
+ self.assertEqual((False, '# some comment'),
+ tokenizer.ConsumeCommentOrTrailingComment())
+ self.assertFalse(tokenizer.AtEnd())
+ self.assertEqual((False, '# another comment'),
+ tokenizer.ConsumeCommentOrTrailingComment())
+ self.assertTrue(tokenizer.AtEnd())
+
+ def testConsumeAndCheckTrailingComment(self):
+ text = 'some_number: 4 # some comment' # trailing comment on the same line
+ tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
+ self.assertRaises(text_format.ParseError,
+ tokenizer.ConsumeCommentOrTrailingComment)
+
+ self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
+ self.assertEqual(tokenizer.token, ':')
+ tokenizer.NextToken()
+ self.assertRaises(text_format.ParseError,
+ tokenizer.ConsumeCommentOrTrailingComment)
+ self.assertEqual(4, tokenizer.ConsumeInteger())
+ self.assertFalse(tokenizer.AtEnd())
+
+ self.assertEqual((True, '# some comment'),
+ tokenizer.ConsumeCommentOrTrailingComment())
+ self.assertTrue(tokenizer.AtEnd())
+
+ def testHashinComment(self):
+ text = 'some_number: 4 # some comment # not a new comment'
+ tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
+ self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
+ self.assertEqual(tokenizer.token, ':')
+ tokenizer.NextToken()
+ self.assertEqual(4, tokenizer.ConsumeInteger())
+ self.assertEqual((True, '# some comment # not a new comment'),
+ tokenizer.ConsumeCommentOrTrailingComment())
+ self.assertTrue(tokenizer.AtEnd())
if __name__ == '__main__':
unittest.main()
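
The tokenizer tests above cover the new skip_comments=False mode and ConsumeCommentOrTrailingComment(), which returns an (is_trailing_comment, text) pair. A condensed sketch of the flow they exercise:

    from google.protobuf import text_format

    tokenizer = text_format.Tokenizer('some_number: 4  # note'.splitlines(),
                                      skip_comments=False)
    tokenizer.ConsumeIdentifier()  # 'some_number'
    tokenizer.NextToken()          # step past ':'
    tokenizer.ConsumeInteger()     # 4
    print(tokenizer.ConsumeCommentOrTrailingComment())  # (True, '# note')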