Diffstat (limited to 'python/google/protobuf')
-rw-r--r--  python/google/protobuf/descriptor_database.py                  11
-rw-r--r--  python/google/protobuf/descriptor_pool.py                      99
-rwxr-xr-x  python/google/protobuf/internal/containers.py                   2
-rw-r--r--  python/google/protobuf/internal/descriptor_pool_test.py        36
-rwxr-xr-x  python/google/protobuf/internal/encoder.py                      7
-rwxr-xr-x  python/google/protobuf/internal/message_test.py                27
-rwxr-xr-x  python/google/protobuf/internal/python_message.py               5
-rw-r--r--  python/google/protobuf/internal/python_protobuf.cc             63
-rwxr-xr-x  python/google/protobuf/internal/reflection_test.py              9
-rw-r--r--  python/google/protobuf/internal/symbol_database_test.py         9
-rwxr-xr-x  python/google/protobuf/internal/text_format_test.py            51
-rw-r--r--  python/google/protobuf/pyext/descriptor.cc                     19
-rw-r--r--  python/google/protobuf/pyext/descriptor.h                       2
-rw-r--r--  python/google/protobuf/pyext/descriptor_pool.cc                20
-rw-r--r--  python/google/protobuf/pyext/descriptor_pool.h                  1
-rw-r--r--  python/google/protobuf/pyext/repeated_composite_container.cc  29
-rw-r--r--  python/google/protobuf/pyext/scoped_pyobject_ptr.h             59
-rw-r--r--  python/google/protobuf/python_protobuf.h (renamed from python/google/protobuf/pyext/python_protobuf.h)  0
-rwxr-xr-x  python/google/protobuf/reflection.py                            6
-rw-r--r--  python/google/protobuf/symbol_database.py                      11
-rwxr-xr-x  python/google/protobuf/text_format.py                          35
21 files changed, 440 insertions, 61 deletions
diff --git a/python/google/protobuf/descriptor_database.py b/python/google/protobuf/descriptor_database.py
index 1333f996..40bcdd72 100644
--- a/python/google/protobuf/descriptor_database.py
+++ b/python/google/protobuf/descriptor_database.py
@@ -54,9 +54,9 @@ class DescriptorDatabase(object):
Args:
file_desc_proto: The FileDescriptorProto to add.
Raises:
- DescriptorDatabaseException: if an attempt is made to add a proto
- with the same name but different definition than an exisiting
- proto in the database.
+ DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
+ add a proto with the same name but different definition than an
+        existing proto in the database.
"""
proto_name = file_desc_proto.name
if proto_name not in self._file_desc_protos_by_file:
@@ -65,7 +65,7 @@ class DescriptorDatabase(object):
raise DescriptorDatabaseConflictingDefinitionError(
'%s already added, but with different descriptor.' % proto_name)
- # Add the top-level Message, Enum and Extension descriptors to the index.
+ # Add all the top-level descriptors to the index.
package = file_desc_proto.package
for message in file_desc_proto.message_type:
self._file_desc_protos_by_symbol.update(
@@ -76,6 +76,9 @@ class DescriptorDatabase(object):
for extension in file_desc_proto.extension:
self._file_desc_protos_by_symbol[
'.'.join((package, extension.name))] = file_desc_proto
+ for service in file_desc_proto.service:
+ self._file_desc_protos_by_symbol[
+ '.'.join((package, service.name))] = file_desc_proto
def FindFileByName(self, name):
"""Finds the file descriptor proto by file name.
diff --git a/python/google/protobuf/descriptor_pool.py b/python/google/protobuf/descriptor_pool.py
index fc3a7f44..7844575f 100644
--- a/python/google/protobuf/descriptor_pool.py
+++ b/python/google/protobuf/descriptor_pool.py
@@ -124,6 +124,7 @@ class DescriptorPool(object):
self._descriptor_db = descriptor_db
self._descriptors = {}
self._enum_descriptors = {}
+ self._service_descriptors = {}
self._file_descriptors = {}
self._toplevel_extensions = {}
# We store extensions in two two-level mappings: The first key is the
@@ -174,7 +175,7 @@ class DescriptorPool(object):
def AddEnumDescriptor(self, enum_desc):
"""Adds an EnumDescriptor to the pool.
- This method also registers the FileDescriptor associated with the message.
+ This method also registers the FileDescriptor associated with the enum.
Args:
enum_desc: An EnumDescriptor.
@@ -186,6 +187,18 @@ class DescriptorPool(object):
self._enum_descriptors[enum_desc.full_name] = enum_desc
self.AddFileDescriptor(enum_desc.file)
+ def AddServiceDescriptor(self, service_desc):
+ """Adds a ServiceDescriptor to the pool.
+
+ Args:
+ service_desc: A ServiceDescriptor.
+ """
+
+ if not isinstance(service_desc, descriptor.ServiceDescriptor):
+ raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
+
+ self._service_descriptors[service_desc.full_name] = service_desc
+
def AddExtensionDescriptor(self, extension):
"""Adds a FieldDescriptor describing an extension to the pool.
@@ -252,7 +265,7 @@ class DescriptorPool(object):
A FileDescriptor for the named file.
Raises:
- KeyError: if the file can not be found in the pool.
+ KeyError: if the file cannot be found in the pool.
"""
try:
@@ -281,7 +294,7 @@ class DescriptorPool(object):
A FileDescriptor that contains the specified symbol.
Raises:
- KeyError: if the file can not be found in the pool.
+ KeyError: if the file cannot be found in the pool.
"""
symbol = _NormalizeFullyQualifiedName(symbol)
@@ -296,15 +309,18 @@ class DescriptorPool(object):
pass
try:
- file_proto = self._internal_db.FindFileContainingSymbol(symbol)
- except KeyError as error:
- if self._descriptor_db:
- file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
- else:
- raise error
- if not file_proto:
+ return self._FindFileContainingSymbolInDb(symbol)
+ except KeyError:
+ pass
+
+ # Try nested extensions inside a message.
+ message_name, _, extension_name = symbol.rpartition('.')
+ try:
+ scope = self.FindMessageTypeByName(message_name)
+ assert scope.extensions_by_name[extension_name]
+ return scope.file
+ except KeyError:
raise KeyError('Cannot find a file containing %s' % symbol)
- return self._ConvertFileProtoToFileDescriptor(file_proto)
def FindMessageTypeByName(self, full_name):
"""Loads the named descriptor from the pool.
@@ -314,11 +330,14 @@ class DescriptorPool(object):
Returns:
The descriptor for the named type.
+
+ Raises:
+ KeyError: if the message cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
if full_name not in self._descriptors:
- self.FindFileContainingSymbol(full_name)
+ self._FindFileContainingSymbolInDb(full_name)
return self._descriptors[full_name]
def FindEnumTypeByName(self, full_name):
@@ -329,11 +348,14 @@ class DescriptorPool(object):
Returns:
The enum descriptor for the named type.
+
+ Raises:
+ KeyError: if the enum cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
if full_name not in self._enum_descriptors:
- self.FindFileContainingSymbol(full_name)
+ self._FindFileContainingSymbolInDb(full_name)
return self._enum_descriptors[full_name]
def FindFieldByName(self, full_name):
@@ -344,6 +366,9 @@ class DescriptorPool(object):
Returns:
The field descriptor for the named field.
+
+ Raises:
+ KeyError: if the field cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
message_name, _, field_name = full_name.rpartition('.')
@@ -358,6 +383,9 @@ class DescriptorPool(object):
Returns:
A FieldDescriptor, describing the named extension.
+
+ Raises:
+ KeyError: if the extension cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
try:
@@ -374,7 +402,7 @@ class DescriptorPool(object):
scope = self.FindMessageTypeByName(message_name)
except KeyError:
# Some extensions are defined at file scope.
- scope = self.FindFileContainingSymbol(full_name)
+ scope = self._FindFileContainingSymbolInDb(full_name)
return scope.extensions_by_name[extension_name]
def FindExtensionByNumber(self, message_descriptor, number):
@@ -390,7 +418,7 @@ class DescriptorPool(object):
Returns:
A FieldDescriptor describing the extension.
- Raise:
+ Raises:
KeyError: when no extension with the given number is known for the
specified message.
"""
@@ -410,6 +438,46 @@ class DescriptorPool(object):
"""
return list(self._extensions_by_number[message_descriptor].values())
+ def FindServiceByName(self, full_name):
+ """Loads the named service descriptor from the pool.
+
+ Args:
+ full_name: The full name of the service descriptor to load.
+
+ Returns:
+ The service descriptor for the named service.
+
+ Raises:
+ KeyError: if the service cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ if full_name not in self._service_descriptors:
+ self._FindFileContainingSymbolInDb(full_name)
+ return self._service_descriptors[full_name]
+
+ def _FindFileContainingSymbolInDb(self, symbol):
+ """Finds the file in descriptor DB containing the specified symbol.
+
+ Args:
+ symbol: The name of the symbol to search for.
+
+ Returns:
+ A FileDescriptor that contains the specified symbol.
+
+ Raises:
+ KeyError: if the file cannot be found in the descriptor database.
+ """
+ try:
+ file_proto = self._internal_db.FindFileContainingSymbol(symbol)
+ except KeyError as error:
+ if self._descriptor_db:
+ file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
+ else:
+ raise error
+ if not file_proto:
+ raise KeyError('Cannot find a file containing %s' % symbol)
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
+
def _ConvertFileProtoToFileDescriptor(self, file_proto):
"""Creates a FileDescriptor from a proto or returns a cached copy.
@@ -804,6 +872,7 @@ class DescriptorPool(object):
methods=methods,
options=_OptionsOrNone(service_proto),
file=file_desc)
+ self._service_descriptors[service_name] = desc
return desc
def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
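
A short usage sketch for the new FindServiceByName(), mirroring the descriptor_pool_test changes further down in this diff:

  from google.protobuf import descriptor_pb2
  from google.protobuf import descriptor_pool
  from google.protobuf import unittest_import_public_pb2
  from google.protobuf import unittest_import_pb2
  from google.protobuf import unittest_pb2

  pool = descriptor_pool.DescriptorPool()
  # unittest.proto depends on the two import files, so add all three protos.
  for mod in (unittest_import_public_pb2, unittest_import_pb2, unittest_pb2):
    pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
        mod.DESCRIPTOR.serialized_pb))

  service = pool.FindServiceByName('protobuf_unittest.TestService')
  print(service.full_name)  # 'protobuf_unittest.TestService'
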
diff --git a/python/google/protobuf/internal/containers.py b/python/google/protobuf/internal/containers.py
index de13018e..68be9e54 100755
--- a/python/google/protobuf/internal/containers.py
+++ b/python/google/protobuf/internal/containers.py
@@ -275,7 +275,7 @@ class RepeatedScalarFieldContainer(BaseContainer):
new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
if new_values:
self._values.extend(new_values)
- self._message_listener.Modified()
+ self._message_listener.Modified()
def MergeFrom(self, other):
"""Appends the contents of another repeated field of the same type to this
diff --git a/python/google/protobuf/internal/descriptor_pool_test.py b/python/google/protobuf/internal/descriptor_pool_test.py
index 1e710dcf..2ba1d285 100644
--- a/python/google/protobuf/internal/descriptor_pool_test.py
+++ b/python/google/protobuf/internal/descriptor_pool_test.py
@@ -71,6 +71,13 @@ class DescriptorPoolTest(unittest.TestCase):
self.pool.Add(self.factory_test1_fd)
self.pool.Add(self.factory_test2_fd)
+ self.pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ unittest_import_public_pb2.DESCRIPTOR.serialized_pb))
+ self.pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ unittest_import_pb2.DESCRIPTOR.serialized_pb))
+ self.pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
+ unittest_pb2.DESCRIPTOR.serialized_pb))
+
def testFindFileByName(self):
name1 = 'google/protobuf/internal/factory_test1.proto'
file_desc1 = self.pool.FindFileByName(name1)
@@ -107,6 +114,20 @@ class DescriptorPoolTest(unittest.TestCase):
self.assertEqual('google.protobuf.python.internal', file_desc2.package)
self.assertIn('Factory2Message', file_desc2.message_types_by_name)
+ # Tests top level extension.
+ file_desc3 = self.pool.FindFileContainingSymbol(
+ 'google.protobuf.python.internal.another_field')
+ self.assertIsInstance(file_desc3, descriptor.FileDescriptor)
+ self.assertEqual('google/protobuf/internal/factory_test2.proto',
+ file_desc3.name)
+
+ # Tests nested extension inside a message.
+ file_desc4 = self.pool.FindFileContainingSymbol(
+ 'google.protobuf.python.internal.Factory2Message.one_more_field')
+ self.assertIsInstance(file_desc4, descriptor.FileDescriptor)
+ self.assertEqual('google/protobuf/internal/factory_test2.proto',
+ file_desc4.name)
+
def testFindFileContainingSymbolFailure(self):
with self.assertRaises(KeyError):
self.pool.FindFileContainingSymbol('Does not exist')
@@ -311,6 +332,10 @@ class DescriptorPoolTest(unittest.TestCase):
self.pool.FindExtensionByName(
'google.protobuf.python.internal.Factory1Message.list_value')
+ def testFindService(self):
+ service = self.pool.FindServiceByName('protobuf_unittest.TestService')
+ self.assertEqual(service.full_name, 'protobuf_unittest.TestService')
+
def testUserDefinedDB(self):
db = descriptor_database.DescriptorDatabase()
self.pool = descriptor_pool.DescriptorPool(db)
@@ -645,6 +670,17 @@ class AddDescriptorTest(unittest.TestCase):
@unittest.skipIf(api_implementation.Type() == 'cpp',
'With the cpp implementation, Add() must be called first')
+ def testService(self):
+ pool = descriptor_pool.DescriptorPool()
+ with self.assertRaises(KeyError):
+ pool.FindServiceByName('protobuf_unittest.TestService')
+ pool.AddServiceDescriptor(unittest_pb2._TESTSERVICE)
+ self.assertEqual(
+ 'protobuf_unittest.TestService',
+ pool.FindServiceByName('protobuf_unittest.TestService').full_name)
+
+ @unittest.skipIf(api_implementation.Type() == 'cpp',
+ 'With the cpp implementation, Add() must be called first')
def testFile(self):
pool = descriptor_pool.DescriptorPool()
pool.AddFileDescriptor(unittest_pb2.DESCRIPTOR)
diff --git a/python/google/protobuf/internal/encoder.py b/python/google/protobuf/internal/encoder.py
index 48ef2df3..80e59cab 100755
--- a/python/google/protobuf/internal/encoder.py
+++ b/python/google/protobuf/internal/encoder.py
@@ -340,7 +340,7 @@ def MessageSetItemSizer(field_number):
# Map is special: it needs custom logic to compute its size properly.
-def MapSizer(field_descriptor):
+def MapSizer(field_descriptor, is_message_map):
"""Returns a sizer for a map field."""
# Can't look at field_descriptor.message_type._concrete_class because it may
@@ -355,9 +355,12 @@ def MapSizer(field_descriptor):
# It's wasteful to create the messages and throw them away one second
# later since we'll do the same for the actual encode. But there's not an
# obvious way to avoid this within the current design without tons of code
- # duplication.
+      # duplication. For a message map, value.ByteSize() should be called to
+      # refresh the cached byte size of each value.
entry_msg = message_type._concrete_class(key=key, value=value)
total += message_sizer(entry_msg)
+ if is_message_map:
+ value.ByteSize()
return total
return FieldSize
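
At the Python level, the extra is_message_map flag makes the sizer call value.ByteSize() on each map value so its cached size is refreshed during sizing. A sketch mirroring the testMapByteSize case added in message_test.py below:

  from google.protobuf import map_unittest_pb2

  msg = map_unittest_pb2.TestMap()
  msg.map_int32_foreign_message[19].c = 1
  size = msg.ByteSize()
  msg.map_int32_foreign_message[19].c = 128  # value now needs one more byte
  assert msg.ByteSize() == size + 1          # cached sizes stay up to date
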
diff --git a/python/google/protobuf/internal/message_test.py b/python/google/protobuf/internal/message_test.py
index 9986c0d9..e8b794f0 100755
--- a/python/google/protobuf/internal/message_test.py
+++ b/python/google/protobuf/internal/message_test.py
@@ -564,6 +564,11 @@ class MessageTest(BaseTestCase):
self.assertIsInstance(m.repeated_nested_message,
collections.MutableSequence)
+ def testRepeatedFieldInsideNestedMessage(self, message_module):
+ m = message_module.NestedTestAllTypes()
+ m.payload.repeated_int32.extend([])
+ self.assertTrue(m.HasField('payload'))
+
def ensureNestedMessageExists(self, msg, attribute):
"""Make sure that a nested message object exists.
@@ -1432,6 +1437,18 @@ class Proto3Test(BaseTestCase):
self.assertIn(-456, msg2.map_int32_foreign_message)
self.assertEqual(2, len(msg2.map_int32_foreign_message))
+ def testMapByteSize(self):
+ msg = map_unittest_pb2.TestMap()
+ msg.map_int32_int32[1] = 1
+ size = msg.ByteSize()
+ msg.map_int32_int32[1] = 128
+ self.assertEqual(msg.ByteSize(), size + 1)
+
+ msg.map_int32_foreign_message[19].c = 1
+ size = msg.ByteSize()
+ msg.map_int32_foreign_message[19].c = 128
+ self.assertEqual(msg.ByteSize(), size + 1)
+
def testMergeFrom(self):
msg = map_unittest_pb2.TestMap()
msg.map_int32_int32[12] = 34
@@ -1456,7 +1473,15 @@ class Proto3Test(BaseTestCase):
self.assertEqual(5, msg2.map_int32_foreign_message[111].c)
self.assertEqual(10, msg2.map_int32_foreign_message[222].c)
self.assertFalse(msg2.map_int32_foreign_message[222].HasField('d'))
- self.assertEqual(15, old_map_value.c)
+ if api_implementation.Type() != 'cpp':
+ # During the call to MergeFrom(), the C++ implementation will have
+ # deallocated the underlying message, but this is very difficult to detect
+ # properly. The line below is likely to cause a segmentation fault.
+ # With the Python implementation, old_map_value is just 'detached' from
+ # the main message. Using it will not crash of course, but since it still
+ # have a reference to the parent message I'm sure we can find interesting
+      # has a reference to the parent message, I'm sure we can find interesting
+ self.assertEqual(15, old_map_value.c)
# Verify that there is only one entry per key, even though the MergeFrom
# may have internally created multiple entries for a single key in the
diff --git a/python/google/protobuf/internal/python_message.py b/python/google/protobuf/internal/python_message.py
index 4b701039..cb97cb28 100755
--- a/python/google/protobuf/internal/python_message.py
+++ b/python/google/protobuf/internal/python_message.py
@@ -288,7 +288,8 @@ def _AttachFieldHelpers(cls, field_descriptor):
if is_map_entry:
field_encoder = encoder.MapEncoder(field_descriptor)
- sizer = encoder.MapSizer(field_descriptor)
+ sizer = encoder.MapSizer(field_descriptor,
+ _IsMessageMapField(field_descriptor))
elif _IsMessageSetExtension(field_descriptor):
field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
sizer = encoder.MessageSetItemSizer(field_descriptor.number)
@@ -891,7 +892,7 @@ def _AddHasExtensionMethod(cls):
def _InternalUnpackAny(msg):
"""Unpacks Any message and returns the unpacked message.
- This internal method is differnt from public Any Unpack method which takes
+ This internal method is different from public Any Unpack method which takes
the target message as argument. _InternalUnpackAny method does not have
target message type and need to find the message type in descriptor pool.
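
For contrast with _InternalUnpackAny, a short sketch of the public Any API the docstring mentions, where the caller supplies the target message type (types borrowed from the unittest protos used elsewhere in this change):

  from google.protobuf import any_pb2
  from google.protobuf import unittest_pb2

  any_msg = any_pb2.Any()
  any_msg.Pack(unittest_pb2.OneString(data='string'))

  target = unittest_pb2.OneString()
  assert any_msg.Unpack(target)   # the caller provides the target type
  assert target.data == 'string'
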
diff --git a/python/google/protobuf/internal/python_protobuf.cc b/python/google/protobuf/internal/python_protobuf.cc
new file mode 100644
index 00000000..f90cc438
--- /dev/null
+++ b/python/google/protobuf/internal/python_protobuf.cc
@@ -0,0 +1,63 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: qrczak@google.com (Marcin Kowalczyk)
+
+#include <google/protobuf/python/python_protobuf.h>
+
+namespace google {
+namespace protobuf {
+namespace python {
+
+static const Message* GetCProtoInsidePyProtoStub(PyObject* msg) {
+ return NULL;
+}
+static Message* MutableCProtoInsidePyProtoStub(PyObject* msg) {
+ return NULL;
+}
+
+// This is initialized with a default, stub implementation.
+// If python-google.protobuf.cc is loaded, the function pointer is overridden
+// with a full implementation.
+const Message* (*GetCProtoInsidePyProtoPtr)(PyObject* msg) =
+ GetCProtoInsidePyProtoStub;
+Message* (*MutableCProtoInsidePyProtoPtr)(PyObject* msg) =
+ MutableCProtoInsidePyProtoStub;
+
+const Message* GetCProtoInsidePyProto(PyObject* msg) {
+ return GetCProtoInsidePyProtoPtr(msg);
+}
+Message* MutableCProtoInsidePyProto(PyObject* msg) {
+ return MutableCProtoInsidePyProtoPtr(msg);
+}
+
+} // namespace python
+} // namespace protobuf
+} // namespace google
diff --git a/python/google/protobuf/internal/reflection_test.py b/python/google/protobuf/internal/reflection_test.py
index 0e881015..55b0d72e 100755
--- a/python/google/protobuf/internal/reflection_test.py
+++ b/python/google/protobuf/internal/reflection_test.py
@@ -1551,7 +1551,14 @@ class ReflectionTest(BaseTestCase):
container = copy.deepcopy(proto1.repeated_int32)
self.assertEqual([2, 3], container)
- # TODO(anuraag): Implement deepcopy for repeated composite / extension dict
+ message1 = proto1.repeated_nested_message.add()
+ message1.bb = 1
+ messages = copy.deepcopy(proto1.repeated_nested_message)
+ self.assertEqual(proto1.repeated_nested_message, messages)
+ message1.bb = 2
+ self.assertNotEqual(proto1.repeated_nested_message, messages)
+
+ # TODO(anuraag): Implement deepcopy for extension dict
def testClear(self):
proto = unittest_pb2.TestAllTypes()
diff --git a/python/google/protobuf/internal/symbol_database_test.py b/python/google/protobuf/internal/symbol_database_test.py
index 4f5173b2..af42681a 100644
--- a/python/google/protobuf/internal/symbol_database_test.py
+++ b/python/google/protobuf/internal/symbol_database_test.py
@@ -60,6 +60,7 @@ class SymbolDatabaseTest(unittest.TestCase):
db.RegisterMessage(unittest_pb2.TestAllTypes.RepeatedGroup)
db.RegisterEnumDescriptor(unittest_pb2.ForeignEnum.DESCRIPTOR)
db.RegisterEnumDescriptor(unittest_pb2.TestAllTypes.NestedEnum.DESCRIPTOR)
+ db.RegisterServiceDescriptor(unittest_pb2._TESTSERVICE)
return db
def testGetPrototype(self):
@@ -109,7 +110,13 @@ class SymbolDatabaseTest(unittest.TestCase):
self._Database().pool.FindMessageTypeByName(
'protobuf_unittest.TestAllTypes.NestedMessage').full_name)
- def testFindFindContainingSymbol(self):
+ def testFindServiceByName(self):
+ self.assertEqual(
+ 'protobuf_unittest.TestService',
+ self._Database().pool.FindServiceByName(
+ 'protobuf_unittest.TestService').full_name)
+
+ def testFindFileContainingSymbol(self):
# Lookup based on either enum or message.
self.assertEqual(
'google/protobuf/unittest.proto',
diff --git a/python/google/protobuf/internal/text_format_test.py b/python/google/protobuf/internal/text_format_test.py
index 176cbd15..188310b2 100755
--- a/python/google/protobuf/internal/text_format_test.py
+++ b/python/google/protobuf/internal/text_format_test.py
@@ -1119,6 +1119,11 @@ class Proto3Tests(unittest.TestCase):
packed_message = unittest_pb2.OneString()
message.any_value.Unpack(packed_message)
self.assertEqual('string', packed_message.data)
+ message.Clear()
+ text_format.Parse(text, message, descriptor_pool=descriptor_pool.Default())
+ packed_message = unittest_pb2.OneString()
+ message.any_value.Unpack(packed_message)
+ self.assertEqual('string', packed_message.data)
def testMergeExpandedAnyRepeated(self):
message = any_test_pb2.TestAny()
@@ -1373,6 +1378,52 @@ class TokenizerTest(unittest.TestCase):
self.assertEqual('# some comment', tokenizer.ConsumeComment())
self.assertTrue(tokenizer.AtEnd())
+ def testConsumeLineComment(self):
+ tokenizer = text_format.Tokenizer('# some comment'.splitlines(),
+ skip_comments=False)
+ self.assertFalse(tokenizer.AtEnd())
+ self.assertEqual((False, '# some comment'),
+ tokenizer.ConsumeCommentOrTrailingComment())
+ self.assertTrue(tokenizer.AtEnd())
+
+ def testConsumeTwoLineComments(self):
+ text = '# some comment\n# another comment'
+ tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
+ self.assertEqual((False, '# some comment'),
+ tokenizer.ConsumeCommentOrTrailingComment())
+ self.assertFalse(tokenizer.AtEnd())
+ self.assertEqual((False, '# another comment'),
+ tokenizer.ConsumeCommentOrTrailingComment())
+ self.assertTrue(tokenizer.AtEnd())
+
+ def testConsumeAndCheckTrailingComment(self):
+ text = 'some_number: 4 # some comment' # trailing comment on the same line
+ tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
+ self.assertRaises(text_format.ParseError,
+ tokenizer.ConsumeCommentOrTrailingComment)
+
+ self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
+ self.assertEqual(tokenizer.token, ':')
+ tokenizer.NextToken()
+ self.assertRaises(text_format.ParseError,
+ tokenizer.ConsumeCommentOrTrailingComment)
+ self.assertEqual(4, tokenizer.ConsumeInteger())
+ self.assertFalse(tokenizer.AtEnd())
+
+ self.assertEqual((True, '# some comment'),
+ tokenizer.ConsumeCommentOrTrailingComment())
+ self.assertTrue(tokenizer.AtEnd())
+
+  def testHashInComment(self):
+ text = 'some_number: 4 # some comment # not a new comment'
+ tokenizer = text_format.Tokenizer(text.splitlines(), skip_comments=False)
+ self.assertEqual('some_number', tokenizer.ConsumeIdentifier())
+ self.assertEqual(tokenizer.token, ':')
+ tokenizer.NextToken()
+ self.assertEqual(4, tokenizer.ConsumeInteger())
+ self.assertEqual((True, '# some comment # not a new comment'),
+ tokenizer.ConsumeCommentOrTrailingComment())
+ self.assertTrue(tokenizer.AtEnd())
if __name__ == '__main__':
unittest.main()
diff --git a/python/google/protobuf/pyext/descriptor.cc b/python/google/protobuf/pyext/descriptor.cc
index 924ae0b9..f13e1bc1 100644
--- a/python/google/protobuf/pyext/descriptor.cc
+++ b/python/google/protobuf/pyext/descriptor.cc
@@ -32,6 +32,7 @@
#include <Python.h>
#include <frameobject.h>
+#include <google/protobuf/stubs/hash.h>
#include <string>
#include <google/protobuf/io/coded_stream.h>
@@ -1666,6 +1667,15 @@ PyObject* PyServiceDescriptor_FromDescriptor(
&PyServiceDescriptor_Type, service_descriptor, NULL);
}
+const ServiceDescriptor* PyServiceDescriptor_AsDescriptor(PyObject* obj) {
+ if (!PyObject_TypeCheck(obj, &PyServiceDescriptor_Type)) {
+ PyErr_SetString(PyExc_TypeError, "Not a ServiceDescriptor");
+ return NULL;
+ }
+ return reinterpret_cast<const ServiceDescriptor*>(
+ reinterpret_cast<PyBaseDescriptor*>(obj)->descriptor);
+}
+
namespace method_descriptor {
// Unchecked accessor to the C++ pointer.
@@ -1769,6 +1779,15 @@ PyObject* PyMethodDescriptor_FromDescriptor(
&PyMethodDescriptor_Type, method_descriptor, NULL);
}
+const MethodDescriptor* PyMethodDescriptor_AsDescriptor(PyObject* obj) {
+ if (!PyObject_TypeCheck(obj, &PyMethodDescriptor_Type)) {
+ PyErr_SetString(PyExc_TypeError, "Not a MethodDescriptor");
+ return NULL;
+ }
+ return reinterpret_cast<const MethodDescriptor*>(
+ reinterpret_cast<PyBaseDescriptor*>(obj)->descriptor);
+}
+
// Add a enum values to a type dictionary.
static bool AddEnumValues(PyTypeObject *type,
const EnumDescriptor* enum_descriptor) {
diff --git a/python/google/protobuf/pyext/descriptor.h b/python/google/protobuf/pyext/descriptor.h
index 1ae0e672..f081df84 100644
--- a/python/google/protobuf/pyext/descriptor.h
+++ b/python/google/protobuf/pyext/descriptor.h
@@ -80,6 +80,8 @@ const Descriptor* PyMessageDescriptor_AsDescriptor(PyObject* obj);
const FieldDescriptor* PyFieldDescriptor_AsDescriptor(PyObject* obj);
const EnumDescriptor* PyEnumDescriptor_AsDescriptor(PyObject* obj);
const FileDescriptor* PyFileDescriptor_AsDescriptor(PyObject* obj);
+const ServiceDescriptor* PyServiceDescriptor_AsDescriptor(PyObject* obj);
+const MethodDescriptor* PyMethodDescriptor_AsDescriptor(PyObject* obj);
// Returns the raw C++ pointer.
const void* PyDescriptor_AsVoidPtr(PyObject* obj);
diff --git a/python/google/protobuf/pyext/descriptor_pool.cc b/python/google/protobuf/pyext/descriptor_pool.cc
index fa66bf9a..16f4d49d 100644
--- a/python/google/protobuf/pyext/descriptor_pool.cc
+++ b/python/google/protobuf/pyext/descriptor_pool.cc
@@ -39,6 +39,7 @@
#include <google/protobuf/pyext/message.h>
#include <google/protobuf/pyext/message_factory.h>
#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
+#include <google/protobuf/stubs/hash.h>
#if PY_MAJOR_VERSION >= 3
#define PyString_FromStringAndSize PyUnicode_FromStringAndSize
@@ -437,8 +438,23 @@ PyObject* AddExtensionDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
Py_RETURN_NONE;
}
-// The code below loads new Descriptors from a serialized FileDescriptorProto.
+PyObject* AddServiceDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
+ const ServiceDescriptor* service_descriptor =
+ PyServiceDescriptor_AsDescriptor(descriptor);
+ if (!service_descriptor) {
+ return NULL;
+ }
+ if (service_descriptor !=
+ self->pool->FindServiceByName(service_descriptor->full_name())) {
+ PyErr_Format(PyExc_ValueError,
+ "The service descriptor %s does not belong to this pool",
+ service_descriptor->full_name().c_str());
+ return NULL;
+ }
+ Py_RETURN_NONE;
+}
+// The code below loads new Descriptors from a serialized FileDescriptorProto.
// Collects errors that occur during proto file building to allow them to be
// propagated in the python exception instead of only living in ERROR logs.
@@ -538,6 +554,8 @@ static PyMethodDef Methods[] = {
"No-op. Add() must have been called before." },
{ "AddExtensionDescriptor", (PyCFunction)AddExtensionDescriptor, METH_O,
"No-op. Add() must have been called before." },
+ { "AddServiceDescriptor", (PyCFunction)AddServiceDescriptor, METH_O,
+ "No-op. Add() must have been called before." },
{ "FindFileByName", (PyCFunction)FindFileByName, METH_O,
"Searches for a file descriptor by its .proto name." },
diff --git a/python/google/protobuf/pyext/descriptor_pool.h b/python/google/protobuf/pyext/descriptor_pool.h
index c4d7d403..53ee53dc 100644
--- a/python/google/protobuf/pyext/descriptor_pool.h
+++ b/python/google/protobuf/pyext/descriptor_pool.h
@@ -85,6 +85,7 @@ extern PyTypeObject PyDescriptorPool_Type;
namespace cdescriptor_pool {
+
// Looks up a message by name.
// Returns a message Descriptor, or NULL if not found.
const Descriptor* FindMessageTypeByName(PyDescriptorPool* self,
diff --git a/python/google/protobuf/pyext/repeated_composite_container.cc b/python/google/protobuf/pyext/repeated_composite_container.cc
index 43a2bc12..9cb4e9a1 100644
--- a/python/google/protobuf/pyext/repeated_composite_container.cc
+++ b/python/google/protobuf/pyext/repeated_composite_container.cc
@@ -47,6 +47,7 @@
#include <google/protobuf/pyext/descriptor_pool.h>
#include <google/protobuf/pyext/message.h>
#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
+#include <google/protobuf/reflection.h>
#if PY_MAJOR_VERSION >= 3
#define PyInt_Check PyLong_Check
@@ -485,6 +486,32 @@ int Release(RepeatedCompositeContainer* self) {
return 0;
}
+PyObject* DeepCopy(RepeatedCompositeContainer* self, PyObject* arg) {
+ ScopedPyObjectPtr cloneObj(
+ PyType_GenericAlloc(&RepeatedCompositeContainer_Type, 0));
+ if (cloneObj == NULL) {
+ return NULL;
+ }
+ RepeatedCompositeContainer* clone =
+ reinterpret_cast<RepeatedCompositeContainer*>(cloneObj.get());
+
+ Message* new_message = self->message->New();
+ clone->parent = NULL;
+ clone->parent_field_descriptor = self->parent_field_descriptor;
+ clone->message = new_message;
+ clone->owner.reset(new_message);
+ Py_INCREF(self->child_message_class);
+ clone->child_message_class = self->child_message_class;
+ clone->child_messages = PyList_New(0);
+
+ new_message->GetReflection()
+ ->GetMutableRepeatedFieldRef<Message>(new_message,
+ self->parent_field_descriptor)
+ .MergeFrom(self->message->GetReflection()->GetRepeatedFieldRef<Message>(
+ *self->message, self->parent_field_descriptor));
+ return cloneObj.release();
+}
+
int SetOwner(RepeatedCompositeContainer* self,
const shared_ptr<Message>& new_owner) {
GOOGLE_CHECK_ATTACHED(self);
@@ -551,6 +578,8 @@ static PyMappingMethods MpMethods = {
};
static PyMethodDef Methods[] = {
+ { "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
+ "Makes a deep copy of the class." },
{ "add", (PyCFunction) Add, METH_VARARGS | METH_KEYWORDS,
"Adds an object to the repeated container." },
{ "extend", (PyCFunction) Extend, METH_O,
diff --git a/python/google/protobuf/pyext/scoped_pyobject_ptr.h b/python/google/protobuf/pyext/scoped_pyobject_ptr.h
index a128cd4c..a2afa7f1 100644
--- a/python/google/protobuf/pyext/scoped_pyobject_ptr.h
+++ b/python/google/protobuf/pyext/scoped_pyobject_ptr.h
@@ -36,61 +36,70 @@
#include <google/protobuf/stubs/common.h>
#include <Python.h>
-
namespace google {
-class ScopedPyObjectPtr {
+namespace protobuf {
+namespace python {
+
+// Owns a python object and decrements the reference count on destruction.
+// This class is not threadsafe.
+template <typename PyObjectStruct>
+class ScopedPythonPtr {
public:
- // Constructor. Defaults to initializing with NULL.
- // There is no way to create an uninitialized ScopedPyObjectPtr.
- explicit ScopedPyObjectPtr(PyObject* p = NULL) : ptr_(p) { }
+ // Takes the ownership of the specified object to ScopedPythonPtr.
+ // The reference count of the specified py_object is not incremented.
+ explicit ScopedPythonPtr(PyObjectStruct* py_object = NULL)
+ : ptr_(py_object) {}
- // Destructor. If there is a PyObject object, delete it.
- ~ScopedPyObjectPtr() {
- Py_XDECREF(ptr_);
- }
+ // If a PyObject is owned, decrement its reference count.
+ ~ScopedPythonPtr() { Py_XDECREF(ptr_); }
- // Reset. Deletes the current owned object, if any.
- // Then takes ownership of a new object, if given.
+ // Deletes the current owned object, if any.
+ // Then takes ownership of a new object without incrementing the reference
+ // count.
// This function must be called with a reference that you own.
// this->reset(this->get()) is wrong!
// this->reset(this->release()) is OK.
- PyObject* reset(PyObject* p = NULL) {
+ PyObjectStruct* reset(PyObjectStruct* p = NULL) {
Py_XDECREF(ptr_);
ptr_ = p;
return ptr_;
}
- // Releases ownership of the object.
+ // Releases ownership of the object without decrementing the reference count.
// The caller now owns the returned reference.
- PyObject* release() {
+ PyObjectStruct* release() {
PyObject* p = ptr_;
ptr_ = NULL;
return p;
}
- PyObject* operator->() const {
+ PyObjectStruct* operator->() const {
assert(ptr_ != NULL);
return ptr_;
}
- PyObject* get() const { return ptr_; }
+ PyObjectStruct* get() const { return ptr_; }
- Py_ssize_t refcnt() const { return Py_REFCNT(ptr_); }
+ PyObject* as_pyobject() const { return reinterpret_cast<PyObject*>(ptr_); }
+  // Increments the reference count of the current object.
+ // Should not be called when no object is held.
void inc() const { Py_INCREF(ptr_); }
- // Comparison operators.
- // These return whether a ScopedPyObjectPtr and a raw pointer
- // refer to the same object, not just to two different but equal
- // objects.
- bool operator==(const PyObject* p) const { return ptr_ == p; }
- bool operator!=(const PyObject* p) const { return ptr_ != p; }
+ // True when a ScopedPyObjectPtr and a raw pointer refer to the same object.
+ // Comparison operators are non reflexive.
+ bool operator==(const PyObjectStruct* p) const { return ptr_ == p; }
+ bool operator!=(const PyObjectStruct* p) const { return ptr_ != p; }
private:
- PyObject* ptr_;
+ PyObjectStruct* ptr_;
- GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ScopedPyObjectPtr);
+ GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ScopedPythonPtr);
};
+typedef ScopedPythonPtr<PyObject> ScopedPyObjectPtr;
+
+} // namespace python
+} // namespace protobuf
} // namespace google
#endif // GOOGLE_PROTOBUF_PYTHON_CPP_SCOPED_PYOBJECT_PTR_H__
diff --git a/python/google/protobuf/pyext/python_protobuf.h b/python/google/protobuf/python_protobuf.h
index beb6e460..beb6e460 100644
--- a/python/google/protobuf/pyext/python_protobuf.h
+++ b/python/google/protobuf/python_protobuf.h
diff --git a/python/google/protobuf/reflection.py b/python/google/protobuf/reflection.py
index 05bafd69..f4ce8caf 100755
--- a/python/google/protobuf/reflection.py
+++ b/python/google/protobuf/reflection.py
@@ -107,7 +107,7 @@ def MakeClass(descriptor):
The Message class object described by the descriptor.
"""
if descriptor in MESSAGE_CLASS_CACHE:
- return MESSAGE_CLASS_CACHE[descriptor]
+ return MESSAGE_CLASS_CACHE[descriptor]
attributes = {}
for name, nested_type in descriptor.nested_types_by_name.items():
@@ -115,7 +115,7 @@ def MakeClass(descriptor):
attributes[GeneratedProtocolMessageType._DESCRIPTOR_KEY] = descriptor
- result = GeneratedProtocolMessageType(str(descriptor.name), (message.Message,),
- attributes)
+ result = GeneratedProtocolMessageType(
+ str(descriptor.name), (message.Message,), attributes)
MESSAGE_CLASS_CACHE[descriptor] = result
return result
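
For reference, a brief MakeClass usage sketch (only the call formatting changed here, not the behavior):

  from google.protobuf import reflection
  from google.protobuf import unittest_pb2

  cls = reflection.MakeClass(unittest_pb2.TestAllTypes.DESCRIPTOR)
  msg = cls()
  msg.optional_int32 = 42
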
diff --git a/python/google/protobuf/symbol_database.py b/python/google/protobuf/symbol_database.py
index ecbef211..07341efa 100644
--- a/python/google/protobuf/symbol_database.py
+++ b/python/google/protobuf/symbol_database.py
@@ -94,6 +94,17 @@ class SymbolDatabase(message_factory.MessageFactory):
self.pool.AddEnumDescriptor(enum_descriptor)
return enum_descriptor
+ def RegisterServiceDescriptor(self, service_descriptor):
+ """Registers the given service descriptor in the local database.
+
+ Args:
+ service_descriptor: a descriptor.ServiceDescriptor.
+
+ Returns:
+ The provided descriptor.
+ """
+    self.pool.AddServiceDescriptor(service_descriptor)
+    return service_descriptor
+
def RegisterFileDescriptor(self, file_descriptor):
"""Registers the given file descriptor in the local database.
diff --git a/python/google/protobuf/text_format.py b/python/google/protobuf/text_format.py
index 90f6ce42..c216e097 100755
--- a/python/google/protobuf/text_format.py
+++ b/python/google/protobuf/text_format.py
@@ -422,7 +422,8 @@ class _Printer(object):
def Parse(text,
message,
allow_unknown_extension=False,
- allow_field_number=False):
+ allow_field_number=False,
+ descriptor_pool=None):
"""Parses a text representation of a protocol message into a message.
Args:
@@ -431,6 +432,7 @@ def Parse(text,
allow_unknown_extension: if True, skip over missing extensions and keep
parsing
allow_field_number: if True, both field number and field name are allowed.
+ descriptor_pool: A DescriptorPool used to resolve Any types.
Returns:
The same message passed as argument.
@@ -440,8 +442,11 @@ def Parse(text,
"""
if not isinstance(text, str):
text = text.decode('utf-8')
- return ParseLines(
- text.split('\n'), message, allow_unknown_extension, allow_field_number)
+ return ParseLines(text.split('\n'),
+ message,
+ allow_unknown_extension,
+ allow_field_number,
+ descriptor_pool=descriptor_pool)
def Merge(text,
@@ -479,7 +484,8 @@ def Merge(text,
def ParseLines(lines,
message,
allow_unknown_extension=False,
- allow_field_number=False):
+ allow_field_number=False,
+ descriptor_pool=None):
"""Parses a text representation of a protocol message into a message.
Args:
@@ -496,7 +502,9 @@ def ParseLines(lines,
Raises:
ParseError: On text parsing problems.
"""
- parser = _Parser(allow_unknown_extension, allow_field_number)
+ parser = _Parser(allow_unknown_extension,
+ allow_field_number,
+ descriptor_pool=descriptor_pool)
return parser.ParseLines(lines, message)
@@ -513,6 +521,7 @@ def MergeLines(lines,
allow_unknown_extension: if True, skip over missing extensions and keep
parsing
allow_field_number: if True, both field number and field name are allowed.
+ descriptor_pool: A DescriptorPool used to resolve Any types.
Returns:
The same message passed as argument.
@@ -1023,6 +1032,22 @@ class Tokenizer(object):
self.NextToken()
return result
+ def ConsumeCommentOrTrailingComment(self):
+ """Consumes a comment, returns a 2-tuple (trailing bool, comment str)."""
+
+ # Tokenizer initializes _previous_line and _previous_column to 0. As the
+ # tokenizer starts, it looks like there is a previous token on the line.
+ just_started = self._line == 0 and self._column == 0
+
+ before_parsing = self._previous_line
+ comment = self.ConsumeComment()
+
+    # A trailing comment is a comment on the same line as the previous token.
+ trailing = (self._previous_line == before_parsing
+ and not just_started)
+
+ return trailing, comment
+
def TryConsumeIdentifier(self):
try:
self.ConsumeIdentifier()
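
Finally, a sketch of the new descriptor_pool argument to Parse(), along the lines of the text_format_test change above; the expanded-Any text literal here is an approximation of that test's input:

  from google.protobuf import descriptor_pool
  from google.protobuf import text_format
  from google.protobuf import unittest_pb2           # defines protobuf_unittest.OneString
  from google.protobuf.internal import any_test_pb2

  msg = any_test_pb2.TestAny()
  text = ('any_value {\n'
          '  [type.googleapis.com/protobuf_unittest.OneString] {\n'
          '    data: "string"\n'
          '  }\n'
          '}\n')
  text_format.Parse(text, msg, descriptor_pool=descriptor_pool.Default())

  unpacked = unittest_pb2.OneString()
  msg.any_value.Unpack(unpacked)
  assert unpacked.data == 'string'
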