author    Philipp Wollermann <philwo@google.com>  2016-11-23 13:16:13 +0100
committer Philipp Wollermann <philwo@google.com>  2016-11-23 14:58:36 +0100
commit    ed787feff0a6f532ee0432fed75806553f2fd77b (patch)
tree      c9033346e1befc9afed0afc0ec28cccb722dbe81 /third_party/protobuf/python/google
parent    7dd281e652513b0bfe5d7e8bf3a91fb27e109552 (diff)
Remove old and outdated protobuf libs and protoc versions.
Fixes #2119 and related bootstrap breakage.

Change-Id: I7ca5b9f509e182780f3486c22bf30da3d4104f15
RELNOTES: None.
Diffstat (limited to 'third_party/protobuf/python/google')
-rwxr-xr-x  third_party/protobuf/python/google/__init__.py | 4
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/__init__.py | 39
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/descriptor.py | 966
-rw-r--r--  third_party/protobuf/python/google/protobuf/descriptor_database.py | 141
-rw-r--r--  third_party/protobuf/python/google/protobuf/descriptor_pool.py | 749
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/internal/__init__.py | 0
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/internal/_parameterized.py | 443
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/any_test.proto | 42
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/api_implementation.cc | 129
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/internal/api_implementation.py | 107
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/internal/containers.py | 611
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/internal/decoder.py | 854
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/descriptor_pool_test1.proto | 96
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/descriptor_pool_test2.proto | 72
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/internal/encoder.py | 823
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/enum_type_wrapper.py | 89
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/factory_test1.proto | 58
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/factory_test2.proto | 99
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/import_test_package/__init__.py | 33
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/import_test_package/inner.proto | 37
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/import_test_package/outer.proto | 39
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/internal/message_listener.py | 78
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/message_set_extensions.proto | 74
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/missing_enum_values.proto | 56
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/more_extensions.proto | 59
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/more_extensions_dynamic.proto | 50
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/more_messages.proto | 52
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/packed_field_test.proto | 73
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/internal/python_message.py | 1520
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/test_bad_identifiers.proto | 53
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/internal/type_checkers.py | 341
-rw-r--r--  third_party/protobuf/python/google/protobuf/internal/well_known_types.py | 720
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/internal/wire_format.py | 268
-rw-r--r--  third_party/protobuf/python/google/protobuf/json_format.py | 645
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/message.py | 292
-rw-r--r--  third_party/protobuf/python/google/protobuf/message_factory.py | 147
-rw-r--r--  third_party/protobuf/python/google/protobuf/proto_builder.py | 130
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/README | 6
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/__init__.py | 4
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/cpp_message.py | 65
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/descriptor.cc | 1583
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/descriptor.h | 97
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/descriptor_containers.cc | 1652
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/descriptor_containers.h | 101
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/descriptor_database.cc | 145
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/descriptor_database.h | 75
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/descriptor_pool.cc | 593
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/descriptor_pool.h | 164
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/extension_dict.cc | 320
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/extension_dict.h | 131
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/map_container.cc | 965
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/map_container.h | 141
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/message.cc | 3091
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/message.h | 330
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/proto2_api_test.proto | 40
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/python.proto | 68
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/python_protobuf.h | 57
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/repeated_composite_container.cc | 614
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/repeated_composite_container.h | 178
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/repeated_scalar_container.cc | 812
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/repeated_scalar_container.h | 122
-rw-r--r--  third_party/protobuf/python/google/protobuf/pyext/scoped_pyobject_ptr.h | 96
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/reflection.py | 120
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/service.py | 226
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/service_reflection.py | 284
-rw-r--r--  third_party/protobuf/python/google/protobuf/symbol_database.py | 185
-rw-r--r--  third_party/protobuf/python/google/protobuf/text_encoding.py | 107
-rwxr-xr-x  third_party/protobuf/python/google/protobuf/text_format.py | 1112
68 files changed, 0 insertions, 23473 deletions
diff --git a/third_party/protobuf/python/google/__init__.py b/third_party/protobuf/python/google/__init__.py
deleted file mode 100755
index 5585614122..0000000000
--- a/third_party/protobuf/python/google/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-try:
- __import__('pkg_resources').declare_namespace(__name__)
-except ImportError:
- __path__ = __import__('pkgutil').extend_path(__path__, __name__)
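The four deleted lines above are the standard namespace-package shim: they let the top-level 'google' package be assembled from several install locations by merging every matching directory into __path__ (via pkg_resources when setuptools is available, otherwise via pkgutil). A self-contained sketch of the same mechanism, using two hypothetical install roots created on the fly and a stand-in package name to avoid clashing with an installed 'google' package:

    import os, sys, tempfile

    # Build two install roots, each contributing one subpackage to the shared
    # 'googledemo' namespace (a stand-in name for illustration only).
    root = tempfile.mkdtemp()
    for dist, sub in (('dist_a', 'alpha'), ('dist_b', 'beta')):
        pkg = os.path.join(root, dist, 'googledemo')
        os.makedirs(os.path.join(pkg, sub))
        with open(os.path.join(pkg, '__init__.py'), 'w') as f:
            # Same pkgutil fallback line as in the deleted file above.
            f.write("__path__ = __import__('pkgutil').extend_path(__path__, __name__)\n")
        open(os.path.join(pkg, sub, '__init__.py'), 'w').close()
        sys.path.append(os.path.join(root, dist))

    import googledemo.alpha    # served from dist_a
    import googledemo.beta     # served from dist_b
    print(googledemo.__path__)  # both directories appear in the merged path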
diff --git a/third_party/protobuf/python/google/protobuf/__init__.py b/third_party/protobuf/python/google/protobuf/__init__.py
deleted file mode 100755
index fcb1734e47..0000000000
--- a/third_party/protobuf/python/google/protobuf/__init__.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# Copyright 2007 Google Inc. All Rights Reserved.
-
-__version__ = '3.0.0b2.post2'
-
-if __name__ != '__main__':
- try:
- __import__('pkg_resources').declare_namespace(__name__)
- except ImportError:
- __path__ = __import__('pkgutil').extend_path(__path__, __name__)
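The deleted protobuf/__init__.py pinned the bundled runtime at __version__ = '3.0.0b2.post2' and repeated the namespace declaration. With the bundled copy gone, code that depends on a particular runtime can check the installed package instead; a minimal, illustrative guard (the required major version below is an assumption, not part of this change):

    import google.protobuf

    REQUIRED_MAJOR = '3'  # illustrative requirement only
    version = google.protobuf.__version__
    if not version.startswith(REQUIRED_MAJOR + '.'):
        raise ImportError(
            'protobuf runtime %s found, expected a %s.x release' % (version, REQUIRED_MAJOR))
    print('using protobuf runtime', version)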
diff --git a/third_party/protobuf/python/google/protobuf/descriptor.py b/third_party/protobuf/python/google/protobuf/descriptor.py
deleted file mode 100755
index 5f613c880b..0000000000
--- a/third_party/protobuf/python/google/protobuf/descriptor.py
+++ /dev/null
@@ -1,966 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Descriptors essentially contain exactly the information found in a .proto
-file, in types that make this information accessible in Python.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-import six
-
-from google.protobuf.internal import api_implementation
-
-_USE_C_DESCRIPTORS = False
-if api_implementation.Type() == 'cpp':
- # Used by MakeDescriptor in cpp mode
- import os
- import uuid
- from google.protobuf.pyext import _message
- _USE_C_DESCRIPTORS = getattr(_message, '_USE_C_DESCRIPTORS', False)
-
-
-class Error(Exception):
- """Base error for this module."""
-
-
-class TypeTransformationError(Error):
- """Error transforming between python proto type and corresponding C++ type."""
-
-
-if _USE_C_DESCRIPTORS:
- # This metaclass allows overriding the behavior of code like
- # isinstance(my_descriptor, FieldDescriptor)
- # and make it return True when the descriptor is an instance of the extension
- # type written in C++.
- class DescriptorMetaclass(type):
- def __instancecheck__(cls, obj):
- if super(DescriptorMetaclass, cls).__instancecheck__(obj):
- return True
- if isinstance(obj, cls._C_DESCRIPTOR_CLASS):
- return True
- return False
-else:
- # The standard metaclass; nothing changes.
- DescriptorMetaclass = type
-
-
-class DescriptorBase(six.with_metaclass(DescriptorMetaclass)):
-
- """Descriptors base class.
-
- This class is the base of all descriptor classes. It provides common options
- related functionality.
-
- Attributes:
- has_options: True if the descriptor has non-default options. Usually it
- is not necessary to read this -- just call GetOptions() which will
- happily return the default instance. However, it's sometimes useful
- for efficiency, and also useful inside the protobuf implementation to
- avoid some bootstrapping issues.
- """
-
- if _USE_C_DESCRIPTORS:
- # The class, or tuple of classes, that are considered as "virtual
- # subclasses" of this descriptor class.
- _C_DESCRIPTOR_CLASS = ()
-
- def __init__(self, options, options_class_name):
- """Initialize the descriptor given its options message and the name of the
- class of the options message. The name of the class is required in case
- the options message is None and has to be created.
- """
- self._options = options
- self._options_class_name = options_class_name
-
- # Does this descriptor have non-default options?
- self.has_options = options is not None
-
- def _SetOptions(self, options, options_class_name):
- """Sets the descriptor's options
-
- This function is used in generated proto2 files to update descriptor
- options. It must not be used outside proto2.
- """
- self._options = options
- self._options_class_name = options_class_name
-
- # Does this descriptor have non-default options?
- self.has_options = options is not None
-
- def GetOptions(self):
- """Retrieves descriptor options.
-
- This method returns the options set or creates the default options for the
- descriptor.
- """
- if self._options:
- return self._options
- from google.protobuf import descriptor_pb2
- try:
- options_class = getattr(descriptor_pb2, self._options_class_name)
- except AttributeError:
- raise RuntimeError('Unknown options class name %s!' %
- (self._options_class_name))
- self._options = options_class()
- return self._options
-
-
-class _NestedDescriptorBase(DescriptorBase):
- """Common class for descriptors that can be nested."""
-
- def __init__(self, options, options_class_name, name, full_name,
- file, containing_type, serialized_start=None,
- serialized_end=None):
- """Constructor.
-
- Args:
- options: Protocol message options or None
- to use default message options.
- options_class_name: (str) The class name of the above options.
-
- name: (str) Name of this protocol message type.
- full_name: (str) Fully-qualified name of this protocol message type,
- which will include protocol "package" name and the name of any
- enclosing types.
- file: (FileDescriptor) Reference to file info.
- containing_type: if provided, this is a nested descriptor, with this
- descriptor as parent, otherwise None.
- serialized_start: The start index (inclusive) of the block within
- file.serialized_pb that describes this descriptor.
- serialized_end: The end index (exclusive) of the block within
- file.serialized_pb that describes this descriptor.
- """
- super(_NestedDescriptorBase, self).__init__(
- options, options_class_name)
-
- self.name = name
- # TODO(falk): Add function to calculate full_name instead of having it in
- # memory?
- self.full_name = full_name
- self.file = file
- self.containing_type = containing_type
-
- self._serialized_start = serialized_start
- self._serialized_end = serialized_end
-
- def GetTopLevelContainingType(self):
- """Returns the root if this is a nested type, or itself if its the root."""
- desc = self
- while desc.containing_type is not None:
- desc = desc.containing_type
- return desc
-
- def CopyToProto(self, proto):
- """Copies this to the matching proto in descriptor_pb2.
-
- Args:
- proto: An empty proto instance from descriptor_pb2.
-
- Raises:
- Error: If self couldn't be serialized, due to too few constructor arguments.
- """
- if (self.file is not None and
- self._serialized_start is not None and
- self._serialized_end is not None):
- proto.ParseFromString(self.file.serialized_pb[
- self._serialized_start:self._serialized_end])
- else:
- raise Error('Descriptor does not contain serialization.')
-
-
-class Descriptor(_NestedDescriptorBase):
-
- """Descriptor for a protocol message type.
-
- A Descriptor instance has the following attributes:
-
- name: (str) Name of this protocol message type.
- full_name: (str) Fully-qualified name of this protocol message type,
- which will include protocol "package" name and the name of any
- enclosing types.
-
- containing_type: (Descriptor) Reference to the descriptor of the
- type containing us, or None if this is top-level.
-
- fields: (list of FieldDescriptors) Field descriptors for all
- fields in this type.
- fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor
- objects as in |fields|, but indexed by "number" attribute in each
- FieldDescriptor.
- fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
- objects as in |fields|, but indexed by "name" attribute in each
- FieldDescriptor.
- fields_by_camelcase_name: (dict str -> FieldDescriptor) Same
- FieldDescriptor objects as in |fields|, but indexed by
- "camelcase_name" attribute in each FieldDescriptor.
-
- nested_types: (list of Descriptors) Descriptor references
- for all protocol message types nested within this one.
- nested_types_by_name: (dict str -> Descriptor) Same Descriptor
- objects as in |nested_types|, but indexed by "name" attribute
- in each Descriptor.
-
- enum_types: (list of EnumDescriptors) EnumDescriptor references
- for all enums contained within this type.
- enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor
- objects as in |enum_types|, but indexed by "name" attribute
- in each EnumDescriptor.
- enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping
- from enum value name to EnumValueDescriptor for that value.
-
- extensions: (list of FieldDescriptor) All extensions defined directly
- within this message type (NOT within a nested type).
- extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor
- objects as |extensions|, but indexed by "name" attribute of each
- FieldDescriptor.
-
- is_extendable: Does this type define any extension ranges?
-
- oneofs: (list of OneofDescriptor) The list of descriptors for oneof fields
- in this message.
- oneofs_by_name: (dict str -> OneofDescriptor) Same objects as in |oneofs|,
- but indexed by "name" attribute.
-
- file: (FileDescriptor) Reference to file descriptor.
- """
-
- if _USE_C_DESCRIPTORS:
- _C_DESCRIPTOR_CLASS = _message.Descriptor
-
- def __new__(cls, name, full_name, filename, containing_type, fields,
- nested_types, enum_types, extensions, options=None,
- is_extendable=True, extension_ranges=None, oneofs=None,
- file=None, serialized_start=None, serialized_end=None,
- syntax=None):
- _message.Message._CheckCalledFromGeneratedFile()
- return _message.default_pool.FindMessageTypeByName(full_name)
-
- # NOTE(tmarek): The file argument redefining a builtin is nothing we can
- # fix right now since we don't know how many clients already rely on the
- # name of the argument.
- def __init__(self, name, full_name, filename, containing_type, fields,
- nested_types, enum_types, extensions, options=None,
- is_extendable=True, extension_ranges=None, oneofs=None,
- file=None, serialized_start=None, serialized_end=None,
- syntax=None): # pylint:disable=redefined-builtin
- """Arguments to __init__() are as described in the description
- of Descriptor fields above.
-
- Note that filename is an obsolete argument, that is not used anymore.
- Please use file.name to access this as an attribute.
- """
- super(Descriptor, self).__init__(
- options, 'MessageOptions', name, full_name, file,
- containing_type, serialized_start=serialized_start,
- serialized_end=serialized_end)
-
- # We have fields in addition to fields_by_name and fields_by_number,
- # so that:
- # 1. Clients can index fields by "order in which they're listed."
- # 2. Clients can easily iterate over all fields with the terse
- # syntax: for f in descriptor.fields: ...
- self.fields = fields
- for field in self.fields:
- field.containing_type = self
- self.fields_by_number = dict((f.number, f) for f in fields)
- self.fields_by_name = dict((f.name, f) for f in fields)
- self._fields_by_camelcase_name = None
-
- self.nested_types = nested_types
- for nested_type in nested_types:
- nested_type.containing_type = self
- self.nested_types_by_name = dict((t.name, t) for t in nested_types)
-
- self.enum_types = enum_types
- for enum_type in self.enum_types:
- enum_type.containing_type = self
- self.enum_types_by_name = dict((t.name, t) for t in enum_types)
- self.enum_values_by_name = dict(
- (v.name, v) for t in enum_types for v in t.values)
-
- self.extensions = extensions
- for extension in self.extensions:
- extension.extension_scope = self
- self.extensions_by_name = dict((f.name, f) for f in extensions)
- self.is_extendable = is_extendable
- self.extension_ranges = extension_ranges
- self.oneofs = oneofs if oneofs is not None else []
- self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
- for oneof in self.oneofs:
- oneof.containing_type = self
- self.syntax = syntax or "proto2"
-
- @property
- def fields_by_camelcase_name(self):
- if self._fields_by_camelcase_name is None:
- self._fields_by_camelcase_name = dict(
- (f.camelcase_name, f) for f in self.fields)
- return self._fields_by_camelcase_name
-
- def EnumValueName(self, enum, value):
- """Returns the string name of an enum value.
-
- This is just a small helper method to simplify a common operation.
-
- Args:
- enum: string name of the Enum.
- value: int, value of the enum.
-
- Returns:
- string name of the enum value.
-
- Raises:
- KeyError if either the Enum doesn't exist or the value is not a valid
- value for the enum.
- """
- return self.enum_types_by_name[enum].values_by_number[value].name
-
- def CopyToProto(self, proto):
- """Copies this to a descriptor_pb2.DescriptorProto.
-
- Args:
- proto: An empty descriptor_pb2.DescriptorProto.
- """
- # This function is overridden to give a better doc comment.
- super(Descriptor, self).CopyToProto(proto)
-
-
-# TODO(robinson): We should have aggressive checking here,
-# for example:
-# * If you specify a repeated field, you should not be allowed
-# to specify a default value.
-# * [Other examples here as needed].
-#
-# TODO(robinson): for this and other *Descriptor classes, we
-# might also want to lock things down aggressively (e.g.,
-# prevent clients from setting the attributes). Having
-# stronger invariants here in general will reduce the number
-# of runtime checks we must do in reflection.py...
-class FieldDescriptor(DescriptorBase):
-
- """Descriptor for a single field in a .proto file.
-
- A FieldDescriptor instance has the following attributes:
-
- name: (str) Name of this field, exactly as it appears in .proto.
- full_name: (str) Name of this field, including containing scope. This is
- particularly relevant for extensions.
- camelcase_name: (str) Camelcase name of this field.
- index: (int) Dense, 0-indexed index giving the order that this
- field textually appears within its message in the .proto file.
- number: (int) Tag number declared for this field in the .proto file.
-
- type: (One of the TYPE_* constants below) Declared type.
- cpp_type: (One of the CPPTYPE_* constants below) C++ type used to
- represent this field.
-
- label: (One of the LABEL_* constants below) Tells whether this
- field is optional, required, or repeated.
- has_default_value: (bool) True if this field has a default value defined,
- otherwise false.
- default_value: (Varies) Default value of this field. Only
- meaningful for non-repeated scalar fields. Repeated fields
- should always set this to [], and non-repeated composite
- fields should always set this to None.
-
- containing_type: (Descriptor) Descriptor of the protocol message
- type that contains this field. Set by the Descriptor constructor
- if we're passed into one.
- Somewhat confusingly, for extension fields, this is the
- descriptor of the EXTENDED message, not the descriptor
- of the message containing this field. (See is_extension and
- extension_scope below).
- message_type: (Descriptor) If a composite field, a descriptor
- of the message type contained in this field. Otherwise, this is None.
- enum_type: (EnumDescriptor) If this field contains an enum, a
- descriptor of that enum. Otherwise, this is None.
-
- is_extension: True iff this describes an extension field.
- extension_scope: (Descriptor) Only meaningful if is_extension is True.
- Gives the message that immediately contains this extension field.
- Will be None iff we're a top-level (file-level) extension field.
-
- options: (descriptor_pb2.FieldOptions) Protocol message field options or
- None to use default field options.
-
- containing_oneof: (OneofDescriptor) If the field is a member of a oneof
- union, contains its descriptor. Otherwise, None.
- """
-
- # Must be consistent with C++ FieldDescriptor::Type enum in
- # descriptor.h.
- #
- # TODO(robinson): Find a way to eliminate this repetition.
- TYPE_DOUBLE = 1
- TYPE_FLOAT = 2
- TYPE_INT64 = 3
- TYPE_UINT64 = 4
- TYPE_INT32 = 5
- TYPE_FIXED64 = 6
- TYPE_FIXED32 = 7
- TYPE_BOOL = 8
- TYPE_STRING = 9
- TYPE_GROUP = 10
- TYPE_MESSAGE = 11
- TYPE_BYTES = 12
- TYPE_UINT32 = 13
- TYPE_ENUM = 14
- TYPE_SFIXED32 = 15
- TYPE_SFIXED64 = 16
- TYPE_SINT32 = 17
- TYPE_SINT64 = 18
- MAX_TYPE = 18
-
- # Must be consistent with C++ FieldDescriptor::CppType enum in
- # descriptor.h.
- #
- # TODO(robinson): Find a way to eliminate this repetition.
- CPPTYPE_INT32 = 1
- CPPTYPE_INT64 = 2
- CPPTYPE_UINT32 = 3
- CPPTYPE_UINT64 = 4
- CPPTYPE_DOUBLE = 5
- CPPTYPE_FLOAT = 6
- CPPTYPE_BOOL = 7
- CPPTYPE_ENUM = 8
- CPPTYPE_STRING = 9
- CPPTYPE_MESSAGE = 10
- MAX_CPPTYPE = 10
-
- _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
- TYPE_DOUBLE: CPPTYPE_DOUBLE,
- TYPE_FLOAT: CPPTYPE_FLOAT,
- TYPE_ENUM: CPPTYPE_ENUM,
- TYPE_INT64: CPPTYPE_INT64,
- TYPE_SINT64: CPPTYPE_INT64,
- TYPE_SFIXED64: CPPTYPE_INT64,
- TYPE_UINT64: CPPTYPE_UINT64,
- TYPE_FIXED64: CPPTYPE_UINT64,
- TYPE_INT32: CPPTYPE_INT32,
- TYPE_SFIXED32: CPPTYPE_INT32,
- TYPE_SINT32: CPPTYPE_INT32,
- TYPE_UINT32: CPPTYPE_UINT32,
- TYPE_FIXED32: CPPTYPE_UINT32,
- TYPE_BYTES: CPPTYPE_STRING,
- TYPE_STRING: CPPTYPE_STRING,
- TYPE_BOOL: CPPTYPE_BOOL,
- TYPE_MESSAGE: CPPTYPE_MESSAGE,
- TYPE_GROUP: CPPTYPE_MESSAGE
- }
-
- # Must be consistent with C++ FieldDescriptor::Label enum in
- # descriptor.h.
- #
- # TODO(robinson): Find a way to eliminate this repetition.
- LABEL_OPTIONAL = 1
- LABEL_REQUIRED = 2
- LABEL_REPEATED = 3
- MAX_LABEL = 3
-
- # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
- # and kLastReservedNumber in descriptor.h
- MAX_FIELD_NUMBER = (1 << 29) - 1
- FIRST_RESERVED_FIELD_NUMBER = 19000
- LAST_RESERVED_FIELD_NUMBER = 19999
-
- if _USE_C_DESCRIPTORS:
- _C_DESCRIPTOR_CLASS = _message.FieldDescriptor
-
- def __new__(cls, name, full_name, index, number, type, cpp_type, label,
- default_value, message_type, enum_type, containing_type,
- is_extension, extension_scope, options=None,
- has_default_value=True, containing_oneof=None):
- _message.Message._CheckCalledFromGeneratedFile()
- if is_extension:
- return _message.default_pool.FindExtensionByName(full_name)
- else:
- return _message.default_pool.FindFieldByName(full_name)
-
- def __init__(self, name, full_name, index, number, type, cpp_type, label,
- default_value, message_type, enum_type, containing_type,
- is_extension, extension_scope, options=None,
- has_default_value=True, containing_oneof=None):
- """The arguments are as described in the description of FieldDescriptor
- attributes above.
-
- Note that containing_type may be None, and may be set later if necessary
- (to deal with circular references between message types, for example).
- Likewise for extension_scope.
- """
- super(FieldDescriptor, self).__init__(options, 'FieldOptions')
- self.name = name
- self.full_name = full_name
- self._camelcase_name = None
- self.index = index
- self.number = number
- self.type = type
- self.cpp_type = cpp_type
- self.label = label
- self.has_default_value = has_default_value
- self.default_value = default_value
- self.containing_type = containing_type
- self.message_type = message_type
- self.enum_type = enum_type
- self.is_extension = is_extension
- self.extension_scope = extension_scope
- self.containing_oneof = containing_oneof
- if api_implementation.Type() == 'cpp':
- if is_extension:
- self._cdescriptor = _message.default_pool.FindExtensionByName(full_name)
- else:
- self._cdescriptor = _message.default_pool.FindFieldByName(full_name)
- else:
- self._cdescriptor = None
-
- @property
- def camelcase_name(self):
- if self._camelcase_name is None:
- self._camelcase_name = _ToCamelCase(self.name)
- return self._camelcase_name
-
- @staticmethod
- def ProtoTypeToCppProtoType(proto_type):
- """Converts from a Python proto type to a C++ Proto Type.
-
- The Python ProtocolBuffer classes specify both the 'Python' datatype and the
- 'C++' datatype - and they're not the same. This helper method should
- translate from one to another.
-
- Args:
- proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
- Returns:
- descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
- Raises:
- TypeTransformationError: when the Python proto type isn't known.
- """
- try:
- return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
- except KeyError:
- raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
-
-
-class EnumDescriptor(_NestedDescriptorBase):
-
- """Descriptor for an enum defined in a .proto file.
-
- An EnumDescriptor instance has the following attributes:
-
- name: (str) Name of the enum type.
- full_name: (str) Full name of the type, including package name
- and any enclosing type(s).
-
- values: (list of EnumValueDescriptors) List of the values
- in this enum.
- values_by_name: (dict str -> EnumValueDescriptor) Same as |values|,
- but indexed by the "name" field of each EnumValueDescriptor.
- values_by_number: (dict int -> EnumValueDescriptor) Same as |values|,
- but indexed by the "number" field of each EnumValueDescriptor.
- containing_type: (Descriptor) Descriptor of the immediate containing
- type of this enum, or None if this is an enum defined at the
- top level in a .proto file. Set by Descriptor's constructor
- if we're passed into one.
- file: (FileDescriptor) Reference to file descriptor.
- options: (descriptor_pb2.EnumOptions) Enum options message or
- None to use default enum options.
- """
-
- if _USE_C_DESCRIPTORS:
- _C_DESCRIPTOR_CLASS = _message.EnumDescriptor
-
- def __new__(cls, name, full_name, filename, values,
- containing_type=None, options=None, file=None,
- serialized_start=None, serialized_end=None):
- _message.Message._CheckCalledFromGeneratedFile()
- return _message.default_pool.FindEnumTypeByName(full_name)
-
- def __init__(self, name, full_name, filename, values,
- containing_type=None, options=None, file=None,
- serialized_start=None, serialized_end=None):
- """Arguments are as described in the attribute description above.
-
- Note that filename is an obsolete argument, that is not used anymore.
- Please use file.name to access this as an attribute.
- """
- super(EnumDescriptor, self).__init__(
- options, 'EnumOptions', name, full_name, file,
- containing_type, serialized_start=serialized_start,
- serialized_end=serialized_end)
-
- self.values = values
- for value in self.values:
- value.type = self
- self.values_by_name = dict((v.name, v) for v in values)
- self.values_by_number = dict((v.number, v) for v in values)
-
- def CopyToProto(self, proto):
- """Copies this to a descriptor_pb2.EnumDescriptorProto.
-
- Args:
- proto: An empty descriptor_pb2.EnumDescriptorProto.
- """
- # This function is overridden to give a better doc comment.
- super(EnumDescriptor, self).CopyToProto(proto)
-
-
-class EnumValueDescriptor(DescriptorBase):
-
- """Descriptor for a single value within an enum.
-
- name: (str) Name of this value.
- index: (int) Dense, 0-indexed index giving the order that this
- value appears textually within its enum in the .proto file.
- number: (int) Actual number assigned to this enum value.
- type: (EnumDescriptor) EnumDescriptor to which this value
- belongs. Set by EnumDescriptor's constructor if we're
- passed into one.
- options: (descriptor_pb2.EnumValueOptions) Enum value options message or
- None to use default enum value options.
- """
-
- if _USE_C_DESCRIPTORS:
- _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor
-
- def __new__(cls, name, index, number, type=None, options=None):
- _message.Message._CheckCalledFromGeneratedFile()
- # There is no way we can build a complete EnumValueDescriptor with the
- # given parameters (the name of the Enum is not known, for example).
- # Fortunately generated files just pass it to the EnumDescriptor()
- # constructor, which will ignore it, so returning None is good enough.
- return None
-
- def __init__(self, name, index, number, type=None, options=None):
- """Arguments are as described in the attribute description above."""
- super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
- self.name = name
- self.index = index
- self.number = number
- self.type = type
-
-
-class OneofDescriptor(object):
- """Descriptor for a oneof field.
-
- name: (str) Name of the oneof field.
- full_name: (str) Full name of the oneof field, including package name.
- index: (int) 0-based index giving the order of the oneof field inside
- its containing type.
- containing_type: (Descriptor) Descriptor of the protocol message
- type that contains this field. Set by the Descriptor constructor
- if we're passed into one.
- fields: (list of FieldDescriptor) The list of field descriptors this
- oneof can contain.
- """
-
- if _USE_C_DESCRIPTORS:
- _C_DESCRIPTOR_CLASS = _message.OneofDescriptor
-
- def __new__(cls, name, full_name, index, containing_type, fields):
- _message.Message._CheckCalledFromGeneratedFile()
- return _message.default_pool.FindOneofByName(full_name)
-
- def __init__(self, name, full_name, index, containing_type, fields):
- """Arguments are as described in the attribute description above."""
- self.name = name
- self.full_name = full_name
- self.index = index
- self.containing_type = containing_type
- self.fields = fields
-
-
-class ServiceDescriptor(_NestedDescriptorBase):
-
- """Descriptor for a service.
-
- name: (str) Name of the service.
- full_name: (str) Full name of the service, including package name.
- index: (int) 0-indexed index giving the order that this service's
- definition appears within the .proto file.
- methods: (list of MethodDescriptor) List of methods provided by this
- service.
- options: (descriptor_pb2.ServiceOptions) Service options message or
- None to use default service options.
- file: (FileDescriptor) Reference to file info.
- """
-
- def __init__(self, name, full_name, index, methods, options=None, file=None,
- serialized_start=None, serialized_end=None):
- super(ServiceDescriptor, self).__init__(
- options, 'ServiceOptions', name, full_name, file,
- None, serialized_start=serialized_start,
- serialized_end=serialized_end)
- self.index = index
- self.methods = methods
- # Set the containing service for each method in this service.
- for method in self.methods:
- method.containing_service = self
-
- def FindMethodByName(self, name):
- """Searches for the specified method, and returns its descriptor."""
- for method in self.methods:
- if name == method.name:
- return method
- return None
-
- def CopyToProto(self, proto):
- """Copies this to a descriptor_pb2.ServiceDescriptorProto.
-
- Args:
- proto: An empty descriptor_pb2.ServiceDescriptorProto.
- """
- # This function is overridden to give a better doc comment.
- super(ServiceDescriptor, self).CopyToProto(proto)
-
-
-class MethodDescriptor(DescriptorBase):
-
- """Descriptor for a method in a service.
-
- name: (str) Name of the method within the service.
- full_name: (str) Full name of method.
- index: (int) 0-indexed index of the method inside the service.
- containing_service: (ServiceDescriptor) The service that contains this
- method.
- input_type: The descriptor of the message that this method accepts.
- output_type: The descriptor of the message that this method returns.
- options: (descriptor_pb2.MethodOptions) Method options message or
- None to use default method options.
- """
-
- def __init__(self, name, full_name, index, containing_service,
- input_type, output_type, options=None):
- """The arguments are as described in the description of MethodDescriptor
- attributes above.
-
- Note that containing_service may be None, and may be set later if necessary.
- """
- super(MethodDescriptor, self).__init__(options, 'MethodOptions')
- self.name = name
- self.full_name = full_name
- self.index = index
- self.containing_service = containing_service
- self.input_type = input_type
- self.output_type = output_type
-
-
-class FileDescriptor(DescriptorBase):
- """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.
-
- Note that enum_types_by_name, extensions_by_name, and dependencies
- fields are only set by the message_factory module, and not by the
- generated proto code.
-
- name: name of file, relative to root of source tree.
- package: name of the package
- syntax: string indicating syntax of the file (can be "proto2" or "proto3")
- serialized_pb: (str) Byte string of serialized
- descriptor_pb2.FileDescriptorProto.
- dependencies: List of other FileDescriptors this FileDescriptor depends on.
- message_types_by_name: Dict of message names to their descriptors.
- enum_types_by_name: Dict of enum names and their descriptors.
- extensions_by_name: Dict of extension names and their descriptors.
- pool: the DescriptorPool this descriptor belongs to. When not passed to the
- constructor, the global default pool is used.
- """
-
- if _USE_C_DESCRIPTORS:
- _C_DESCRIPTOR_CLASS = _message.FileDescriptor
-
- def __new__(cls, name, package, options=None, serialized_pb=None,
- dependencies=None, syntax=None, pool=None):
- # FileDescriptor() is called from various places, not only from generated
- # files, to register dynamic proto files and messages.
- if serialized_pb:
- # TODO(amauryfa): use the pool passed as argument. This will work only
- # for C++-implemented DescriptorPools.
- return _message.default_pool.AddSerializedFile(serialized_pb)
- else:
- return super(FileDescriptor, cls).__new__(cls)
-
- def __init__(self, name, package, options=None, serialized_pb=None,
- dependencies=None, syntax=None, pool=None):
- """Constructor."""
- super(FileDescriptor, self).__init__(options, 'FileOptions')
-
- if pool is None:
- from google.protobuf import descriptor_pool
- pool = descriptor_pool.Default()
- self.pool = pool
- self.message_types_by_name = {}
- self.name = name
- self.package = package
- self.syntax = syntax or "proto2"
- self.serialized_pb = serialized_pb
-
- self.enum_types_by_name = {}
- self.extensions_by_name = {}
- self.dependencies = (dependencies or [])
-
- if (api_implementation.Type() == 'cpp' and
- self.serialized_pb is not None):
- _message.default_pool.AddSerializedFile(self.serialized_pb)
-
- def CopyToProto(self, proto):
- """Copies this to a descriptor_pb2.FileDescriptorProto.
-
- Args:
- proto: An empty descriptor_pb2.FileDescriptorProto.
- """
- proto.ParseFromString(self.serialized_pb)
-
-
-def _ParseOptions(message, string):
- """Parses serialized options.
-
- This helper function is used to parse serialized options in generated
- proto2 files. It must not be used outside proto2.
- """
- message.ParseFromString(string)
- return message
-
-
-def _ToCamelCase(name):
- """Converts name to camel-case and returns it."""
- capitalize_next = False
- result = []
-
- for c in name:
- if c == '_':
- if result:
- capitalize_next = True
- elif capitalize_next:
- result.append(c.upper())
- capitalize_next = False
- else:
- result += c
-
- # Lower-case the first letter.
- if result and result[0].isupper():
- result[0] = result[0].lower()
- return ''.join(result)
-
-
-def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
- syntax=None):
- """Make a protobuf Descriptor given a DescriptorProto protobuf.
-
- Handles nested descriptors. Note that this is limited to the scope of defining
- a message inside of another message. Composite fields can currently only be
- resolved if the message is defined in the same scope as the field.
-
- Args:
- desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
- package: Optional package name for the new message Descriptor (string).
- build_file_if_cpp: Update the C++ descriptor pool if api matches.
- Set to False on recursion, so no duplicates are created.
- syntax: The syntax/semantics that should be used. Set to "proto3" to get
- proto3 field presence semantics.
- Returns:
- A Descriptor for protobuf messages.
- """
- if api_implementation.Type() == 'cpp' and build_file_if_cpp:
- # The C++ implementation requires all descriptors to be backed by the same
- # definition in the C++ descriptor pool. To do this, we build a
- # FileDescriptorProto with the same definition as this descriptor and build
- # it into the pool.
- from google.protobuf import descriptor_pb2
- file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
- file_descriptor_proto.message_type.add().MergeFrom(desc_proto)
-
- # Generate a random name for this proto file to prevent conflicts with any
- # imported ones. We need to specify a file name so the descriptor pool
- # accepts our FileDescriptorProto, but it is not important what that file
- # name is actually set to.
- proto_name = str(uuid.uuid4())
-
- if package:
- file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
- proto_name + '.proto')
- file_descriptor_proto.package = package
- else:
- file_descriptor_proto.name = proto_name + '.proto'
-
- _message.default_pool.Add(file_descriptor_proto)
- result = _message.default_pool.FindFileByName(file_descriptor_proto.name)
-
- if _USE_C_DESCRIPTORS:
- return result.message_types_by_name[desc_proto.name]
-
- full_message_name = [desc_proto.name]
- if package: full_message_name.insert(0, package)
-
- # Create Descriptors for enum types
- enum_types = {}
- for enum_proto in desc_proto.enum_type:
- full_name = '.'.join(full_message_name + [enum_proto.name])
- enum_desc = EnumDescriptor(
- enum_proto.name, full_name, None, [
- EnumValueDescriptor(enum_val.name, ii, enum_val.number)
- for ii, enum_val in enumerate(enum_proto.value)])
- enum_types[full_name] = enum_desc
-
- # Create Descriptors for nested types
- nested_types = {}
- for nested_proto in desc_proto.nested_type:
- full_name = '.'.join(full_message_name + [nested_proto.name])
- # Nested types are just those defined inside of the message, not all types
- # used by fields in the message, so no loops are possible here.
- nested_desc = MakeDescriptor(nested_proto,
- package='.'.join(full_message_name),
- build_file_if_cpp=False,
- syntax=syntax)
- nested_types[full_name] = nested_desc
-
- fields = []
- for field_proto in desc_proto.field:
- full_name = '.'.join(full_message_name + [field_proto.name])
- enum_desc = None
- nested_desc = None
- if field_proto.HasField('type_name'):
- type_name = field_proto.type_name
- full_type_name = '.'.join(full_message_name +
- [type_name[type_name.rfind('.')+1:]])
- if full_type_name in nested_types:
- nested_desc = nested_types[full_type_name]
- elif full_type_name in enum_types:
- enum_desc = enum_types[full_type_name]
- # Else type_name references a non-local type, which isn't implemented
- field = FieldDescriptor(
- field_proto.name, full_name, field_proto.number - 1,
- field_proto.number, field_proto.type,
- FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
- field_proto.label, None, nested_desc, enum_desc, None, False, None,
- options=field_proto.options, has_default_value=False)
- fields.append(field)
-
- desc_name = '.'.join(full_message_name)
- return Descriptor(desc_proto.name, desc_name, None, None, fields,
- list(nested_types.values()), list(enum_types.values()), [],
- options=desc_proto.options)
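descriptor.py ends with MakeDescriptor(), which turns a plain descriptor_pb2.DescriptorProto into a usable Descriptor at runtime. A minimal sketch of that entry point, building a one-field message by hand (the 'Point' message and its field are invented for illustration):

    from google.protobuf import descriptor
    from google.protobuf import descriptor_pb2

    msg_proto = descriptor_pb2.DescriptorProto()
    msg_proto.name = 'Point'  # hypothetical message name
    field = msg_proto.field.add()
    field.name = 'x'
    field.number = 1
    field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT32
    field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

    point_desc = descriptor.MakeDescriptor(msg_proto)
    print(point_desc.full_name)                   # 'Point'
    print(point_desc.fields_by_name['x'].number)  # 1
    print(point_desc.fields_by_name['x'].cpp_type ==
          descriptor.FieldDescriptor.CPPTYPE_INT32)  # True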
diff --git a/third_party/protobuf/python/google/protobuf/descriptor_database.py b/third_party/protobuf/python/google/protobuf/descriptor_database.py
deleted file mode 100644
index 1333f9966e..0000000000
--- a/third_party/protobuf/python/google/protobuf/descriptor_database.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Provides a container for DescriptorProtos."""
-
-__author__ = 'matthewtoia@google.com (Matt Toia)'
-
-
-class Error(Exception):
- pass
-
-
-class DescriptorDatabaseConflictingDefinitionError(Error):
- """Raised when a proto is added with the same name & different descriptor."""
-
-
-class DescriptorDatabase(object):
- """A container accepting FileDescriptorProtos and maps DescriptorProtos."""
-
- def __init__(self):
- self._file_desc_protos_by_file = {}
- self._file_desc_protos_by_symbol = {}
-
- def Add(self, file_desc_proto):
- """Adds the FileDescriptorProto and its types to this database.
-
- Args:
- file_desc_proto: The FileDescriptorProto to add.
- Raises:
- DescriptorDatabaseConflictingDefinitionError: if an attempt is made to add
- a proto with the same name but a different definition than an existing
- proto in the database.
- """
- proto_name = file_desc_proto.name
- if proto_name not in self._file_desc_protos_by_file:
- self._file_desc_protos_by_file[proto_name] = file_desc_proto
- elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
- raise DescriptorDatabaseConflictingDefinitionError(
- '%s already added, but with different descriptor.' % proto_name)
-
- # Add the top-level Message, Enum and Extension descriptors to the index.
- package = file_desc_proto.package
- for message in file_desc_proto.message_type:
- self._file_desc_protos_by_symbol.update(
- (name, file_desc_proto) for name in _ExtractSymbols(message, package))
- for enum in file_desc_proto.enum_type:
- self._file_desc_protos_by_symbol[
- '.'.join((package, enum.name))] = file_desc_proto
- for extension in file_desc_proto.extension:
- self._file_desc_protos_by_symbol[
- '.'.join((package, extension.name))] = file_desc_proto
-
- def FindFileByName(self, name):
- """Finds the file descriptor proto by file name.
-
- Typically the file name is a relative path ending in a .proto file. The
- proto with the given name will have to have been added to this database
- using the Add method or else an error will be raised.
-
- Args:
- name: The file name to find.
-
- Returns:
- The file descriptor proto matching the name.
-
- Raises:
- KeyError if no file by the given name was added.
- """
-
- return self._file_desc_protos_by_file[name]
-
- def FindFileContainingSymbol(self, symbol):
- """Finds the file descriptor proto containing the specified symbol.
-
- The symbol should be a fully qualified name including the file descriptor's
- package and any containing messages. Some examples:
-
- 'some.package.name.Message'
- 'some.package.name.Message.NestedEnum'
-
- The file descriptor proto containing the specified symbol must be added to
- this database using the Add method or else an error will be raised.
-
- Args:
- symbol: The fully qualified symbol name.
-
- Returns:
- The file descriptor proto containing the symbol.
-
- Raises:
- KeyError if no file contains the specified symbol.
- """
-
- return self._file_desc_protos_by_symbol[symbol]
-
-
-def _ExtractSymbols(desc_proto, package):
- """Pulls out all the symbols from a descriptor proto.
-
- Args:
- desc_proto: The proto to extract symbols from.
- package: The package containing the descriptor type.
-
- Yields:
- The fully qualified name found in the descriptor.
- """
-
- message_name = '.'.join((package, desc_proto.name))
- yield message_name
- for nested_type in desc_proto.nested_type:
- for symbol in _ExtractSymbols(nested_type, message_name):
- yield symbol
- for enum_type in desc_proto.enum_type:
- yield '.'.join((message_name, enum_type.name))
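A short usage sketch for the container above: register a hand-built FileDescriptorProto, then look it up by file name and by fully qualified symbol (the example/greeting.proto names are invented for illustration):

    from google.protobuf import descriptor_database
    from google.protobuf import descriptor_pb2

    db = descriptor_database.DescriptorDatabase()

    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example/greeting.proto'   # hypothetical file
    file_proto.package = 'example'
    file_proto.message_type.add().name = 'Greeting'

    db.Add(file_proto)
    assert db.FindFileByName('example/greeting.proto') is file_proto
    assert db.FindFileContainingSymbol('example.Greeting') is file_proto
    # Re-adding the same file name with a different definition raises
    # DescriptorDatabaseConflictingDefinitionError.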
diff --git a/third_party/protobuf/python/google/protobuf/descriptor_pool.py b/third_party/protobuf/python/google/protobuf/descriptor_pool.py
deleted file mode 100644
index 3e80795c86..0000000000
--- a/third_party/protobuf/python/google/protobuf/descriptor_pool.py
+++ /dev/null
@@ -1,749 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Provides DescriptorPool to use as a container for proto2 descriptors.
-
- The DescriptorPool is used in conjunction with a DescriptorDatabase to maintain
-a collection of protocol buffer descriptors for use when dynamically creating
-message types at runtime.
-
-For most applications protocol buffers should be used via modules generated by
-the protocol buffer compiler tool. This should only be used when the type of
-protocol buffers used in an application or library cannot be predetermined.
-
-Below is a straightforward example on how to use this class:
-
- pool = DescriptorPool()
- file_descriptor_protos = [ ... ]
- for file_descriptor_proto in file_descriptor_protos:
- pool.Add(file_descriptor_proto)
- my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
-
-The message descriptor can be used in conjunction with the message_factory
-module in order to create a protocol buffer class that can be encoded and
-decoded.
-
-If you want to get a Python class for the specified proto, use the
-helper functions inside google.protobuf.message_factory
-directly instead of this class.
-"""
-
-__author__ = 'matthewtoia@google.com (Matt Toia)'
-
-from google.protobuf import descriptor
-from google.protobuf import descriptor_database
-from google.protobuf import text_encoding
-
-
-_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS
-
-
-def _NormalizeFullyQualifiedName(name):
- """Remove leading period from fully-qualified type name.
-
- Due to b/13860351 in descriptor_database.py, types in the root namespace are
- generated with a leading period. This function removes that prefix.
-
- Args:
- name: A str, the fully-qualified symbol name.
-
- Returns:
- A str, the normalized fully-qualified symbol name.
- """
- return name.lstrip('.')
-
-
-class DescriptorPool(object):
- """A collection of protobufs dynamically constructed by descriptor protos."""
-
- if _USE_C_DESCRIPTORS:
-
- def __new__(cls, descriptor_db=None):
- # pylint: disable=protected-access
- return descriptor._message.DescriptorPool(descriptor_db)
-
- def __init__(self, descriptor_db=None):
- """Initializes a Pool of proto buffs.
-
- The descriptor_db argument to the constructor is provided to allow
- specialized file descriptor proto lookup code to be triggered on demand. An
- example would be an implementation which will read and compile a file
- specified in a call to FindFileByName() and not require the call to Add()
- at all. Results from this database will be cached internally here as well.
-
- Args:
- descriptor_db: A secondary source of file descriptors.
- """
-
- self._internal_db = descriptor_database.DescriptorDatabase()
- self._descriptor_db = descriptor_db
- self._descriptors = {}
- self._enum_descriptors = {}
- self._file_descriptors = {}
-
- def Add(self, file_desc_proto):
- """Adds the FileDescriptorProto and its types to this pool.
-
- Args:
- file_desc_proto: The FileDescriptorProto to add.
- """
-
- self._internal_db.Add(file_desc_proto)
-
- def AddSerializedFile(self, serialized_file_desc_proto):
- """Adds the FileDescriptorProto and its types to this pool.
-
- Args:
- serialized_file_desc_proto: A bytes string, serialization of the
- FileDescriptorProto to add.
- """
-
- # pylint: disable=g-import-not-at-top
- from google.protobuf import descriptor_pb2
- file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
- serialized_file_desc_proto)
- self.Add(file_desc_proto)
-
- def AddDescriptor(self, desc):
- """Adds a Descriptor to the pool, non-recursively.
-
- If the Descriptor contains nested messages or enums, the caller must
- explicitly register them. This method also registers the FileDescriptor
- associated with the message.
-
- Args:
- desc: A Descriptor.
- """
- if not isinstance(desc, descriptor.Descriptor):
- raise TypeError('Expected instance of descriptor.Descriptor.')
-
- self._descriptors[desc.full_name] = desc
- self.AddFileDescriptor(desc.file)
-
- def AddEnumDescriptor(self, enum_desc):
- """Adds an EnumDescriptor to the pool.
-
- This method also registers the FileDescriptor associated with the message.
-
- Args:
- enum_desc: An EnumDescriptor.
- """
-
- if not isinstance(enum_desc, descriptor.EnumDescriptor):
- raise TypeError('Expected instance of descriptor.EnumDescriptor.')
-
- self._enum_descriptors[enum_desc.full_name] = enum_desc
- self.AddFileDescriptor(enum_desc.file)
-
- def AddFileDescriptor(self, file_desc):
- """Adds a FileDescriptor to the pool, non-recursively.
-
- If the FileDescriptor contains messages or enums, the caller must explicitly
- register them.
-
- Args:
- file_desc: A FileDescriptor.
- """
-
- if not isinstance(file_desc, descriptor.FileDescriptor):
- raise TypeError('Expected instance of descriptor.FileDescriptor.')
- self._file_descriptors[file_desc.name] = file_desc
-
- def FindFileByName(self, file_name):
- """Gets a FileDescriptor by file name.
-
- Args:
- file_name: The path to the file to get a descriptor for.
-
- Returns:
- A FileDescriptor for the named file.
-
- Raises:
- KeyError: if the file can not be found in the pool.
- """
-
- try:
- return self._file_descriptors[file_name]
- except KeyError:
- pass
-
- try:
- file_proto = self._internal_db.FindFileByName(file_name)
- except KeyError as error:
- if self._descriptor_db:
- file_proto = self._descriptor_db.FindFileByName(file_name)
- else:
- raise error
- if not file_proto:
- raise KeyError('Cannot find a file named %s' % file_name)
- return self._ConvertFileProtoToFileDescriptor(file_proto)
-
- def FindFileContainingSymbol(self, symbol):
- """Gets the FileDescriptor for the file containing the specified symbol.
-
- Args:
- symbol: The name of the symbol to search for.
-
- Returns:
- A FileDescriptor that contains the specified symbol.
-
- Raises:
- KeyError: if the file can not be found in the pool.
- """
-
- symbol = _NormalizeFullyQualifiedName(symbol)
- try:
- return self._descriptors[symbol].file
- except KeyError:
- pass
-
- try:
- return self._enum_descriptors[symbol].file
- except KeyError:
- pass
-
- try:
- file_proto = self._internal_db.FindFileContainingSymbol(symbol)
- except KeyError as error:
- if self._descriptor_db:
- file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
- else:
- raise error
- if not file_proto:
- raise KeyError('Cannot find a file containing %s' % symbol)
- return self._ConvertFileProtoToFileDescriptor(file_proto)
-
- def FindMessageTypeByName(self, full_name):
- """Loads the named descriptor from the pool.
-
- Args:
- full_name: The full name of the descriptor to load.
-
- Returns:
- The descriptor for the named type.
- """
-
- full_name = _NormalizeFullyQualifiedName(full_name)
- if full_name not in self._descriptors:
- self.FindFileContainingSymbol(full_name)
- return self._descriptors[full_name]
-
- def FindEnumTypeByName(self, full_name):
- """Loads the named enum descriptor from the pool.
-
- Args:
- full_name: The full name of the enum descriptor to load.
-
- Returns:
- The enum descriptor for the named type.
- """
-
- full_name = _NormalizeFullyQualifiedName(full_name)
- if full_name not in self._enum_descriptors:
- self.FindFileContainingSymbol(full_name)
- return self._enum_descriptors[full_name]
-
- def FindFieldByName(self, full_name):
- """Loads the named field descriptor from the pool.
-
- Args:
- full_name: The full name of the field descriptor to load.
-
- Returns:
- The field descriptor for the named field.
- """
- full_name = _NormalizeFullyQualifiedName(full_name)
- message_name, _, field_name = full_name.rpartition('.')
- message_descriptor = self.FindMessageTypeByName(message_name)
- return message_descriptor.fields_by_name[field_name]
-
- def FindExtensionByName(self, full_name):
- """Loads the named extension descriptor from the pool.
-
- Args:
- full_name: The full name of the extension descriptor to load.
-
- Returns:
- A FieldDescriptor, describing the named extension.
- """
- full_name = _NormalizeFullyQualifiedName(full_name)
- message_name, _, extension_name = full_name.rpartition('.')
- try:
- # Most extensions are nested inside a message.
- scope = self.FindMessageTypeByName(message_name)
- except KeyError:
- # Some extensions are defined at file scope.
- scope = self.FindFileContainingSymbol(full_name)
- return scope.extensions_by_name[extension_name]
-
- def _ConvertFileProtoToFileDescriptor(self, file_proto):
- """Creates a FileDescriptor from a proto or returns a cached copy.
-
- This method also has the side effect of loading all the symbols found in
- the file into the appropriate dictionaries in the pool.
-
- Args:
- file_proto: The proto to convert.
-
- Returns:
- A FileDescriptor matching the passed in proto.
- """
-
- if file_proto.name not in self._file_descriptors:
- built_deps = list(self._GetDeps(file_proto.dependency))
- direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
-
- file_descriptor = descriptor.FileDescriptor(
- pool=self,
- name=file_proto.name,
- package=file_proto.package,
- syntax=file_proto.syntax,
- options=file_proto.options,
- serialized_pb=file_proto.SerializeToString(),
- dependencies=direct_deps)
- if _USE_C_DESCRIPTORS:
- # When using C++ descriptors, all objects defined in the file were added
- # to the C++ database when the FileDescriptor was built above.
- # Just add them to this descriptor pool.
- def _AddMessageDescriptor(message_desc):
- self._descriptors[message_desc.full_name] = message_desc
- for nested in message_desc.nested_types:
- _AddMessageDescriptor(nested)
- for enum_type in message_desc.enum_types:
- _AddEnumDescriptor(enum_type)
- def _AddEnumDescriptor(enum_desc):
- self._enum_descriptors[enum_desc.full_name] = enum_desc
- for message_type in file_descriptor.message_types_by_name.values():
- _AddMessageDescriptor(message_type)
- for enum_type in file_descriptor.enum_types_by_name.values():
- _AddEnumDescriptor(enum_type)
- else:
- scope = {}
-
- # This loop extracts all the message and enum types from all the
- # dependencies of the file_proto. This is necessary to create the
- # scope of available message types when defining the passed in
- # file proto.
- for dependency in built_deps:
- scope.update(self._ExtractSymbols(
- dependency.message_types_by_name.values()))
- scope.update((_PrefixWithDot(enum.full_name), enum)
- for enum in dependency.enum_types_by_name.values())
-
- for message_type in file_proto.message_type:
- message_desc = self._ConvertMessageDescriptor(
- message_type, file_proto.package, file_descriptor, scope,
- file_proto.syntax)
- file_descriptor.message_types_by_name[message_desc.name] = (
- message_desc)
-
- for enum_type in file_proto.enum_type:
- file_descriptor.enum_types_by_name[enum_type.name] = (
- self._ConvertEnumDescriptor(enum_type, file_proto.package,
- file_descriptor, None, scope))
-
- for index, extension_proto in enumerate(file_proto.extension):
- extension_desc = self._MakeFieldDescriptor(
- extension_proto, file_proto.package, index, is_extension=True)
- extension_desc.containing_type = self._GetTypeFromScope(
- file_descriptor.package, extension_proto.extendee, scope)
- self._SetFieldType(extension_proto, extension_desc,
- file_descriptor.package, scope)
- file_descriptor.extensions_by_name[extension_desc.name] = (
- extension_desc)
-
- for desc_proto in file_proto.message_type:
- self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
-
- if file_proto.package:
- desc_proto_prefix = _PrefixWithDot(file_proto.package)
- else:
- desc_proto_prefix = ''
-
- for desc_proto in file_proto.message_type:
- desc = self._GetTypeFromScope(
- desc_proto_prefix, desc_proto.name, scope)
- file_descriptor.message_types_by_name[desc_proto.name] = desc
-
- self.Add(file_proto)
- self._file_descriptors[file_proto.name] = file_descriptor
-
- return self._file_descriptors[file_proto.name]
-
- def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
- scope=None, syntax=None):
- """Adds the proto to the pool in the specified package.
-
- Args:
- desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
- package: The package the proto should be located in.
- file_desc: The file containing this message.
- scope: Dict mapping short and full symbols to message and enum types.
-
- Returns:
- The added descriptor.
- """
-
- if package:
- desc_name = '.'.join((package, desc_proto.name))
- else:
- desc_name = desc_proto.name
-
- if file_desc is None:
- file_name = None
- else:
- file_name = file_desc.name
-
- if scope is None:
- scope = {}
-
- nested = [
- self._ConvertMessageDescriptor(
- nested, desc_name, file_desc, scope, syntax)
- for nested in desc_proto.nested_type]
- enums = [
- self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope)
- for enum in desc_proto.enum_type]
- fields = [self._MakeFieldDescriptor(field, desc_name, index)
- for index, field in enumerate(desc_proto.field)]
- extensions = [
- self._MakeFieldDescriptor(extension, desc_name, index,
- is_extension=True)
- for index, extension in enumerate(desc_proto.extension)]
- oneofs = [
- descriptor.OneofDescriptor(desc.name, '.'.join((desc_name, desc.name)),
- index, None, [])
- for index, desc in enumerate(desc_proto.oneof_decl)]
- extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
- if extension_ranges:
- is_extendable = True
- else:
- is_extendable = False
- desc = descriptor.Descriptor(
- name=desc_proto.name,
- full_name=desc_name,
- filename=file_name,
- containing_type=None,
- fields=fields,
- oneofs=oneofs,
- nested_types=nested,
- enum_types=enums,
- extensions=extensions,
- options=desc_proto.options,
- is_extendable=is_extendable,
- extension_ranges=extension_ranges,
- file=file_desc,
- serialized_start=None,
- serialized_end=None,
- syntax=syntax)
- for nested in desc.nested_types:
- nested.containing_type = desc
- for enum in desc.enum_types:
- enum.containing_type = desc
- for field_index, field_desc in enumerate(desc_proto.field):
- if field_desc.HasField('oneof_index'):
- oneof_index = field_desc.oneof_index
- oneofs[oneof_index].fields.append(fields[field_index])
- fields[field_index].containing_oneof = oneofs[oneof_index]
-
- scope[_PrefixWithDot(desc_name)] = desc
- self._descriptors[desc_name] = desc
- return desc
-
- def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
- containing_type=None, scope=None):
- """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
-
- Args:
- enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
-      package: Optional package name for the new EnumDescriptor.
- file_desc: The file containing the enum descriptor.
- containing_type: The type containing this enum.
- scope: Scope containing available types.
-
- Returns:
-      The added descriptor.
- """
-
- if package:
- enum_name = '.'.join((package, enum_proto.name))
- else:
- enum_name = enum_proto.name
-
- if file_desc is None:
- file_name = None
- else:
- file_name = file_desc.name
-
- values = [self._MakeEnumValueDescriptor(value, index)
- for index, value in enumerate(enum_proto.value)]
- desc = descriptor.EnumDescriptor(name=enum_proto.name,
- full_name=enum_name,
- filename=file_name,
- file=file_desc,
- values=values,
- containing_type=containing_type,
- options=enum_proto.options)
- scope['.%s' % enum_name] = desc
- self._enum_descriptors[enum_name] = desc
- return desc
-
- def _MakeFieldDescriptor(self, field_proto, message_name, index,
- is_extension=False):
- """Creates a field descriptor from a FieldDescriptorProto.
-
-    Note that the returned FieldDescriptor is only partially
-    initialized: its cpp_type, message_type, enum_type and default
-    value are deliberately left unset here; they are filled in later
-    by _SetFieldType() once the enclosing scope of available types
-    is known.
-
- Args:
- field_proto: The proto describing the field.
- message_name: The name of the containing message.
-      index: Index of the field.
- is_extension: Indication that this field is for an extension.
-
- Returns:
-      An initialized FieldDescriptor object.
- """
-
- if message_name:
- full_name = '.'.join((message_name, field_proto.name))
- else:
- full_name = field_proto.name
-
- return descriptor.FieldDescriptor(
- name=field_proto.name,
- full_name=full_name,
- index=index,
- number=field_proto.number,
- type=field_proto.type,
- cpp_type=None,
- message_type=None,
- enum_type=None,
- containing_type=None,
- label=field_proto.label,
- has_default_value=False,
- default_value=None,
- is_extension=is_extension,
- extension_scope=None,
- options=field_proto.options)
-
- def _SetAllFieldTypes(self, package, desc_proto, scope):
- """Sets all the descriptor's fields's types.
-
- This method also sets the containing types on any extensions.
-
- Args:
- package: The current package of desc_proto.
- desc_proto: The message descriptor to update.
- scope: Enclosing scope of available types.
- """
-
- package = _PrefixWithDot(package)
-
- main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
-
- if package == '.':
- nested_package = _PrefixWithDot(desc_proto.name)
- else:
- nested_package = '.'.join([package, desc_proto.name])
-
- for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
- self._SetFieldType(field_proto, field_desc, nested_package, scope)
-
- for extension_proto, extension_desc in (
- zip(desc_proto.extension, main_desc.extensions)):
- extension_desc.containing_type = self._GetTypeFromScope(
- nested_package, extension_proto.extendee, scope)
- self._SetFieldType(extension_proto, extension_desc, nested_package, scope)
-
- for nested_type in desc_proto.nested_type:
- self._SetAllFieldTypes(nested_package, nested_type, scope)
-
- def _SetFieldType(self, field_proto, field_desc, package, scope):
- """Sets the field's type, cpp_type, message_type and enum_type.
-
- Args:
- field_proto: Data about the field in proto format.
-      field_desc: The descriptor to modify.
- package: The package the field's container is in.
- scope: Enclosing scope of available types.
- """
- if field_proto.type_name:
- desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
- else:
- desc = None
-
- if not field_proto.HasField('type'):
- if isinstance(desc, descriptor.Descriptor):
- field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
- else:
- field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
-
- field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
- field_proto.type)
-
- if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
- or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
- field_desc.message_type = desc
-
- if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
- field_desc.enum_type = desc
-
- if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
- field_desc.has_default_value = False
- field_desc.default_value = []
- elif field_proto.HasField('default_value'):
- field_desc.has_default_value = True
- if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
- field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
- field_desc.default_value = float(field_proto.default_value)
- elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
- field_desc.default_value = field_proto.default_value
- elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
- field_desc.default_value = field_proto.default_value.lower() == 'true'
- elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
- field_desc.default_value = field_desc.enum_type.values_by_name[
- field_proto.default_value].number
- elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
- field_desc.default_value = text_encoding.CUnescape(
- field_proto.default_value)
- else:
- # All other types are of the "int" type.
- field_desc.default_value = int(field_proto.default_value)
- else:
- field_desc.has_default_value = False
- if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
- field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
- field_desc.default_value = 0.0
- elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
- field_desc.default_value = u''
- elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
- field_desc.default_value = False
- elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
- field_desc.default_value = field_desc.enum_type.values[0].number
- elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
- field_desc.default_value = b''
- else:
- # All other types are of the "int" type.
- field_desc.default_value = 0
-
- field_desc.type = field_proto.type
-
- def _MakeEnumValueDescriptor(self, value_proto, index):
- """Creates a enum value descriptor object from a enum value proto.
-
- Args:
- value_proto: The proto describing the enum value.
- index: The index of the enum value.
-
- Returns:
- An initialized EnumValueDescriptor object.
- """
-
- return descriptor.EnumValueDescriptor(
- name=value_proto.name,
- index=index,
- number=value_proto.number,
- options=value_proto.options,
- type=None)
-
- def _ExtractSymbols(self, descriptors):
- """Pulls out all the symbols from descriptor protos.
-
- Args:
-      descriptors: The message descriptors to extract symbols from.
- Yields:
- A two element tuple of the type name and descriptor object.
- """
-
- for desc in descriptors:
- yield (_PrefixWithDot(desc.full_name), desc)
- for symbol in self._ExtractSymbols(desc.nested_types):
- yield symbol
- for enum in desc.enum_types:
- yield (_PrefixWithDot(enum.full_name), enum)
-
- def _GetDeps(self, dependencies):
- """Recursively finds dependencies for file protos.
-
- Args:
- dependencies: The names of the files being depended on.
-
- Yields:
- Each direct and indirect dependency.
- """
-
- for dependency in dependencies:
- dep_desc = self.FindFileByName(dependency)
- yield dep_desc
- for parent_dep in dep_desc.dependencies:
- yield parent_dep
-
- def _GetTypeFromScope(self, package, type_name, scope):
- """Finds a given type name in the current scope.
-
- Args:
- package: The package the proto should be located in.
- type_name: The name of the type to be found in the scope.
- scope: Dict mapping short and full symbols to message and enum types.
-
- Returns:
- The descriptor for the requested type.
- """
- if type_name not in scope:
- components = _PrefixWithDot(package).split('.')
- while components:
- possible_match = '.'.join(components + [type_name])
- if possible_match in scope:
- type_name = possible_match
- break
- else:
- components.pop(-1)
- return scope[type_name]
-
-
-def _PrefixWithDot(name):
- return name if name.startswith('.') else '.%s' % name
-
-
-if _USE_C_DESCRIPTORS:
- # TODO(amauryfa): This pool could be constructed from Python code, when we
- # support a flag like 'use_cpp_generated_pool=True'.
- # pylint: disable=protected-access
- _DEFAULT = descriptor._message.default_pool
-else:
- _DEFAULT = DescriptorPool()
-
-
-def Default():
- return _DEFAULT
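
For context, a minimal sketch of how a pool like the one removed above is
typically exercised: a FileDescriptorProto is registered with Add(), and the
corresponding descriptors are only materialized when one of the Find*ByName()
lookups asks for them. This assumes an installed google.protobuf package; the
file, package and message names are purely illustrative.

    from google.protobuf import descriptor_pb2
    from google.protobuf import descriptor_pool

    # Build a FileDescriptorProto by hand (normally protoc emits this).
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example/person.proto'
    file_proto.package = 'example'
    message_proto = file_proto.message_type.add()
    message_proto.name = 'Person'
    field_proto = message_proto.field.add()
    field_proto.name = 'name'
    field_proto.number = 1
    field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
    field_proto.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING

    pool = descriptor_pool.DescriptorPool()
    pool.Add(file_proto)  # only records the proto; nothing is built yet
    person = pool.FindMessageTypeByName('example.Person')  # built on demand
    assert person.fields_by_name['name'].number == 1
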
diff --git a/third_party/protobuf/python/google/protobuf/internal/__init__.py b/third_party/protobuf/python/google/protobuf/internal/__init__.py
deleted file mode 100755
index e69de29bb2..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/__init__.py
+++ /dev/null
diff --git a/third_party/protobuf/python/google/protobuf/internal/_parameterized.py b/third_party/protobuf/python/google/protobuf/internal/_parameterized.py
deleted file mode 100755
index dea3f19975..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/_parameterized.py
+++ /dev/null
@@ -1,443 +0,0 @@
-#! /usr/bin/env python
-#
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Adds support for parameterized tests to Python's unittest TestCase class.
-
-A parameterized test is a method in a test case that is invoked with different
-argument tuples.
-
-A simple example:
-
- class AdditionExample(parameterized.ParameterizedTestCase):
- @parameterized.Parameters(
- (1, 2, 3),
- (4, 5, 9),
- (1, 1, 3))
- def testAddition(self, op1, op2, result):
- self.assertEqual(result, op1 + op2)
-
-
-Each invocation is a separate test case and properly isolated just
-like a normal test method, with its own setUp/tearDown cycle. In the
-example above, there are three separate testcases, one of which will
-fail due to an assertion error (1 + 1 != 3).
-
-Parameters for individual test cases can be tuples (with positional parameters)
-or dictionaries (with named parameters):
-
- class AdditionExample(parameterized.ParameterizedTestCase):
- @parameterized.Parameters(
- {'op1': 1, 'op2': 2, 'result': 3},
- {'op1': 4, 'op2': 5, 'result': 9},
- )
- def testAddition(self, op1, op2, result):
- self.assertEqual(result, op1 + op2)
-
-If a parameterized test fails, the error message will show the
-original test name (which is modified internally) and the arguments
-for the specific invocation, which are part of the string returned by
-the shortDescription() method on test cases.
-
-The id method of the test, used internally by the unittest framework,
-is also modified to show the arguments. To make sure that test names
-stay the same across several invocations, object representations like
-
- >>> class Foo(object):
- ... pass
- >>> repr(Foo())
- '<__main__.Foo object at 0x23d8610>'
-
-are turned into '<__main__.Foo>'. For even more descriptive names,
-especially in test logs, you can use the NamedParameters decorator. In
-this case, only tuples are supported, and the first parameter has to
-be a string (or an object that returns an apt name when converted via
-str()):
-
- class NamedExample(parameterized.ParameterizedTestCase):
- @parameterized.NamedParameters(
- ('Normal', 'aa', 'aaa', True),
- ('EmptyPrefix', '', 'abc', True),
- ('BothEmpty', '', '', True))
- def testStartsWith(self, prefix, string, result):
-      self.assertEqual(result, string.startswith(prefix))
-
-Named tests also have the benefit that they can be run individually
-from the command line:
-
- $ testmodule.py NamedExample.testStartsWithNormal
- .
- --------------------------------------------------------------------
- Ran 1 test in 0.000s
-
- OK
-
-Parameterized Classes
-=====================
-If invocation arguments are shared across test methods in a single
-ParameterizedTestCase class, instead of decorating all test methods
-individually, the class itself can be decorated:
-
- @parameterized.Parameters(
-    (1, 2, 3),
- (4, 5, 9))
- class ArithmeticTest(parameterized.ParameterizedTestCase):
- def testAdd(self, arg1, arg2, result):
- self.assertEqual(arg1 + arg2, result)
-
-    def testSubtract(self, arg1, arg2, result):
- self.assertEqual(result - arg1, arg2)
-
-Inputs from Iterables
-=====================
-If parameters should be shared across several test cases, or are dynamically
-created from other sources, a single non-tuple iterable can be passed into
-the decorator. This iterable will be used to obtain the test cases:
-
- class AdditionExample(parameterized.ParameterizedTestCase):
- @parameterized.Parameters(
-        (c.op1, c.op2, c.result) for c in testcases
- )
- def testAddition(self, op1, op2, result):
- self.assertEqual(result, op1 + op2)
-
-
-Single-Argument Test Methods
-============================
-If a test method takes only one argument, the single argument does not need to
-be wrapped into a tuple:
-
- class NegativeNumberExample(parameterized.ParameterizedTestCase):
- @parameterized.Parameters(
- -1, -3, -4, -5
- )
- def testIsNegative(self, arg):
- self.assertTrue(IsNegative(arg))
-"""
-
-__author__ = 'tmarek@google.com (Torsten Marek)'
-
-import collections
-import functools
-import re
-import types
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-import uuid
-
-import six
-
-ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
-_SEPARATOR = uuid.uuid1().hex
-_FIRST_ARG = object()
-_ARGUMENT_REPR = object()
-
-
-def _CleanRepr(obj):
- return ADDR_RE.sub(r'<\1>', repr(obj))
-
-
-# Helper function formerly from the unittest module, removed from it in
-# Python 2.7.
-def _StrClass(cls):
- return '%s.%s' % (cls.__module__, cls.__name__)
-
-
-def _NonStringIterable(obj):
- return (isinstance(obj, collections.Iterable) and not
- isinstance(obj, six.string_types))
-
-
-def _FormatParameterList(testcase_params):
- if isinstance(testcase_params, collections.Mapping):
- return ', '.join('%s=%s' % (argname, _CleanRepr(value))
- for argname, value in testcase_params.items())
- elif _NonStringIterable(testcase_params):
- return ', '.join(map(_CleanRepr, testcase_params))
- else:
- return _FormatParameterList((testcase_params,))
-
-
-class _ParameterizedTestIter(object):
- """Callable and iterable class for producing new test cases."""
-
- def __init__(self, test_method, testcases, naming_type):
- """Returns concrete test functions for a test and a list of parameters.
-
- The naming_type is used to determine the name of the concrete
- functions as reported by the unittest framework. If naming_type is
- _FIRST_ARG, the testcases must be tuples, and the first element must
- have a string representation that is a valid Python identifier.
-
- Args:
- test_method: The decorated test method.
- testcases: (list of tuple/dict) A list of parameter
- tuples/dicts for individual test invocations.
-      naming_type: The test naming type, either _FIRST_ARG or _ARGUMENT_REPR.
- """
- self._test_method = test_method
- self.testcases = testcases
- self._naming_type = naming_type
-
- def __call__(self, *args, **kwargs):
- raise RuntimeError('You appear to be running a parameterized test case '
- 'without having inherited from parameterized.'
- 'ParameterizedTestCase. This is bad because none of '
- 'your test cases are actually being run.')
-
- def __iter__(self):
- test_method = self._test_method
- naming_type = self._naming_type
-
- def MakeBoundParamTest(testcase_params):
- @functools.wraps(test_method)
- def BoundParamTest(self):
- if isinstance(testcase_params, collections.Mapping):
- test_method(self, **testcase_params)
- elif _NonStringIterable(testcase_params):
- test_method(self, *testcase_params)
- else:
- test_method(self, testcase_params)
-
- if naming_type is _FIRST_ARG:
- # Signal the metaclass that the name of the test function is unique
- # and descriptive.
- BoundParamTest.__x_use_name__ = True
- BoundParamTest.__name__ += str(testcase_params[0])
- testcase_params = testcase_params[1:]
- elif naming_type is _ARGUMENT_REPR:
- # __x_extra_id__ is used to pass naming information to the __new__
- # method of TestGeneratorMetaclass.
- # The metaclass will make sure to create a unique, but nondescriptive
- # name for this test.
- BoundParamTest.__x_extra_id__ = '(%s)' % (
- _FormatParameterList(testcase_params),)
- else:
- raise RuntimeError('%s is not a valid naming type.' % (naming_type,))
-
- BoundParamTest.__doc__ = '%s(%s)' % (
- BoundParamTest.__name__, _FormatParameterList(testcase_params))
- if test_method.__doc__:
- BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
- return BoundParamTest
- return (MakeBoundParamTest(c) for c in self.testcases)
-
-
-def _IsSingletonList(testcases):
- """True iff testcases contains only a single non-tuple element."""
- return len(testcases) == 1 and not isinstance(testcases[0], tuple)
-
-
-def _ModifyClass(class_object, testcases, naming_type):
- assert not getattr(class_object, '_id_suffix', None), (
- 'Cannot add parameters to %s,'
- ' which already has parameterized methods.' % (class_object,))
- class_object._id_suffix = id_suffix = {}
- # We change the size of __dict__ while we iterate over it,
- # which Python 3.x will complain about, so use copy().
- for name, obj in class_object.__dict__.copy().items():
- if (name.startswith(unittest.TestLoader.testMethodPrefix)
- and isinstance(obj, types.FunctionType)):
- delattr(class_object, name)
- methods = {}
- _UpdateClassDictForParamTestCase(
- methods, id_suffix, name,
- _ParameterizedTestIter(obj, testcases, naming_type))
- for name, meth in methods.items():
- setattr(class_object, name, meth)
-
-
-def _ParameterDecorator(naming_type, testcases):
- """Implementation of the parameterization decorators.
-
- Args:
- naming_type: The naming type.
- testcases: Testcase parameters.
-
- Returns:
- A function for modifying the decorated object.
- """
- def _Apply(obj):
- if isinstance(obj, type):
- _ModifyClass(
- obj,
- list(testcases) if not isinstance(testcases, collections.Sequence)
- else testcases,
- naming_type)
- return obj
- else:
- return _ParameterizedTestIter(obj, testcases, naming_type)
-
- if _IsSingletonList(testcases):
- assert _NonStringIterable(testcases[0]), (
- 'Single parameter argument must be a non-string iterable')
- testcases = testcases[0]
-
- return _Apply
-
-
-def Parameters(*testcases):
- """A decorator for creating parameterized tests.
-
- See the module docstring for a usage example.
- Args:
- *testcases: Parameters for the decorated method, either a single
- iterable, or a list of tuples/dicts/objects (for tests
- with only one argument).
-
- Returns:
- A test generator to be handled by TestGeneratorMetaclass.
- """
- return _ParameterDecorator(_ARGUMENT_REPR, testcases)
-
-
-def NamedParameters(*testcases):
- """A decorator for creating parameterized tests.
-
- See the module docstring for a usage example. The first element of
- each parameter tuple should be a string and will be appended to the
- name of the test method.
-
- Args:
- *testcases: Parameters for the decorated method, either a single
- iterable, or a list of tuples.
-
- Returns:
- A test generator to be handled by TestGeneratorMetaclass.
- """
- return _ParameterDecorator(_FIRST_ARG, testcases)
-
-
-class TestGeneratorMetaclass(type):
- """Metaclass for test cases with test generators.
-
- A test generator is an iterable in a testcase that produces callables. These
- callables must be single-argument methods. These methods are injected into
- the class namespace and the original iterable is removed. If the name of the
- iterable conforms to the test pattern, the injected methods will be picked
- up as tests by the unittest framework.
-
-  In general, it is supposed to be used in conjunction with the
- Parameters decorator.
- """
-
- def __new__(mcs, class_name, bases, dct):
- dct['_id_suffix'] = id_suffix = {}
- for name, obj in dct.items():
- if (name.startswith(unittest.TestLoader.testMethodPrefix) and
- _NonStringIterable(obj)):
- iterator = iter(obj)
- dct.pop(name)
- _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)
-
- return type.__new__(mcs, class_name, bases, dct)
-
-
-def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
- """Adds individual test cases to a dictionary.
-
- Args:
- dct: The target dictionary.
- id_suffix: The dictionary for mapping names to test IDs.
- name: The original name of the test case.
- iterator: The iterator generating the individual test cases.
- """
- for idx, func in enumerate(iterator):
- assert callable(func), 'Test generators must yield callables, got %r' % (
- func,)
- if getattr(func, '__x_use_name__', False):
- new_name = func.__name__
- else:
- new_name = '%s%s%d' % (name, _SEPARATOR, idx)
- assert new_name not in dct, (
- 'Name of parameterized test case "%s" not unique' % (new_name,))
- dct[new_name] = func
- id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
-
-
-class ParameterizedTestCase(unittest.TestCase):
- """Base class for test cases using the Parameters decorator."""
- __metaclass__ = TestGeneratorMetaclass
-
- def _OriginalName(self):
- return self._testMethodName.split(_SEPARATOR)[0]
-
- def __str__(self):
- return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))
-
- def id(self): # pylint: disable=invalid-name
- """Returns the descriptive ID of the test.
-
- This is used internally by the unittesting framework to get a name
- for the test to be used in reports.
-
- Returns:
- The test id.
- """
- return '%s.%s%s' % (_StrClass(self.__class__),
- self._OriginalName(),
- self._id_suffix.get(self._testMethodName, ''))
-
-
-def CoopParameterizedTestCase(other_base_class):
- """Returns a new base class with a cooperative metaclass base.
-
- This enables the ParameterizedTestCase to be used in combination
- with other base classes that have custom metaclasses, such as
- mox.MoxTestBase.
-
- Only works with metaclasses that do not override type.__new__.
-
- Example:
-
- import google3
- import mox
-
- from google3.testing.pybase import parameterized
-
- class ExampleTest(parameterized.CoopParameterizedTestCase(mox.MoxTestBase)):
- ...
-
- Args:
- other_base_class: (class) A test case base class.
-
- Returns:
- A new class object.
- """
- metaclass = type(
- 'CoopMetaclass',
- (other_base_class.__metaclass__,
- TestGeneratorMetaclass), {})
- return metaclass(
- 'CoopParameterizedTestCase',
- (other_base_class, ParameterizedTestCase), {})
diff --git a/third_party/protobuf/python/google/protobuf/internal/any_test.proto b/third_party/protobuf/python/google/protobuf/internal/any_test.proto
deleted file mode 100644
index cd641ca0b8..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/any_test.proto
+++ /dev/null
@@ -1,42 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: jieluo@google.com (Jie Luo)
-
-syntax = "proto3";
-
-package google.protobuf.internal;
-
-import "google/protobuf/any.proto";
-
-message TestAny {
- google.protobuf.Any value = 1;
- int32 int_value = 2;
-}
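
The TestAny fixture above just wraps the well-known google.protobuf.Any type.
As a rough illustration of what such a field carries (assuming an installed
google.protobuf runtime), an Any stores a type URL plus the packed message's
serialized bytes:

    from google.protobuf import any_pb2
    from google.protobuf import timestamp_pb2

    ts = timestamp_pb2.Timestamp(seconds=1)
    any_msg = any_pb2.Any()
    any_msg.Pack(ts)              # records a type URL and the serialized bytes
    target = timestamp_pb2.Timestamp()
    assert any_msg.Is(timestamp_pb2.Timestamp.DESCRIPTOR)
    any_msg.Unpack(target)        # deserializes back into target
    assert target.seconds == 1
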
diff --git a/third_party/protobuf/python/google/protobuf/internal/api_implementation.cc b/third_party/protobuf/python/google/protobuf/internal/api_implementation.cc
deleted file mode 100644
index 6db12e8dc6..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/api_implementation.cc
+++ /dev/null
@@ -1,129 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include <Python.h>
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-// Version constant.
-// This is either 0 for python, 1 for CPP V1, 2 for CPP V2.
-//
-// 0 is default and is equivalent to
-// PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
-//
-// 1 is set with -DPYTHON_PROTO2_CPP_IMPL_V1 and is equivalent to
-// PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp
-// and
-// PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION=1
-//
-// 2 is set with -DPYTHON_PROTO2_CPP_IMPL_V2 and is equivalent to
-// PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp
-// and
-// PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION=2
-#ifdef PYTHON_PROTO2_CPP_IMPL_V1
-#error "PYTHON_PROTO2_CPP_IMPL_V1 is no longer supported."
-#else
-#ifdef PYTHON_PROTO2_CPP_IMPL_V2
-static int kImplVersion = 2;
-#else
-#ifdef PYTHON_PROTO2_PYTHON_IMPL
-static int kImplVersion = 0;
-#else
-
-static int kImplVersion = -1; // -1 means "Unspecified by compiler flags".
-
-#endif // PYTHON_PROTO2_PYTHON_IMPL
-#endif // PYTHON_PROTO2_CPP_IMPL_V2
-#endif // PYTHON_PROTO2_CPP_IMPL_V1
-
-static const char* kImplVersionName = "api_version";
-
-static const char* kModuleName = "_api_implementation";
-static const char kModuleDocstring[] =
-"_api_implementation is a module that exposes compile-time constants that\n"
-"determine the default API implementation to use for Python proto2.\n"
-"\n"
-"It complements api_implementation.py by setting defaults using compile-time\n"
-"constants defined in C, such that one can set defaults at compilation\n"
-"(e.g. with blaze flag --copt=-DPYTHON_PROTO2_CPP_IMPL_V2).";
-
-#if PY_MAJOR_VERSION >= 3
-static struct PyModuleDef _module = {
- PyModuleDef_HEAD_INIT,
- kModuleName,
- kModuleDocstring,
- -1,
- NULL,
- NULL,
- NULL,
- NULL,
- NULL
-};
-#define INITFUNC PyInit__api_implementation
-#define INITFUNC_ERRORVAL NULL
-#else
-#define INITFUNC init_api_implementation
-#define INITFUNC_ERRORVAL
-#endif
-
-extern "C" {
- PyMODINIT_FUNC INITFUNC() {
-#if PY_MAJOR_VERSION >= 3
- PyObject *module = PyModule_Create(&_module);
-#else
- PyObject *module = Py_InitModule3(
- const_cast<char*>(kModuleName),
- NULL,
- const_cast<char*>(kModuleDocstring));
-#endif
- if (module == NULL) {
- return INITFUNC_ERRORVAL;
- }
-
- // Adds the module variable "api_version".
- if (PyModule_AddIntConstant(
- module,
- const_cast<char*>(kImplVersionName),
- kImplVersion))
-#if PY_MAJOR_VERSION < 3
- return;
-#else
- { Py_DECREF(module); return NULL; }
-
- return module;
-#endif
- }
-}
-
-} // namespace python
-} // namespace protobuf
-} // namespace google
diff --git a/third_party/protobuf/python/google/protobuf/internal/api_implementation.py b/third_party/protobuf/python/google/protobuf/internal/api_implementation.py
deleted file mode 100755
index ffcf751167..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/api_implementation.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Determine which implementation of the protobuf API is used in this process.
-"""
-
-import os
-import sys
-
-try:
- # pylint: disable=g-import-not-at-top
- from google.protobuf.internal import _api_implementation
- # The compile-time constants in the _api_implementation module can be used to
- # switch to a certain implementation of the Python API at build time.
- _api_version = _api_implementation.api_version
- _proto_extension_modules_exist_in_build = True
-except ImportError:
- _api_version = -1 # Unspecified by compiler flags.
- _proto_extension_modules_exist_in_build = False
-
-if _api_version == 1:
- raise ValueError('api_version=1 is no longer supported.')
-if _api_version < 0: # Still unspecified?
- try:
- # The presence of this module in a build allows the proto implementation to
- # be upgraded merely via build deps rather than a compiler flag or the
- # runtime environment variable.
- # pylint: disable=g-import-not-at-top
- from google.protobuf import _use_fast_cpp_protos
- # Work around a known issue in the classic bootstrap .par import hook.
- if not _use_fast_cpp_protos:
- raise ImportError('_use_fast_cpp_protos import succeeded but was None')
- del _use_fast_cpp_protos
- _api_version = 2
- except ImportError:
- if _proto_extension_modules_exist_in_build:
- if sys.version_info[0] >= 3: # Python 3 defaults to C++ impl v2.
- _api_version = 2
- # TODO(b/17427486): Make Python 2 default to C++ impl v2.
-
-_default_implementation_type = (
- 'python' if _api_version <= 0 else 'cpp')
-
-# This environment variable can be used to switch to a certain implementation
-# of the Python API, overriding the compile-time constants in the
-# _api_implementation module. Right now only 'python' and 'cpp' are valid
-# values. Any other value will be treated as 'cpp'.
-_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
- _default_implementation_type)
-
-if _implementation_type != 'python':
- _implementation_type = 'cpp'
-
-# This environment variable can be used to switch between the two
-# 'cpp' implementations, overriding the compile-time constants in the
-# _api_implementation module. Right now only '2' is supported. Any other
-# value will cause an error to be raised.
-_implementation_version_str = os.getenv(
- 'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION', '2')
-
-if _implementation_version_str != '2':
- raise ValueError(
- 'unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: "' +
- _implementation_version_str + '" (supported versions: 2)'
- )
-
-_implementation_version = int(_implementation_version_str)
-
-
-# Usage of this function is discouraged. Clients shouldn't care which
-# implementation of the API is in use. Note that there is no guarantee
-# that differences between APIs will be maintained.
-# Please don't use this function if possible.
-def Type():
- return _implementation_type
-
-
-# See comment on 'Type' above.
-def Version():
- return _implementation_version
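
As a quick sketch of the selection logic above (again assuming an installed
google.protobuf package): the environment variable is consulted at import
time, so any override has to be in place before the module is first imported.

    import os
    os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python'  # set before importing

    from google.protobuf.internal import api_implementation

    print(api_implementation.Type())     # 'python' here; the default depends on the build
    print(api_implementation.Version())  # 2
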
diff --git a/third_party/protobuf/python/google/protobuf/internal/containers.py b/third_party/protobuf/python/google/protobuf/internal/containers.py
deleted file mode 100755
index 97cdd848e3..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/containers.py
+++ /dev/null
@@ -1,611 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Contains container classes to represent different protocol buffer types.
-
-This file defines container classes which represent categories of protocol
-buffer field types which need extra maintenance. Currently these categories
-are:
- - Repeated scalar fields - These are all repeated fields which aren't
- composite (e.g. they are of simple types like int32, string, etc).
- - Repeated composite fields - Repeated fields which are composite. This
- includes groups and nested messages.
-"""
-
-__author__ = 'petar@google.com (Petar Petrov)'
-
-import collections
-import sys
-
-if sys.version_info[0] < 3:
- # We would use collections.MutableMapping all the time, but in Python 2 it
- # doesn't define __slots__. This causes two significant problems:
- #
- # 1. we can't disallow arbitrary attribute assignment, even if our derived
- # classes *do* define __slots__.
- #
- # 2. we can't safely derive a C type from it without __slots__ defined (the
- # interpreter expects to find a dict at tp_dictoffset, which we can't
-  #    robustly provide). And we don't want an instance dict anyway.
- #
- # So this is the Python 2.7 definition of Mapping/MutableMapping functions
- # verbatim, except that:
- # 1. We declare __slots__.
- # 2. We don't declare this as a virtual base class. The classes defined
- # in collections are the interesting base classes, not us.
- #
- # Note: deriving from object is critical. It is the only thing that makes
- # this a true type, allowing us to derive from it in C++ cleanly and making
- # __slots__ properly disallow arbitrary element assignment.
-
- class Mapping(object):
- __slots__ = ()
-
- def get(self, key, default=None):
- try:
- return self[key]
- except KeyError:
- return default
-
- def __contains__(self, key):
- try:
- self[key]
- except KeyError:
- return False
- else:
- return True
-
- def iterkeys(self):
- return iter(self)
-
- def itervalues(self):
- for key in self:
- yield self[key]
-
- def iteritems(self):
- for key in self:
- yield (key, self[key])
-
- def keys(self):
- return list(self)
-
- def items(self):
- return [(key, self[key]) for key in self]
-
- def values(self):
- return [self[key] for key in self]
-
- # Mappings are not hashable by default, but subclasses can change this
- __hash__ = None
-
- def __eq__(self, other):
- if not isinstance(other, collections.Mapping):
- return NotImplemented
- return dict(self.items()) == dict(other.items())
-
- def __ne__(self, other):
- return not (self == other)
-
- class MutableMapping(Mapping):
- __slots__ = ()
-
- __marker = object()
-
- def pop(self, key, default=__marker):
- try:
- value = self[key]
- except KeyError:
- if default is self.__marker:
- raise
- return default
- else:
- del self[key]
- return value
-
- def popitem(self):
- try:
- key = next(iter(self))
- except StopIteration:
- raise KeyError
- value = self[key]
- del self[key]
- return key, value
-
- def clear(self):
- try:
- while True:
- self.popitem()
- except KeyError:
- pass
-
- def update(*args, **kwds):
- if len(args) > 2:
- raise TypeError("update() takes at most 2 positional "
- "arguments ({} given)".format(len(args)))
- elif not args:
- raise TypeError("update() takes at least 1 argument (0 given)")
- self = args[0]
- other = args[1] if len(args) >= 2 else ()
-
- if isinstance(other, Mapping):
- for key in other:
- self[key] = other[key]
- elif hasattr(other, "keys"):
- for key in other.keys():
- self[key] = other[key]
- else:
- for key, value in other:
- self[key] = value
- for key, value in kwds.items():
- self[key] = value
-
- def setdefault(self, key, default=None):
- try:
- return self[key]
- except KeyError:
- self[key] = default
- return default
-
- collections.Mapping.register(Mapping)
- collections.MutableMapping.register(MutableMapping)
-
-else:
- # In Python 3 we can just use MutableMapping directly, because it defines
- # __slots__.
- MutableMapping = collections.MutableMapping
-
-
-class BaseContainer(object):
-
- """Base container class."""
-
- # Minimizes memory usage and disallows assignment to other attributes.
- __slots__ = ['_message_listener', '_values']
-
- def __init__(self, message_listener):
- """
- Args:
- message_listener: A MessageListener implementation.
-        The container will call this object's
- Modified() method when it is modified.
- """
- self._message_listener = message_listener
- self._values = []
-
- def __getitem__(self, key):
- """Retrieves item by the specified key."""
- return self._values[key]
-
- def __len__(self):
- """Returns the number of elements in the container."""
- return len(self._values)
-
- def __ne__(self, other):
- """Checks if another instance isn't equal to this one."""
- # The concrete classes should define __eq__.
- return not self == other
-
- def __hash__(self):
- raise TypeError('unhashable object')
-
- def __repr__(self):
- return repr(self._values)
-
- def sort(self, *args, **kwargs):
- # Continue to support the old sort_function keyword argument.
- # This is expected to be a rare occurrence, so use LBYL to avoid
- # the overhead of actually catching KeyError.
- if 'sort_function' in kwargs:
- kwargs['cmp'] = kwargs.pop('sort_function')
- self._values.sort(*args, **kwargs)
-
-
-class RepeatedScalarFieldContainer(BaseContainer):
-
- """Simple, type-checked, list-like container for holding repeated scalars."""
-
- # Disallows assignment to other attributes.
- __slots__ = ['_type_checker']
-
- def __init__(self, message_listener, type_checker):
- """
- Args:
- message_listener: A MessageListener implementation.
- The RepeatedScalarFieldContainer will call this object's
- Modified() method when it is modified.
- type_checker: A type_checkers.ValueChecker instance to run on elements
- inserted into this container.
- """
- super(RepeatedScalarFieldContainer, self).__init__(message_listener)
- self._type_checker = type_checker
-
- def append(self, value):
- """Appends an item to the list. Similar to list.append()."""
- self._values.append(self._type_checker.CheckValue(value))
- if not self._message_listener.dirty:
- self._message_listener.Modified()
-
- def insert(self, key, value):
- """Inserts the item at the specified position. Similar to list.insert()."""
- self._values.insert(key, self._type_checker.CheckValue(value))
- if not self._message_listener.dirty:
- self._message_listener.Modified()
-
- def extend(self, elem_seq):
- """Extends by appending the given iterable. Similar to list.extend()."""
-
- if elem_seq is None:
- return
- try:
- elem_seq_iter = iter(elem_seq)
- except TypeError:
- if not elem_seq:
- # silently ignore falsy inputs :-/.
- # TODO(ptucker): Deprecate this behavior. b/18413862
- return
- raise
-
- new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
- if new_values:
- self._values.extend(new_values)
- self._message_listener.Modified()
-
- def MergeFrom(self, other):
- """Appends the contents of another repeated field of the same type to this
- one. We do not check the types of the individual fields.
- """
- self._values.extend(other._values)
- self._message_listener.Modified()
-
- def remove(self, elem):
- """Removes an item from the list. Similar to list.remove()."""
- self._values.remove(elem)
- self._message_listener.Modified()
-
- def pop(self, key=-1):
- """Removes and returns an item at a given index. Similar to list.pop()."""
- value = self._values[key]
- self.__delitem__(key)
- return value
-
- def __setitem__(self, key, value):
- """Sets the item on the specified position."""
- if isinstance(key, slice): # PY3
- if key.step is not None:
- raise ValueError('Extended slices not supported')
- self.__setslice__(key.start, key.stop, value)
- else:
- self._values[key] = self._type_checker.CheckValue(value)
- self._message_listener.Modified()
-
- def __getslice__(self, start, stop):
- """Retrieves the subset of items from between the specified indices."""
- return self._values[start:stop]
-
- def __setslice__(self, start, stop, values):
- """Sets the subset of items from between the specified indices."""
- new_values = []
- for value in values:
- new_values.append(self._type_checker.CheckValue(value))
- self._values[start:stop] = new_values
- self._message_listener.Modified()
-
- def __delitem__(self, key):
- """Deletes the item at the specified position."""
- del self._values[key]
- self._message_listener.Modified()
-
- def __delslice__(self, start, stop):
- """Deletes the subset of items from between the specified indices."""
- del self._values[start:stop]
- self._message_listener.Modified()
-
- def __eq__(self, other):
- """Compares the current instance with another one."""
- if self is other:
- return True
- # Special case for the same type which should be common and fast.
- if isinstance(other, self.__class__):
- return other._values == self._values
- # We are presumably comparing against some other sequence type.
- return other == self._values
-
-collections.MutableSequence.register(BaseContainer)
-
-
-class RepeatedCompositeFieldContainer(BaseContainer):
-
- """Simple, list-like container for holding repeated composite fields."""
-
- # Disallows assignment to other attributes.
- __slots__ = ['_message_descriptor']
-
- def __init__(self, message_listener, message_descriptor):
- """
-    Note that we pass in a descriptor instead of the generated class directly,
-    since at the time we construct a RepeatedCompositeFieldContainer we
- haven't yet necessarily initialized the type that will be contained in the
- container.
-
- Args:
- message_listener: A MessageListener implementation.
- The RepeatedCompositeFieldContainer will call this object's
- Modified() method when it is modified.
- message_descriptor: A Descriptor instance describing the protocol type
- that should be present in this container. We'll use the
- _concrete_class field of this descriptor when the client calls add().
- """
- super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
- self._message_descriptor = message_descriptor
-
- def add(self, **kwargs):
- """Adds a new element at the end of the list and returns it. Keyword
- arguments may be used to initialize the element.
- """
- new_element = self._message_descriptor._concrete_class(**kwargs)
- new_element._SetListener(self._message_listener)
- self._values.append(new_element)
- if not self._message_listener.dirty:
- self._message_listener.Modified()
- return new_element
-
- def extend(self, elem_seq):
- """Extends by appending the given sequence of elements of the same type
- as this one, copying each individual message.
- """
- message_class = self._message_descriptor._concrete_class
- listener = self._message_listener
- values = self._values
- for message in elem_seq:
- new_element = message_class()
- new_element._SetListener(listener)
- new_element.MergeFrom(message)
- values.append(new_element)
- listener.Modified()
-
- def MergeFrom(self, other):
- """Appends the contents of another repeated field of the same type to this
- one, copying each individual message.
- """
- self.extend(other._values)
-
- def remove(self, elem):
- """Removes an item from the list. Similar to list.remove()."""
- self._values.remove(elem)
- self._message_listener.Modified()
-
- def pop(self, key=-1):
- """Removes and returns an item at a given index. Similar to list.pop()."""
- value = self._values[key]
- self.__delitem__(key)
- return value
-
- def __getslice__(self, start, stop):
- """Retrieves the subset of items from between the specified indices."""
- return self._values[start:stop]
-
- def __delitem__(self, key):
- """Deletes the item at the specified position."""
- del self._values[key]
- self._message_listener.Modified()
-
- def __delslice__(self, start, stop):
- """Deletes the subset of items from between the specified indices."""
- del self._values[start:stop]
- self._message_listener.Modified()
-
- def __eq__(self, other):
- """Compares the current instance with another one."""
- if self is other:
- return True
- if not isinstance(other, self.__class__):
- raise TypeError('Can only compare repeated composite fields against '
- 'other repeated composite fields.')
- return self._values == other._values
-
-
-class ScalarMap(MutableMapping):
-
-  """Simple, type-checked, dict-like container for holding scalar map values."""
-
- # Disallows assignment to other attributes.
- __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener']
-
- def __init__(self, message_listener, key_checker, value_checker):
- """
- Args:
- message_listener: A MessageListener implementation.
- The ScalarMap will call this object's Modified() method when it
- is modified.
- key_checker: A type_checkers.ValueChecker instance to run on keys
- inserted into this container.
- value_checker: A type_checkers.ValueChecker instance to run on values
- inserted into this container.
- """
- self._message_listener = message_listener
- self._key_checker = key_checker
- self._value_checker = value_checker
- self._values = {}
-
- def __getitem__(self, key):
- try:
- return self._values[key]
- except KeyError:
- key = self._key_checker.CheckValue(key)
- val = self._value_checker.DefaultValue()
- self._values[key] = val
- return val
-
- def __contains__(self, item):
- # We check the key's type to match the strong-typing flavor of the API.
- # Also this makes it easier to match the behavior of the C++ implementation.
- self._key_checker.CheckValue(item)
- return item in self._values
-
- # We need to override this explicitly, because our defaultdict-like behavior
- # will make the default implementation (from our base class) always insert
- # the key.
- def get(self, key, default=None):
- if key in self:
- return self[key]
- else:
- return default
-
- def __setitem__(self, key, value):
- checked_key = self._key_checker.CheckValue(key)
- checked_value = self._value_checker.CheckValue(value)
- self._values[checked_key] = checked_value
- self._message_listener.Modified()
-
- def __delitem__(self, key):
- del self._values[key]
- self._message_listener.Modified()
-
- def __len__(self):
- return len(self._values)
-
- def __iter__(self):
- return iter(self._values)
-
- def __repr__(self):
- return repr(self._values)
-
- def MergeFrom(self, other):
- self._values.update(other._values)
- self._message_listener.Modified()
-
- def InvalidateIterators(self):
- # It appears that the only way to reliably invalidate iterators to
- # self._values is to ensure that its size changes.
- original = self._values
- self._values = original.copy()
- original[None] = None
-
- # This is defined in the abstract base, but we can do it much more cheaply.
- def clear(self):
- self._values.clear()
- self._message_listener.Modified()
-
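# --- Editor's illustrative sketch; not part of the original file. ---
# It shows the defaultdict-like behavior documented above: indexing a missing
# key inserts the type's default value, while get() does not. The listener and
# checker classes below are hypothetical stand-ins for the MessageListener and
# type_checkers.ValueChecker collaborators that real messages supply.
class _SketchListener(object):
  dirty = False
  def Modified(self):
    self.dirty = True

class _SketchInt32Checker(object):
  def CheckValue(self, proposed_value):
    if not isinstance(proposed_value, int):
      raise TypeError('int expected, got %r' % (proposed_value,))
    return proposed_value
  def DefaultValue(self):
    return 0

_sketch_map = ScalarMap(_SketchListener(), _SketchInt32Checker(),
                        _SketchInt32Checker())
assert _sketch_map.get(5) is None   # get() never inserts the key.
assert _sketch_map[5] == 0          # Indexing inserts the default value...
assert 5 in _sketch_map             # ...so the key now exists.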
-
-class MessageMap(MutableMapping):
-
-  """Simple, type-checked, dict-like container with submessage values."""
-
- # Disallows assignment to other attributes.
- __slots__ = ['_key_checker', '_values', '_message_listener',
- '_message_descriptor']
-
- def __init__(self, message_listener, message_descriptor, key_checker):
- """
- Args:
- message_listener: A MessageListener implementation.
-        The MessageMap will call this object's Modified() method when it
-        is modified.
-      message_descriptor: A Descriptor instance describing the protocol type
-        of the messages stored as values in this container.
-      key_checker: A type_checkers.ValueChecker instance to run on keys
-        inserted into this container.
- """
- self._message_listener = message_listener
- self._message_descriptor = message_descriptor
- self._key_checker = key_checker
- self._values = {}
-
- def __getitem__(self, key):
- try:
- return self._values[key]
- except KeyError:
- key = self._key_checker.CheckValue(key)
- new_element = self._message_descriptor._concrete_class()
- new_element._SetListener(self._message_listener)
- self._values[key] = new_element
- self._message_listener.Modified()
-
- return new_element
-
- def get_or_create(self, key):
-    """get_or_create() is an alias for getitem (i.e. map[key]).
-
- Args:
- key: The key to get or create in the map.
-
- This is useful in cases where you want to be explicit that the call is
-    mutating the map. It can also avoid lint errors for statements that would
-    otherwise appear to have no effect, such as:
-
- msg.my_map[key]
- """
- return self[key]
-
- # We need to override this explicitly, because our defaultdict-like behavior
- # will make the default implementation (from our base class) always insert
- # the key.
- def get(self, key, default=None):
- if key in self:
- return self[key]
- else:
- return default
-
- def __contains__(self, item):
- return item in self._values
-
- def __setitem__(self, key, value):
- raise ValueError('May not set values directly, call my_map[key].foo = 5')
-
- def __delitem__(self, key):
- del self._values[key]
- self._message_listener.Modified()
-
- def __len__(self):
- return len(self._values)
-
- def __iter__(self):
- return iter(self._values)
-
- def __repr__(self):
- return repr(self._values)
-
- def MergeFrom(self, other):
- for key in other:
- self[key].MergeFrom(other[key])
- # self._message_listener.Modified() not required here, because
- # mutations to submessages already propagate.
-
- def InvalidateIterators(self):
- # It appears that the only way to reliably invalidate iterators to
- # self._values is to ensure that its size changes.
- original = self._values
- self._values = original.copy()
- original[None] = None
-
- # This is defined in the abstract base, but we can do it much more cheaply.
- def clear(self):
- self._values.clear()
- self._message_listener.Modified()
diff --git a/third_party/protobuf/python/google/protobuf/internal/decoder.py b/third_party/protobuf/python/google/protobuf/internal/decoder.py
deleted file mode 100755
index 31869e4575..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/decoder.py
+++ /dev/null
@@ -1,854 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Code for decoding protocol buffer primitives.
-
-This code is very similar to encoder.py -- read the docs for that module first.
-
-A "decoder" is a function with the signature:
- Decode(buffer, pos, end, message, field_dict)
-The arguments are:
- buffer: The string containing the encoded message.
- pos: The current position in the string.
- end: The position in the string where the current message ends. May be
- less than len(buffer) if we're reading a sub-message.
- message: The message object into which we're parsing.
- field_dict: message._fields (avoids a hashtable lookup).
-The decoder reads the field and stores it into field_dict, returning the new
-buffer position. A decoder for a repeated field may proactively decode all of
-the elements of that field, if they appear consecutively.
-
-Note that decoders may throw any of the following:
- IndexError: Indicates a truncated message.
- struct.error: Unpacking of a fixed-width field failed.
- message.DecodeError: Other errors.
-
-Decoders are expected to raise an exception if they are called with pos > end.
-This allows callers to be lax about bounds checking: it's fine to read past
-"end" as long as you are sure that someone else will notice and throw an
-exception later on.
-
-Something up the call stack is expected to catch IndexError and struct.error
-and convert them to message.DecodeError.
-
-Decoders are constructed using decoder constructors with the signature:
- MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
-The arguments are:
- field_number: The field number of the field we want to decode.
- is_repeated: Is the field a repeated field? (bool)
- is_packed: Is the field a packed field? (bool)
- key: The key to use when looking up the field within field_dict.
- (This is actually the FieldDescriptor but nothing in this
- file should depend on that.)
- new_default: A function which takes a message object as a parameter and
- returns a new instance of the default value for this field.
- (This is called for repeated fields and sub-messages, when an
- instance does not already exist.)
-
-As with encoders, we define a decoder constructor for every type of field.
-Then, for every field of every message class we construct an actual decoder.
-That decoder goes into a dict indexed by tag, so when we decode a message
-we repeatedly read a tag, look up the corresponding decoder, and invoke it.
-"""
-
-__author__ = 'kenton@google.com (Kenton Varda)'
-
-import struct
-
-import six
-
-if six.PY3:
- long = int
-
-from google.protobuf.internal import encoder
-from google.protobuf.internal import wire_format
-from google.protobuf import message
-
-
-# This will overflow and thus become IEEE-754 "infinity". We would use
-# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
-_POS_INF = 1e10000
-_NEG_INF = -_POS_INF
-_NAN = _POS_INF * 0
-
-
-# This is not for optimization, but rather to avoid conflicts with local
-# variables named "message".
-_DecodeError = message.DecodeError
-
-
-def _VarintDecoder(mask, result_type):
-  """Return a decoder for a basic varint value (does not include tag).
-
- Decoded values will be bitwise-anded with the given mask before being
- returned, e.g. to limit them to 32 bits. The returned decoder does not
- take the usual "end" parameter -- the caller is expected to do bounds checking
- after the fact (often the caller can defer such checking until later). The
- decoder returns a (value, new_pos) pair.
- """
-
- def DecodeVarint(buffer, pos):
- result = 0
- shift = 0
- while 1:
- b = six.indexbytes(buffer, pos)
- result |= ((b & 0x7f) << shift)
- pos += 1
- if not (b & 0x80):
- result &= mask
- result = result_type(result)
- return (result, pos)
- shift += 7
- if shift >= 64:
- raise _DecodeError('Too many bytes when decoding varint.')
- return DecodeVarint
-
-
-def _SignedVarintDecoder(mask, result_type):
- """Like _VarintDecoder() but decodes signed values."""
-
- def DecodeVarint(buffer, pos):
- result = 0
- shift = 0
- while 1:
- b = six.indexbytes(buffer, pos)
- result |= ((b & 0x7f) << shift)
- pos += 1
- if not (b & 0x80):
- if result > 0x7fffffffffffffff:
- result -= (1 << 64)
- result |= ~mask
- else:
- result &= mask
- result = result_type(result)
- return (result, pos)
- shift += 7
- if shift >= 64:
- raise _DecodeError('Too many bytes when decoding varint.')
- return DecodeVarint
-
-# We force 32-bit values to int and 64-bit values to long to make
-# alternate implementations where the distinction is more significant
-# (e.g. the C++ implementation) simpler.
-
-_DecodeVarint = _VarintDecoder((1 << 64) - 1, long)
-_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1, long)
-
-# Use these versions for values which must be limited to 32 bits.
-_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
-_DecodeSignedVarint32 = _SignedVarintDecoder((1 << 32) - 1, int)
-
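# --- Editor's illustrative sketch; not part of the original file. ---
# The canonical varint example: 150 is encoded as the two bytes 0x96 0x01,
# and the decoder returns the value plus the position just past it.
assert _DecodeVarint32(b'\x96\x01', 0) == (150, 2)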
-
-def ReadTag(buffer, pos):
- """Read a tag from the buffer, and return a (tag_bytes, new_pos) tuple.
-
- We return the raw bytes of the tag rather than decoding them. The raw
- bytes can then be used to look up the proper decoder. This effectively allows
- us to trade some work that would be done in pure-python (decoding a varint)
- for work that is done in C (searching for a byte string in a hash table).
- In a low-level language it would be much cheaper to decode the varint and
- use that, but not in Python.
- """
-
- start = pos
- while six.indexbytes(buffer, pos) & 0x80:
- pos += 1
- pos += 1
- return (buffer[start:pos], pos)
-
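# --- Editor's illustrative sketch; not part of the original file. ---
# For the encoded field "1: 150" (bytes 0x08 0x96 0x01), ReadTag returns the
# raw one-byte tag and the position of the varint value that follows it.
assert ReadTag(b'\x08\x96\x01', 0) == (b'\x08', 1)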
-
-# --------------------------------------------------------------------
-
-
-def _SimpleDecoder(wire_type, decode_value):
- """Return a constructor for a decoder for fields of a particular type.
-
- Args:
- wire_type: The field's wire type.
- decode_value: A function which decodes an individual value, e.g.
- _DecodeVarint()
- """
-
- def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default):
- if is_packed:
- local_DecodeVarint = _DecodeVarint
- def DecodePackedField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- (endpoint, pos) = local_DecodeVarint(buffer, pos)
- endpoint += pos
- if endpoint > end:
- raise _DecodeError('Truncated message.')
- while pos < endpoint:
- (element, pos) = decode_value(buffer, pos)
- value.append(element)
- if pos > endpoint:
- del value[-1] # Discard corrupt value.
- raise _DecodeError('Packed element was truncated.')
- return pos
- return DecodePackedField
- elif is_repeated:
- tag_bytes = encoder.TagBytes(field_number, wire_type)
- tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- while 1:
- (element, new_pos) = decode_value(buffer, pos)
- value.append(element)
- # Predict that the next tag is another copy of the same repeated
- # field.
- pos = new_pos + tag_len
- if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
- # Prediction failed. Return.
- if new_pos > end:
- raise _DecodeError('Truncated message.')
- return new_pos
- return DecodeRepeatedField
- else:
- def DecodeField(buffer, pos, end, message, field_dict):
- (field_dict[key], pos) = decode_value(buffer, pos)
- if pos > end:
- del field_dict[key] # Discard corrupt value.
- raise _DecodeError('Truncated message.')
- return pos
- return DecodeField
-
- return SpecificDecoder
-
-
-def _ModifiedDecoder(wire_type, decode_value, modify_value):
-  """Like _SimpleDecoder but additionally invokes modify_value on every value
- before storing it. Usually modify_value is ZigZagDecode.
- """
-
- # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
- # not enough to make a significant difference.
-
- def InnerDecode(buffer, pos):
- (result, new_pos) = decode_value(buffer, pos)
- return (modify_value(result), new_pos)
- return _SimpleDecoder(wire_type, InnerDecode)
-
-
-def _StructPackDecoder(wire_type, format):
- """Return a constructor for a decoder for a fixed-width field.
-
- Args:
- wire_type: The field's wire type.
- format: The format string to pass to struct.unpack().
- """
-
- value_size = struct.calcsize(format)
- local_unpack = struct.unpack
-
- # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
- # not enough to make a significant difference.
-
- # Note that we expect someone up-stack to catch struct.error and convert
- # it to _DecodeError -- this way we don't have to set up exception-
- # handling blocks every time we parse one value.
-
- def InnerDecode(buffer, pos):
- new_pos = pos + value_size
- result = local_unpack(format, buffer[pos:new_pos])[0]
- return (result, new_pos)
- return _SimpleDecoder(wire_type, InnerDecode)
-
-
-def _FloatDecoder():
- """Returns a decoder for a float field.
-
- This code works around a bug in struct.unpack for non-finite 32-bit
- floating-point values.
- """
-
- local_unpack = struct.unpack
-
- def InnerDecode(buffer, pos):
- # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign
- # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
- new_pos = pos + 4
- float_bytes = buffer[pos:new_pos]
-
- # If this value has all its exponent bits set, then it's non-finite.
- # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
- # To avoid that, we parse it specially.
- if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
- # If at least one significand bit is set...
- if float_bytes[0:3] != b'\x00\x00\x80':
- return (_NAN, new_pos)
- # If sign bit is set...
- if float_bytes[3:4] == b'\xFF':
- return (_NEG_INF, new_pos)
- return (_POS_INF, new_pos)
-
- # Note that we expect someone up-stack to catch struct.error and convert
- # it to _DecodeError -- this way we don't have to set up exception-
- # handling blocks every time we parse one value.
- result = local_unpack('<f', float_bytes)[0]
- return (result, new_pos)
- return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
-
-
-def _DoubleDecoder():
- """Returns a decoder for a double field.
-
- This code works around a bug in struct.unpack for not-a-number.
- """
-
- local_unpack = struct.unpack
-
- def InnerDecode(buffer, pos):
- # We expect a 64-bit value in little-endian byte order. Bit 1 is the sign
- # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
- new_pos = pos + 8
- double_bytes = buffer[pos:new_pos]
-
- # If this value has all its exponent bits set and at least one significand
- # bit set, it's not a number. In Python 2.4, struct.unpack will treat it
- # as inf or -inf. To avoid that, we treat it specially.
- if ((double_bytes[7:8] in b'\x7F\xFF')
- and (double_bytes[6:7] >= b'\xF0')
- and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
- return (_NAN, new_pos)
-
- # Note that we expect someone up-stack to catch struct.error and convert
- # it to _DecodeError -- this way we don't have to set up exception-
- # handling blocks every time we parse one value.
- result = local_unpack('<d', double_bytes)[0]
- return (result, new_pos)
- return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
-
-
-def EnumDecoder(field_number, is_repeated, is_packed, key, new_default):
- enum_type = key.enum_type
- if is_packed:
- local_DecodeVarint = _DecodeVarint
- def DecodePackedField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- (endpoint, pos) = local_DecodeVarint(buffer, pos)
- endpoint += pos
- if endpoint > end:
- raise _DecodeError('Truncated message.')
- while pos < endpoint:
- value_start_pos = pos
- (element, pos) = _DecodeSignedVarint32(buffer, pos)
- if element in enum_type.values_by_number:
- value.append(element)
- else:
- if not message._unknown_fields:
- message._unknown_fields = []
- tag_bytes = encoder.TagBytes(field_number,
- wire_format.WIRETYPE_VARINT)
- message._unknown_fields.append(
- (tag_bytes, buffer[value_start_pos:pos]))
- if pos > endpoint:
- if element in enum_type.values_by_number:
- del value[-1] # Discard corrupt value.
- else:
- del message._unknown_fields[-1]
- raise _DecodeError('Packed element was truncated.')
- return pos
- return DecodePackedField
- elif is_repeated:
- tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
- tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- while 1:
- (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
- if element in enum_type.values_by_number:
- value.append(element)
- else:
- if not message._unknown_fields:
- message._unknown_fields = []
- message._unknown_fields.append(
- (tag_bytes, buffer[pos:new_pos]))
- # Predict that the next tag is another copy of the same repeated
- # field.
- pos = new_pos + tag_len
- if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
- # Prediction failed. Return.
- if new_pos > end:
- raise _DecodeError('Truncated message.')
- return new_pos
- return DecodeRepeatedField
- else:
- def DecodeField(buffer, pos, end, message, field_dict):
- value_start_pos = pos
- (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
- if pos > end:
- raise _DecodeError('Truncated message.')
- if enum_value in enum_type.values_by_number:
- field_dict[key] = enum_value
- else:
- if not message._unknown_fields:
- message._unknown_fields = []
- tag_bytes = encoder.TagBytes(field_number,
- wire_format.WIRETYPE_VARINT)
- message._unknown_fields.append(
- (tag_bytes, buffer[value_start_pos:pos]))
- return pos
- return DecodeField
-
-
-# --------------------------------------------------------------------
-
-
-Int32Decoder = _SimpleDecoder(
- wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)
-
-Int64Decoder = _SimpleDecoder(
- wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)
-
-UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
-UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)
-
-SInt32Decoder = _ModifiedDecoder(
- wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
-SInt64Decoder = _ModifiedDecoder(
- wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)
-
-# Note that Python conveniently guarantees that when using the '<' prefix on
-# formats, they will also have the same size across all platforms (as opposed
-# to without the prefix, where their sizes depend on the C compiler's basic
-# type sizes).
-Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
-Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
-SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
-SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
-FloatDecoder = _FloatDecoder()
-DoubleDecoder = _DoubleDecoder()
-
-BoolDecoder = _ModifiedDecoder(
- wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
-
-
-def StringDecoder(field_number, is_repeated, is_packed, key, new_default):
- """Returns a decoder for a string field."""
-
- local_DecodeVarint = _DecodeVarint
- local_unicode = six.text_type
-
- def _ConvertToUnicode(byte_str):
- try:
- return local_unicode(byte_str, 'utf-8')
- except UnicodeDecodeError as e:
- # add more information to the error message and re-raise it.
- e.reason = '%s in field: %s' % (e, key.full_name)
- raise
-
- assert not is_packed
- if is_repeated:
- tag_bytes = encoder.TagBytes(field_number,
- wire_format.WIRETYPE_LENGTH_DELIMITED)
- tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- while 1:
- (size, pos) = local_DecodeVarint(buffer, pos)
- new_pos = pos + size
- if new_pos > end:
- raise _DecodeError('Truncated string.')
- value.append(_ConvertToUnicode(buffer[pos:new_pos]))
- # Predict that the next tag is another copy of the same repeated field.
- pos = new_pos + tag_len
- if buffer[new_pos:pos] != tag_bytes or new_pos == end:
- # Prediction failed. Return.
- return new_pos
- return DecodeRepeatedField
- else:
- def DecodeField(buffer, pos, end, message, field_dict):
- (size, pos) = local_DecodeVarint(buffer, pos)
- new_pos = pos + size
- if new_pos > end:
- raise _DecodeError('Truncated string.')
- field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
- return new_pos
- return DecodeField
-
-
-def BytesDecoder(field_number, is_repeated, is_packed, key, new_default):
- """Returns a decoder for a bytes field."""
-
- local_DecodeVarint = _DecodeVarint
-
- assert not is_packed
- if is_repeated:
- tag_bytes = encoder.TagBytes(field_number,
- wire_format.WIRETYPE_LENGTH_DELIMITED)
- tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- while 1:
- (size, pos) = local_DecodeVarint(buffer, pos)
- new_pos = pos + size
- if new_pos > end:
- raise _DecodeError('Truncated string.')
- value.append(buffer[pos:new_pos])
- # Predict that the next tag is another copy of the same repeated field.
- pos = new_pos + tag_len
- if buffer[new_pos:pos] != tag_bytes or new_pos == end:
- # Prediction failed. Return.
- return new_pos
- return DecodeRepeatedField
- else:
- def DecodeField(buffer, pos, end, message, field_dict):
- (size, pos) = local_DecodeVarint(buffer, pos)
- new_pos = pos + size
- if new_pos > end:
- raise _DecodeError('Truncated string.')
- field_dict[key] = buffer[pos:new_pos]
- return new_pos
- return DecodeField
-
-
-def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
- """Returns a decoder for a group field."""
-
- end_tag_bytes = encoder.TagBytes(field_number,
- wire_format.WIRETYPE_END_GROUP)
- end_tag_len = len(end_tag_bytes)
-
- assert not is_packed
- if is_repeated:
- tag_bytes = encoder.TagBytes(field_number,
- wire_format.WIRETYPE_START_GROUP)
- tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- while 1:
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- # Read sub-message.
- pos = value.add()._InternalParse(buffer, pos, end)
- # Read end tag.
- new_pos = pos+end_tag_len
- if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
- raise _DecodeError('Missing group end tag.')
- # Predict that the next tag is another copy of the same repeated field.
- pos = new_pos + tag_len
- if buffer[new_pos:pos] != tag_bytes or new_pos == end:
- # Prediction failed. Return.
- return new_pos
- return DecodeRepeatedField
- else:
- def DecodeField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- # Read sub-message.
- pos = value._InternalParse(buffer, pos, end)
- # Read end tag.
- new_pos = pos+end_tag_len
- if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
- raise _DecodeError('Missing group end tag.')
- return new_pos
- return DecodeField
-
-
-def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
- """Returns a decoder for a message field."""
-
- local_DecodeVarint = _DecodeVarint
-
- assert not is_packed
- if is_repeated:
- tag_bytes = encoder.TagBytes(field_number,
- wire_format.WIRETYPE_LENGTH_DELIMITED)
- tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- while 1:
- # Read length.
- (size, pos) = local_DecodeVarint(buffer, pos)
- new_pos = pos + size
- if new_pos > end:
- raise _DecodeError('Truncated message.')
- # Read sub-message.
- if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
- # The only reason _InternalParse would return early is if it
- # encountered an end-group tag.
- raise _DecodeError('Unexpected end-group tag.')
- # Predict that the next tag is another copy of the same repeated field.
- pos = new_pos + tag_len
- if buffer[new_pos:pos] != tag_bytes or new_pos == end:
- # Prediction failed. Return.
- return new_pos
- return DecodeRepeatedField
- else:
- def DecodeField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- # Read length.
- (size, pos) = local_DecodeVarint(buffer, pos)
- new_pos = pos + size
- if new_pos > end:
- raise _DecodeError('Truncated message.')
- # Read sub-message.
- if value._InternalParse(buffer, pos, new_pos) != new_pos:
- # The only reason _InternalParse would return early is if it encountered
- # an end-group tag.
- raise _DecodeError('Unexpected end-group tag.')
- return new_pos
- return DecodeField
-
-
-# --------------------------------------------------------------------
-
-MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
-
-def MessageSetItemDecoder(extensions_by_number):
- """Returns a decoder for a MessageSet item.
-
- The parameter is the _extensions_by_number map for the message class.
-
- The message set message looks like this:
- message MessageSet {
- repeated group Item = 1 {
- required int32 type_id = 2;
- required string message = 3;
- }
- }
- """
-
- type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
- message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
- item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)
-
- local_ReadTag = ReadTag
- local_DecodeVarint = _DecodeVarint
- local_SkipField = SkipField
-
- def DecodeItem(buffer, pos, end, message, field_dict):
- message_set_item_start = pos
- type_id = -1
- message_start = -1
- message_end = -1
-
- # Technically, type_id and message can appear in any order, so we need
- # a little loop here.
- while 1:
- (tag_bytes, pos) = local_ReadTag(buffer, pos)
- if tag_bytes == type_id_tag_bytes:
- (type_id, pos) = local_DecodeVarint(buffer, pos)
- elif tag_bytes == message_tag_bytes:
- (size, message_start) = local_DecodeVarint(buffer, pos)
- pos = message_end = message_start + size
- elif tag_bytes == item_end_tag_bytes:
- break
- else:
- pos = SkipField(buffer, pos, end, tag_bytes)
- if pos == -1:
- raise _DecodeError('Missing group end tag.')
-
- if pos > end:
- raise _DecodeError('Truncated message.')
-
- if type_id == -1:
- raise _DecodeError('MessageSet item missing type_id.')
- if message_start == -1:
- raise _DecodeError('MessageSet item missing message.')
-
- extension = extensions_by_number.get(type_id)
- if extension is not None:
- value = field_dict.get(extension)
- if value is None:
- value = field_dict.setdefault(
- extension, extension.message_type._concrete_class())
- if value._InternalParse(buffer, message_start,message_end) != message_end:
- # The only reason _InternalParse would return early is if it encountered
- # an end-group tag.
- raise _DecodeError('Unexpected end-group tag.')
- else:
- if not message._unknown_fields:
- message._unknown_fields = []
- message._unknown_fields.append((MESSAGE_SET_ITEM_TAG,
- buffer[message_set_item_start:pos]))
-
- return pos
-
- return DecodeItem
-
-# --------------------------------------------------------------------
-
-def MapDecoder(field_descriptor, new_default, is_message_map):
- """Returns a decoder for a map field."""
-
- key = field_descriptor
- tag_bytes = encoder.TagBytes(field_descriptor.number,
- wire_format.WIRETYPE_LENGTH_DELIMITED)
- tag_len = len(tag_bytes)
- local_DecodeVarint = _DecodeVarint
- # Can't read _concrete_class yet; might not be initialized.
- message_type = field_descriptor.message_type
-
- def DecodeMap(buffer, pos, end, message, field_dict):
- submsg = message_type._concrete_class()
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- while 1:
- # Read length.
- (size, pos) = local_DecodeVarint(buffer, pos)
- new_pos = pos + size
- if new_pos > end:
- raise _DecodeError('Truncated message.')
- # Read sub-message.
- submsg.Clear()
- if submsg._InternalParse(buffer, pos, new_pos) != new_pos:
- # The only reason _InternalParse would return early is if it
- # encountered an end-group tag.
- raise _DecodeError('Unexpected end-group tag.')
-
- if is_message_map:
- value[submsg.key].MergeFrom(submsg.value)
- else:
- value[submsg.key] = submsg.value
-
- # Predict that the next tag is another copy of the same repeated field.
- pos = new_pos + tag_len
- if buffer[new_pos:pos] != tag_bytes or new_pos == end:
- # Prediction failed. Return.
- return new_pos
-
- return DecodeMap
-
-# --------------------------------------------------------------------
-# Optimization is not as heavy here because calls to SkipField() are rare,
-# except for handling end-group tags.
-
-def _SkipVarint(buffer, pos, end):
- """Skip a varint value. Returns the new position."""
- # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
- # With this code, ord(b'') raises TypeError. Both are handled in
- # python_message.py to generate a 'Truncated message' error.
- while ord(buffer[pos:pos+1]) & 0x80:
- pos += 1
- pos += 1
- if pos > end:
- raise _DecodeError('Truncated message.')
- return pos
-
-def _SkipFixed64(buffer, pos, end):
- """Skip a fixed64 value. Returns the new position."""
-
- pos += 8
- if pos > end:
- raise _DecodeError('Truncated message.')
- return pos
-
-def _SkipLengthDelimited(buffer, pos, end):
- """Skip a length-delimited value. Returns the new position."""
-
- (size, pos) = _DecodeVarint(buffer, pos)
- pos += size
- if pos > end:
- raise _DecodeError('Truncated message.')
- return pos
-
-def _SkipGroup(buffer, pos, end):
- """Skip sub-group. Returns the new position."""
-
- while 1:
- (tag_bytes, pos) = ReadTag(buffer, pos)
- new_pos = SkipField(buffer, pos, end, tag_bytes)
- if new_pos == -1:
- return pos
- pos = new_pos
-
-def _EndGroup(buffer, pos, end):
- """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
-
- return -1
-
-def _SkipFixed32(buffer, pos, end):
- """Skip a fixed32 value. Returns the new position."""
-
- pos += 4
- if pos > end:
- raise _DecodeError('Truncated message.')
- return pos
-
-def _RaiseInvalidWireType(buffer, pos, end):
- """Skip function for unknown wire types. Raises an exception."""
-
- raise _DecodeError('Tag had invalid wire type.')
-
-def _FieldSkipper():
- """Constructs the SkipField function."""
-
- WIRETYPE_TO_SKIPPER = [
- _SkipVarint,
- _SkipFixed64,
- _SkipLengthDelimited,
- _SkipGroup,
- _EndGroup,
- _SkipFixed32,
- _RaiseInvalidWireType,
- _RaiseInvalidWireType,
- ]
-
- wiretype_mask = wire_format.TAG_TYPE_MASK
-
- def SkipField(buffer, pos, end, tag_bytes):
- """Skips a field with the specified tag.
-
- |pos| should point to the byte immediately after the tag.
-
- Returns:
- The new position (after the tag value), or -1 if the tag is an end-group
- tag (in which case the calling loop should break).
- """
-
- # The wire type is always in the first byte since varints are little-endian.
- wire_type = ord(tag_bytes[0:1]) & wiretype_mask
- return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end)
-
- return SkipField
-
-SkipField = _FieldSkipper()
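# --- Editor's illustrative sketch; not part of the original file. ---
# Skipping the varint field encoded as 0x08 0x96 0x01: pos points just past
# the tag byte, and the skipper returns the position after the value.
assert SkipField(b'\x08\x96\x01', 1, 3, b'\x08') == 3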
diff --git a/third_party/protobuf/python/google/protobuf/internal/descriptor_pool_test1.proto b/third_party/protobuf/python/google/protobuf/internal/descriptor_pool_test1.proto
deleted file mode 100644
index 00816b78ec..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/descriptor_pool_test1.proto
+++ /dev/null
@@ -1,96 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-syntax = "proto2";
-
-package google.protobuf.python.internal;
-
-
-message DescriptorPoolTest1 {
- extensions 1000 to max;
-
- enum NestedEnum {
- ALPHA = 1;
- BETA = 2;
- }
-
- optional NestedEnum nested_enum = 1 [default = BETA];
-
- message NestedMessage {
- enum NestedEnum {
- EPSILON = 5;
- ZETA = 6;
- }
- optional NestedEnum nested_enum = 1 [default = ZETA];
- optional string nested_field = 2 [default = "beta"];
- optional DeepNestedMessage deep_nested_message = 3;
-
- message DeepNestedMessage {
- enum NestedEnum {
- ETA = 7;
- THETA = 8;
- }
- optional NestedEnum nested_enum = 1 [default = ETA];
- optional string nested_field = 2 [default = "theta"];
- }
- }
-
- optional NestedMessage nested_message = 2;
-}
-
-message DescriptorPoolTest2 {
- enum NestedEnum {
- GAMMA = 3;
- DELTA = 4;
- }
-
- optional NestedEnum nested_enum = 1 [default = GAMMA];
-
- message NestedMessage {
- enum NestedEnum {
- IOTA = 9;
- KAPPA = 10;
- }
- optional NestedEnum nested_enum = 1 [default = IOTA];
- optional string nested_field = 2 [default = "delta"];
- optional DeepNestedMessage deep_nested_message = 3;
-
- message DeepNestedMessage {
- enum NestedEnum {
- LAMBDA = 11;
- MU = 12;
- }
- optional NestedEnum nested_enum = 1 [default = MU];
- optional string nested_field = 2 [default = "lambda"];
- }
- }
-
- optional NestedMessage nested_message = 2;
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/descriptor_pool_test2.proto b/third_party/protobuf/python/google/protobuf/internal/descriptor_pool_test2.proto
deleted file mode 100644
index e3fa660ce1..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/descriptor_pool_test2.proto
+++ /dev/null
@@ -1,72 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-syntax = "proto2";
-
-package google.protobuf.python.internal;
-
-import "google/protobuf/internal/descriptor_pool_test1.proto";
-
-
-message DescriptorPoolTest3 {
-
- extend DescriptorPoolTest1 {
- optional DescriptorPoolTest3 descriptor_pool_test = 1001;
- }
-
- enum NestedEnum {
- NU = 13;
- XI = 14;
- }
-
- optional NestedEnum nested_enum = 1 [default = XI];
-
- message NestedMessage {
- enum NestedEnum {
- OMICRON = 15;
- PI = 16;
- }
- optional NestedEnum nested_enum = 1 [default = PI];
- optional string nested_field = 2 [default = "nu"];
- optional DeepNestedMessage deep_nested_message = 3;
-
- message DeepNestedMessage {
- enum NestedEnum {
- RHO = 17;
- SIGMA = 18;
- }
- optional NestedEnum nested_enum = 1 [default = RHO];
- optional string nested_field = 2 [default = "sigma"];
- }
- }
-
- optional NestedMessage nested_message = 2;
-}
-
diff --git a/third_party/protobuf/python/google/protobuf/internal/encoder.py b/third_party/protobuf/python/google/protobuf/internal/encoder.py
deleted file mode 100755
index 48ef2df31c..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/encoder.py
+++ /dev/null
@@ -1,823 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Code for encoding protocol message primitives.
-
-Contains the logic for encoding every logical protocol field type
-into one of the 5 physical wire types.
-
-This code is designed to push the Python interpreter's performance to the
-limits.
-
-The basic idea is that at startup time, for every field (i.e. every
-FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
-sizer takes a value of this field's type and computes its byte size. The
-encoder takes a writer function and a value. It encodes the value into byte
-strings and invokes the writer function to write those strings. Typically the
-writer function is the write() method of a BytesIO.
-
-We try to do as much work as possible when constructing the writer and the
-sizer rather than when calling them. In particular:
-* We copy any needed global functions to local variables, so that we do not need
- to do costly global table lookups at runtime.
-* Similarly, we try to do any attribute lookups at startup time if possible.
-* Every field's tag is encoded to bytes at startup, since it can't change at
- runtime.
-* Whatever component of the field size we can compute at startup, we do.
-* We *avoid* sharing code if doing so would make the code slower and not sharing
- does not burden us too much. For example, encoders for repeated fields do
- not just call the encoders for singular fields in a loop because this would
- add an extra function call overhead for every loop iteration; instead, we
- manually inline the single-value encoder into the loop.
-* If a Python function lacks a return statement, Python actually generates
- instructions to pop the result of the last statement off the stack, push
- None onto the stack, and then return that. If we really don't care what
- value is returned, then we can save two instructions by returning the
- result of the last statement. It looks funny but it helps.
-* We assume that type and bounds checking has happened at a higher level.
-"""
-
-__author__ = 'kenton@google.com (Kenton Varda)'
-
-import struct
-
-import six
-
-from google.protobuf.internal import wire_format
-
-
-# This will overflow and thus become IEEE-754 "infinity". We would use
-# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
-_POS_INF = 1e10000
-_NEG_INF = -_POS_INF
-
-
-def _VarintSize(value):
- """Compute the size of a varint value."""
- if value <= 0x7f: return 1
- if value <= 0x3fff: return 2
- if value <= 0x1fffff: return 3
- if value <= 0xfffffff: return 4
- if value <= 0x7ffffffff: return 5
- if value <= 0x3ffffffffff: return 6
- if value <= 0x1ffffffffffff: return 7
- if value <= 0xffffffffffffff: return 8
- if value <= 0x7fffffffffffffff: return 9
- return 10
-
-
-def _SignedVarintSize(value):
- """Compute the size of a signed varint value."""
- if value < 0: return 10
- if value <= 0x7f: return 1
- if value <= 0x3fff: return 2
- if value <= 0x1fffff: return 3
- if value <= 0xfffffff: return 4
- if value <= 0x7ffffffff: return 5
- if value <= 0x3ffffffffff: return 6
- if value <= 0x1ffffffffffff: return 7
- if value <= 0xffffffffffffff: return 8
- if value <= 0x7fffffffffffffff: return 9
- return 10
-
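# --- Editor's illustrative sketch; not part of the original file. ---
# 150 fits in two varint bytes, while any negative int64 takes the full ten
# bytes because it is sign-extended to 64 bits on the wire.
assert _VarintSize(150) == 2
assert _SignedVarintSize(-1) == 10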
-
-def _TagSize(field_number):
- """Returns the number of bytes required to serialize a tag with this field
- number."""
- # Just pass in type 0, since the type won't affect the tag+type size.
- return _VarintSize(wire_format.PackTag(field_number, 0))
-
-
-# --------------------------------------------------------------------
-# In this section we define some generic sizers. Each of these functions
-# takes parameters specific to a particular field type, e.g. int32 or fixed64.
-# It returns another function which in turn takes parameters specific to a
-# particular field, e.g. the field number and whether it is repeated or packed.
-# Look at the next section to see how these are used.
-
-
-def _SimpleSizer(compute_value_size):
- """A sizer which uses the function compute_value_size to compute the size of
- each value. Typically compute_value_size is _VarintSize."""
-
- def SpecificSizer(field_number, is_repeated, is_packed):
- tag_size = _TagSize(field_number)
- if is_packed:
- local_VarintSize = _VarintSize
- def PackedFieldSize(value):
- result = 0
- for element in value:
- result += compute_value_size(element)
- return result + local_VarintSize(result) + tag_size
- return PackedFieldSize
- elif is_repeated:
- def RepeatedFieldSize(value):
- result = tag_size * len(value)
- for element in value:
- result += compute_value_size(element)
- return result
- return RepeatedFieldSize
- else:
- def FieldSize(value):
- return tag_size + compute_value_size(value)
- return FieldSize
-
- return SpecificSizer
-
-
-def _ModifiedSizer(compute_value_size, modify_value):
-  """Like _SimpleSizer, but modify_value is invoked on each value before it is
- passed to compute_value_size. modify_value is typically ZigZagEncode."""
-
- def SpecificSizer(field_number, is_repeated, is_packed):
- tag_size = _TagSize(field_number)
- if is_packed:
- local_VarintSize = _VarintSize
- def PackedFieldSize(value):
- result = 0
- for element in value:
- result += compute_value_size(modify_value(element))
- return result + local_VarintSize(result) + tag_size
- return PackedFieldSize
- elif is_repeated:
- def RepeatedFieldSize(value):
- result = tag_size * len(value)
- for element in value:
- result += compute_value_size(modify_value(element))
- return result
- return RepeatedFieldSize
- else:
- def FieldSize(value):
- return tag_size + compute_value_size(modify_value(value))
- return FieldSize
-
- return SpecificSizer
-
-
-def _FixedSizer(value_size):
- """Like _SimpleSizer except for a fixed-size field. The input is the size
- of one value."""
-
- def SpecificSizer(field_number, is_repeated, is_packed):
- tag_size = _TagSize(field_number)
- if is_packed:
- local_VarintSize = _VarintSize
- def PackedFieldSize(value):
- result = len(value) * value_size
- return result + local_VarintSize(result) + tag_size
- return PackedFieldSize
- elif is_repeated:
- element_size = value_size + tag_size
- def RepeatedFieldSize(value):
- return len(value) * element_size
- return RepeatedFieldSize
- else:
- field_size = value_size + tag_size
- def FieldSize(value):
- return field_size
- return FieldSize
-
- return SpecificSizer
-
-
-# ====================================================================
-# Here we declare a sizer constructor for each field type. Each "sizer
-# constructor" is a function that takes (field_number, is_repeated, is_packed)
-# as parameters and returns a sizer, which in turn takes a field value as
-# a parameter and returns its encoded size.
-
-
-Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)
-
-UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)
-
-SInt32Sizer = SInt64Sizer = _ModifiedSizer(
- _SignedVarintSize, wire_format.ZigZagEncode)
-
-Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
-Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)
-
-BoolSizer = _FixedSizer(1)
-
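# --- Editor's illustrative sketch; not part of the original file. ---
# Sizing a singular int32 field number 1 holding the value 150: one tag byte
# plus a two-byte varint payload.
_sketch_sizer = Int32Sizer(1, is_repeated=False, is_packed=False)
assert _sketch_sizer(150) == 3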
-
-def StringSizer(field_number, is_repeated, is_packed):
- """Returns a sizer for a string field."""
-
- tag_size = _TagSize(field_number)
- local_VarintSize = _VarintSize
- local_len = len
- assert not is_packed
- if is_repeated:
- def RepeatedFieldSize(value):
- result = tag_size * len(value)
- for element in value:
- l = local_len(element.encode('utf-8'))
- result += local_VarintSize(l) + l
- return result
- return RepeatedFieldSize
- else:
- def FieldSize(value):
- l = local_len(value.encode('utf-8'))
- return tag_size + local_VarintSize(l) + l
- return FieldSize
-
-
-def BytesSizer(field_number, is_repeated, is_packed):
- """Returns a sizer for a bytes field."""
-
- tag_size = _TagSize(field_number)
- local_VarintSize = _VarintSize
- local_len = len
- assert not is_packed
- if is_repeated:
- def RepeatedFieldSize(value):
- result = tag_size * len(value)
- for element in value:
- l = local_len(element)
- result += local_VarintSize(l) + l
- return result
- return RepeatedFieldSize
- else:
- def FieldSize(value):
- l = local_len(value)
- return tag_size + local_VarintSize(l) + l
- return FieldSize
-
-
-def GroupSizer(field_number, is_repeated, is_packed):
- """Returns a sizer for a group field."""
-
- tag_size = _TagSize(field_number) * 2
- assert not is_packed
- if is_repeated:
- def RepeatedFieldSize(value):
- result = tag_size * len(value)
- for element in value:
- result += element.ByteSize()
- return result
- return RepeatedFieldSize
- else:
- def FieldSize(value):
- return tag_size + value.ByteSize()
- return FieldSize
-
-
-def MessageSizer(field_number, is_repeated, is_packed):
- """Returns a sizer for a message field."""
-
- tag_size = _TagSize(field_number)
- local_VarintSize = _VarintSize
- assert not is_packed
- if is_repeated:
- def RepeatedFieldSize(value):
- result = tag_size * len(value)
- for element in value:
- l = element.ByteSize()
- result += local_VarintSize(l) + l
- return result
- return RepeatedFieldSize
- else:
- def FieldSize(value):
- l = value.ByteSize()
- return tag_size + local_VarintSize(l) + l
- return FieldSize
-
-
-# --------------------------------------------------------------------
-# MessageSet is special: it needs custom logic to compute its size properly.
-
-
-def MessageSetItemSizer(field_number):
- """Returns a sizer for extensions of MessageSet.
-
- The message set message looks like this:
- message MessageSet {
- repeated group Item = 1 {
- required int32 type_id = 2;
- required string message = 3;
- }
- }
- """
- static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
- _TagSize(3))
- local_VarintSize = _VarintSize
-
- def FieldSize(value):
- l = value.ByteSize()
- return static_size + local_VarintSize(l) + l
-
- return FieldSize
-
-
-# --------------------------------------------------------------------
-# Map is special: it needs custom logic to compute its size properly.
-
-
-def MapSizer(field_descriptor):
- """Returns a sizer for a map field."""
-
- # Can't look at field_descriptor.message_type._concrete_class because it may
- # not have been initialized yet.
- message_type = field_descriptor.message_type
- message_sizer = MessageSizer(field_descriptor.number, False, False)
-
- def FieldSize(map_value):
- total = 0
- for key in map_value:
- value = map_value[key]
- # It's wasteful to create the messages and throw them away one second
- # later since we'll do the same for the actual encode. But there's not an
- # obvious way to avoid this within the current design without tons of code
- # duplication.
- entry_msg = message_type._concrete_class(key=key, value=value)
- total += message_sizer(entry_msg)
- return total
-
- return FieldSize
-
-# ====================================================================
-# Encoders!
-
-
-def _VarintEncoder():
- """Return an encoder for a basic varint value (does not include tag)."""
-
- def EncodeVarint(write, value):
- bits = value & 0x7f
- value >>= 7
- while value:
- write(six.int2byte(0x80|bits))
- bits = value & 0x7f
- value >>= 7
- return write(six.int2byte(bits))
-
- return EncodeVarint
-
-
-def _SignedVarintEncoder():
- """Return an encoder for a basic signed varint value (does not include
- tag)."""
-
- def EncodeSignedVarint(write, value):
- if value < 0:
- value += (1 << 64)
- bits = value & 0x7f
- value >>= 7
- while value:
- write(six.int2byte(0x80|bits))
- bits = value & 0x7f
- value >>= 7
- return write(six.int2byte(bits))
-
- return EncodeSignedVarint
-
-
-_EncodeVarint = _VarintEncoder()
-_EncodeSignedVarint = _SignedVarintEncoder()
-
-
-def _VarintBytes(value):
- """Encode the given integer as a varint and return the bytes. This is only
- called at startup time so it doesn't need to be fast."""
-
- pieces = []
- _EncodeVarint(pieces.append, value)
- return b"".join(pieces)
-
-
-def TagBytes(field_number, wire_type):
- """Encode the given tag and return the bytes. Only called at startup."""
-
- return _VarintBytes(wire_format.PackTag(field_number, wire_type))
-
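
Since varints underpin the whole wire format, it may help to see the bytes these helpers would produce. A small self-contained sketch of the same base-128 encoding (the values are hypothetical, chosen only to show the one-byte and two-byte cases):

def encode_varint(value):
  # Base-128 varint: 7 payload bits per byte, MSB set on all but the last byte.
  out = bytearray()
  while value > 0x7f:
    out.append(0x80 | (value & 0x7f))
    value >>= 7
  out.append(value)
  return bytes(out)

print(encode_varint(300))            # b'\xac\x02'
print(encode_varint((16 << 3) | 0))  # tag for field 16, varint wire type -> b'\x80\x01'
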
-# --------------------------------------------------------------------
-# As with sizers (see above), we have a number of common encoder
-# implementations.
-
-
-def _SimpleEncoder(wire_type, encode_value, compute_value_size):
- """Return a constructor for an encoder for fields of a particular type.
-
- Args:
- wire_type: The field's wire type, for encoding tags.
- encode_value: A function which encodes an individual value, e.g.
- _EncodeVarint().
- compute_value_size: A function which computes the size of an individual
- value, e.g. _VarintSize().
- """
-
- def SpecificEncoder(field_number, is_repeated, is_packed):
- if is_packed:
- tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
- local_EncodeVarint = _EncodeVarint
- def EncodePackedField(write, value):
- write(tag_bytes)
- size = 0
- for element in value:
- size += compute_value_size(element)
- local_EncodeVarint(write, size)
- for element in value:
- encode_value(write, element)
- return EncodePackedField
- elif is_repeated:
- tag_bytes = TagBytes(field_number, wire_type)
- def EncodeRepeatedField(write, value):
- for element in value:
- write(tag_bytes)
- encode_value(write, element)
- return EncodeRepeatedField
- else:
- tag_bytes = TagBytes(field_number, wire_type)
- def EncodeField(write, value):
- write(tag_bytes)
- return encode_value(write, value)
- return EncodeField
-
- return SpecificEncoder
-
-
-def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
- """Like SimpleEncoder but additionally invokes modify_value on every value
- before passing it to encode_value. Usually modify_value is ZigZagEncode."""
-
- def SpecificEncoder(field_number, is_repeated, is_packed):
- if is_packed:
- tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
- local_EncodeVarint = _EncodeVarint
- def EncodePackedField(write, value):
- write(tag_bytes)
- size = 0
- for element in value:
- size += compute_value_size(modify_value(element))
- local_EncodeVarint(write, size)
- for element in value:
- encode_value(write, modify_value(element))
- return EncodePackedField
- elif is_repeated:
- tag_bytes = TagBytes(field_number, wire_type)
- def EncodeRepeatedField(write, value):
- for element in value:
- write(tag_bytes)
- encode_value(write, modify_value(element))
- return EncodeRepeatedField
- else:
- tag_bytes = TagBytes(field_number, wire_type)
- def EncodeField(write, value):
- write(tag_bytes)
- return encode_value(write, modify_value(value))
- return EncodeField
-
- return SpecificEncoder
-
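
The modify_value hook used for sint32/sint64 is ZigZag encoding, which maps signed integers to small unsigned ones so negative values do not cost ten varint bytes. A minimal sketch of the 64-bit mapping, written out directly rather than calling wire_format.ZigZagEncode:

def zigzag_encode(n):
  # 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, ...
  return (n << 1) ^ (n >> 63)

def zigzag_decode(n):
  return (n >> 1) ^ -(n & 1)

for v in (0, -1, 1, -2, 2, -64):
  encoded = zigzag_encode(v)
  assert zigzag_decode(encoded) == v
  print(v, '->', encoded)   # -64 encodes to 127, a single varint byte
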
-
-def _StructPackEncoder(wire_type, format):
- """Return a constructor for an encoder for a fixed-width field.
-
- Args:
- wire_type: The field's wire type, for encoding tags.
- format: The format string to pass to struct.pack().
- """
-
- value_size = struct.calcsize(format)
-
- def SpecificEncoder(field_number, is_repeated, is_packed):
- local_struct_pack = struct.pack
- if is_packed:
- tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
- local_EncodeVarint = _EncodeVarint
- def EncodePackedField(write, value):
- write(tag_bytes)
- local_EncodeVarint(write, len(value) * value_size)
- for element in value:
- write(local_struct_pack(format, element))
- return EncodePackedField
- elif is_repeated:
- tag_bytes = TagBytes(field_number, wire_type)
- def EncodeRepeatedField(write, value):
- for element in value:
- write(tag_bytes)
- write(local_struct_pack(format, element))
- return EncodeRepeatedField
- else:
- tag_bytes = TagBytes(field_number, wire_type)
- def EncodeField(write, value):
- write(tag_bytes)
- return write(local_struct_pack(format, value))
- return EncodeField
-
- return SpecificEncoder
-
-
-def _FloatingPointEncoder(wire_type, format):
- """Return a constructor for an encoder for float fields.
-
- This is like StructPackEncoder, but catches errors that may be due to
- passing non-finite floating-point values to struct.pack, and makes a
- second attempt to encode those values.
-
- Args:
- wire_type: The field's wire type, for encoding tags.
- format: The format string to pass to struct.pack().
- """
-
- value_size = struct.calcsize(format)
- if value_size == 4:
- def EncodeNonFiniteOrRaise(write, value):
- # Remember that the serialized form uses little-endian byte order.
- if value == _POS_INF:
- write(b'\x00\x00\x80\x7F')
- elif value == _NEG_INF:
- write(b'\x00\x00\x80\xFF')
- elif value != value: # NaN
- write(b'\x00\x00\xC0\x7F')
- else:
- raise
- elif value_size == 8:
- def EncodeNonFiniteOrRaise(write, value):
- if value == _POS_INF:
- write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
- elif value == _NEG_INF:
- write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
- elif value != value: # NaN
- write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
- else:
- raise
- else:
- raise ValueError('Can\'t encode floating-point values that are '
- '%d bytes long (only 4 or 8)' % value_size)
-
- def SpecificEncoder(field_number, is_repeated, is_packed):
- local_struct_pack = struct.pack
- if is_packed:
- tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
- local_EncodeVarint = _EncodeVarint
- def EncodePackedField(write, value):
- write(tag_bytes)
- local_EncodeVarint(write, len(value) * value_size)
- for element in value:
- # This try/except block is going to be faster than any code that
- # we could write to check whether element is finite.
- try:
- write(local_struct_pack(format, element))
- except SystemError:
- EncodeNonFiniteOrRaise(write, element)
- return EncodePackedField
- elif is_repeated:
- tag_bytes = TagBytes(field_number, wire_type)
- def EncodeRepeatedField(write, value):
- for element in value:
- write(tag_bytes)
- try:
- write(local_struct_pack(format, element))
- except SystemError:
- EncodeNonFiniteOrRaise(write, element)
- return EncodeRepeatedField
- else:
- tag_bytes = TagBytes(field_number, wire_type)
- def EncodeField(write, value):
- write(tag_bytes)
- try:
- write(local_struct_pack(format, value))
- except SystemError:
- EncodeNonFiniteOrRaise(write, value)
- return EncodeField
-
- return SpecificEncoder
-
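
The hard-coded byte strings above are simply the little-endian IEEE 754 encodings of +inf, -inf and a quiet NaN. On current CPython, struct.pack produces the same bytes directly (the SystemError fallback appears to date from older Python versions), which a quick sketch can confirm:

import struct

print(struct.pack('<f', float('inf')))    # b'\x00\x00\x80\x7f'
print(struct.pack('<f', float('-inf')))   # b'\x00\x00\x80\xff'
print(struct.pack('<d', float('inf')))    # b'\x00\x00\x00\x00\x00\x00\xf0\x7f'
print(struct.pack('<d', float('nan')))    # b'\x00\x00\x00\x00\x00\x00\xf8\x7f' on most platforms
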
-
-# ====================================================================
-# Here we declare an encoder constructor for each field type. These work
-# very similarly to sizer constructors, described earlier.
-
-
-Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
- wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)
-
-UInt32Encoder = UInt64Encoder = _SimpleEncoder(
- wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)
-
-SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
- wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
- wire_format.ZigZagEncode)
-
-# Note that Python conveniently guarantees that when using the '<' prefix on
-# formats, they will also have the same size across all platforms (as opposed
-# to without the prefix, where their sizes depend on the C compiler's basic
-# type sizes).
-Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
-Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
-SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
-SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
-FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
-DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
-
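
Each of these constructors returns a closure that writes tag-prefixed bytes through a write callback. A hedged usage sketch, assuming the module is importable as google.protobuf.internal.encoder (an internal API that may change between protobuf versions):

from google.protobuf.internal import encoder

pieces = []
encode = encoder.Fixed32Encoder(field_number=1, is_repeated=False, is_packed=False)
encode(pieces.append, 42)
print(b''.join(pieces))   # b'\r*\x00\x00\x00': tag 0x0d, then 42 as little-endian fixed32
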
-
-def BoolEncoder(field_number, is_repeated, is_packed):
- """Returns an encoder for a boolean field."""
-
- false_byte = b'\x00'
- true_byte = b'\x01'
- if is_packed:
- tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
- local_EncodeVarint = _EncodeVarint
- def EncodePackedField(write, value):
- write(tag_bytes)
- local_EncodeVarint(write, len(value))
- for element in value:
- if element:
- write(true_byte)
- else:
- write(false_byte)
- return EncodePackedField
- elif is_repeated:
- tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
- def EncodeRepeatedField(write, value):
- for element in value:
- write(tag_bytes)
- if element:
- write(true_byte)
- else:
- write(false_byte)
- return EncodeRepeatedField
- else:
- tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
- def EncodeField(write, value):
- write(tag_bytes)
- if value:
- return write(true_byte)
- return write(false_byte)
- return EncodeField
-
-
-def StringEncoder(field_number, is_repeated, is_packed):
- """Returns an encoder for a string field."""
-
- tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
- local_EncodeVarint = _EncodeVarint
- local_len = len
- assert not is_packed
- if is_repeated:
- def EncodeRepeatedField(write, value):
- for element in value:
- encoded = element.encode('utf-8')
- write(tag)
- local_EncodeVarint(write, local_len(encoded))
- write(encoded)
- return EncodeRepeatedField
- else:
- def EncodeField(write, value):
- encoded = value.encode('utf-8')
- write(tag)
- local_EncodeVarint(write, local_len(encoded))
- return write(encoded)
- return EncodeField
-
-
-def BytesEncoder(field_number, is_repeated, is_packed):
- """Returns an encoder for a bytes field."""
-
- tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
- local_EncodeVarint = _EncodeVarint
- local_len = len
- assert not is_packed
- if is_repeated:
- def EncodeRepeatedField(write, value):
- for element in value:
- write(tag)
- local_EncodeVarint(write, local_len(element))
- write(element)
- return EncodeRepeatedField
- else:
- def EncodeField(write, value):
- write(tag)
- local_EncodeVarint(write, local_len(value))
- return write(value)
- return EncodeField
-
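
String and bytes fields share the same length-delimited layout: tag, varint length, then the raw UTF-8 or bytes payload. A byte-level sketch for a hypothetical string field number 2:

value = 'hi'.encode('utf-8')
tag = bytes([(2 << 3) | 2])     # field 2, wire type 2 (length-delimited) -> b'\x12'
length = bytes([len(value)])    # payloads under 128 bytes need a single varint byte
print(tag + length + value)     # b'\x12\x02hi'
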
-
-def GroupEncoder(field_number, is_repeated, is_packed):
- """Returns an encoder for a group field."""
-
- start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
- end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
- assert not is_packed
- if is_repeated:
- def EncodeRepeatedField(write, value):
- for element in value:
- write(start_tag)
- element._InternalSerialize(write)
- write(end_tag)
- return EncodeRepeatedField
- else:
- def EncodeField(write, value):
- write(start_tag)
- value._InternalSerialize(write)
- return write(end_tag)
- return EncodeField
-
-
-def MessageEncoder(field_number, is_repeated, is_packed):
- """Returns an encoder for a message field."""
-
- tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
- local_EncodeVarint = _EncodeVarint
- assert not is_packed
- if is_repeated:
- def EncodeRepeatedField(write, value):
- for element in value:
- write(tag)
- local_EncodeVarint(write, element.ByteSize())
- element._InternalSerialize(write)
- return EncodeRepeatedField
- else:
- def EncodeField(write, value):
- write(tag)
- local_EncodeVarint(write, value.ByteSize())
- return value._InternalSerialize(write)
- return EncodeField
-
-
-# --------------------------------------------------------------------
-# As before, MessageSet is special.
-
-
-def MessageSetItemEncoder(field_number):
- """Encoder for extensions of MessageSet.
-
- The message set message looks like this:
- message MessageSet {
- repeated group Item = 1 {
- required int32 type_id = 2;
- required string message = 3;
- }
- }
- """
- start_bytes = b"".join([
- TagBytes(1, wire_format.WIRETYPE_START_GROUP),
- TagBytes(2, wire_format.WIRETYPE_VARINT),
- _VarintBytes(field_number),
- TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
- end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
- local_EncodeVarint = _EncodeVarint
-
- def EncodeField(write, value):
- write(start_bytes)
- local_EncodeVarint(write, value.ByteSize())
- value._InternalSerialize(write)
- return write(end_bytes)
-
- return EncodeField
-
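
Spelled out at the byte level, one MessageSet item is a group on field 1 wrapping a varint type_id (field 2) and a length-delimited payload (field 3). A sketch of that framing with hypothetical values small enough to fit in single varint bytes:

type_id = 5          # hypothetical extension field number (real ones are much larger)
payload = b'abc'     # hypothetical serialized extension message

item = (bytes([(1 << 3) | 3]) +                             # 0x0b: start group, field 1
        bytes([(2 << 3) | 0]) + bytes([type_id]) +          # 0x10 0x05: type_id
        bytes([(3 << 3) | 2]) + bytes([len(payload)]) + payload +  # 0x1a 0x03 'abc'
        bytes([(1 << 3) | 4]))                              # 0x0c: end group, field 1
print(item.hex())    # 0b10051a036162630c
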
-
-# --------------------------------------------------------------------
-# As before, Map is special.
-
-
-def MapEncoder(field_descriptor):
-  """Encoder for a map field.
-
- Maps always have a wire format like this:
- message MapEntry {
- key_type key = 1;
- value_type value = 2;
- }
- repeated MapEntry map = N;
- """
- # Can't look at field_descriptor.message_type._concrete_class because it may
- # not have been initialized yet.
- message_type = field_descriptor.message_type
- encode_message = MessageEncoder(field_descriptor.number, False, False)
-
- def EncodeField(write, value):
- for key in value:
- entry_msg = message_type._concrete_class(key=key, value=value[key])
- encode_message(write, entry_msg)
-
- return EncodeField
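
On the wire a map field is indistinguishable from a repeated embedded MapEntry message, which is why MapEncoder can reuse MessageEncoder for each key/value pair. For a hypothetical map<int32, int32> on field 1 containing {2: 3}, the bytes work out as in this sketch:

entry = bytes([(1 << 3) | 0, 2,    # MapEntry.key   (field 1, varint): 2
               (2 << 3) | 0, 3])   # MapEntry.value (field 2, varint): 3
wire = bytes([(1 << 3) | 2, len(entry)]) + entry   # outer map field 1, length-delimited
print(wire.hex())   # 0a0408021003
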
diff --git a/third_party/protobuf/python/google/protobuf/internal/enum_type_wrapper.py b/third_party/protobuf/python/google/protobuf/internal/enum_type_wrapper.py
deleted file mode 100644
index 1cffe35295..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/enum_type_wrapper.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""A simple wrapper around enum types to expose utility functions.
-
-Instances are created as properties with the same name as the enum they wrap
-on proto classes. For usage, see:
- reflection_test.py
-"""
-
-__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
-
-
-class EnumTypeWrapper(object):
- """A utility for finding the names of enum values."""
-
- DESCRIPTOR = None
-
- def __init__(self, enum_type):
- """Inits EnumTypeWrapper with an EnumDescriptor."""
- self._enum_type = enum_type
-    self.DESCRIPTOR = enum_type
-
- def Name(self, number):
- """Returns a string containing the name of an enum value."""
- if number in self._enum_type.values_by_number:
- return self._enum_type.values_by_number[number].name
- raise ValueError('Enum %s has no name defined for value %d' % (
- self._enum_type.name, number))
-
- def Value(self, name):
-    """Returns the value corresponding to the given enum name."""
- if name in self._enum_type.values_by_name:
- return self._enum_type.values_by_name[name].number
- raise ValueError('Enum %s has no value defined for name %s' % (
- self._enum_type.name, name))
-
- def keys(self):
- """Return a list of the string names in the enum.
-
- These are returned in the order they were defined in the .proto file.
- """
-
- return [value_descriptor.name
- for value_descriptor in self._enum_type.values]
-
- def values(self):
- """Return a list of the integer values in the enum.
-
- These are returned in the order they were defined in the .proto file.
- """
-
- return [value_descriptor.number
- for value_descriptor in self._enum_type.values]
-
- def items(self):
- """Return a list of the (name, value) pairs of the enum.
-
- These are returned in the order they were defined in the .proto file.
- """
- return [(value_descriptor.name, value_descriptor.number)
- for value_descriptor in self._enum_type.values]
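
Generated _pb2 modules expose an EnumTypeWrapper instance under the enum's own name, so typical client usage looks like the sketch below (my_proto_pb2 and Color are hypothetical stand-ins for a generated module and an enum with RED = 0 and GREEN = 1):

from my_proto_pb2 import Color   # hypothetical generated module

print(Color.Name(0))          # 'RED'
print(Color.Value('GREEN'))   # 1
print(Color.items())          # [('RED', 0), ('GREEN', 1)], in .proto declaration order
# Unknown values raise rather than returning None:
# Color.Name(42)  -> ValueError: Enum Color has no name defined for value 42
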
diff --git a/third_party/protobuf/python/google/protobuf/internal/factory_test1.proto b/third_party/protobuf/python/google/protobuf/internal/factory_test1.proto
deleted file mode 100644
index d2fbbeecf1..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/factory_test1.proto
+++ /dev/null
@@ -1,58 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: matthewtoia@google.com (Matt Toia)
-
-syntax = "proto2";
-
-package google.protobuf.python.internal;
-
-
-enum Factory1Enum {
- FACTORY_1_VALUE_0 = 0;
- FACTORY_1_VALUE_1 = 1;
-}
-
-message Factory1Message {
- optional Factory1Enum factory_1_enum = 1;
- enum NestedFactory1Enum {
- NESTED_FACTORY_1_VALUE_0 = 0;
- NESTED_FACTORY_1_VALUE_1 = 1;
- }
- optional NestedFactory1Enum nested_factory_1_enum = 2;
- message NestedFactory1Message {
- optional string value = 1;
- }
- optional NestedFactory1Message nested_factory_1_message = 3;
- optional int32 scalar_value = 4;
- repeated string list_value = 5;
-
- extensions 1000 to max;
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/factory_test2.proto b/third_party/protobuf/python/google/protobuf/internal/factory_test2.proto
deleted file mode 100644
index bb1b54ada2..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/factory_test2.proto
+++ /dev/null
@@ -1,99 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: matthewtoia@google.com (Matt Toia)
-
-syntax = "proto2";
-
-package google.protobuf.python.internal;
-
-import "google/protobuf/internal/factory_test1.proto";
-
-
-enum Factory2Enum {
- FACTORY_2_VALUE_0 = 0;
- FACTORY_2_VALUE_1 = 1;
-}
-
-message Factory2Message {
- required int32 mandatory = 1;
- optional Factory2Enum factory_2_enum = 2;
- enum NestedFactory2Enum {
- NESTED_FACTORY_2_VALUE_0 = 0;
- NESTED_FACTORY_2_VALUE_1 = 1;
- }
- optional NestedFactory2Enum nested_factory_2_enum = 3;
- message NestedFactory2Message {
- optional string value = 1;
- }
- optional NestedFactory2Message nested_factory_2_message = 4;
- optional Factory1Message factory_1_message = 5;
- optional Factory1Enum factory_1_enum = 6;
- optional Factory1Message.NestedFactory1Enum nested_factory_1_enum = 7;
- optional Factory1Message.NestedFactory1Message nested_factory_1_message = 8;
- optional Factory2Message circular_message = 9;
- optional string scalar_value = 10;
- repeated string list_value = 11;
- repeated group Grouped = 12 {
- optional string part_1 = 13;
- optional string part_2 = 14;
- }
- optional LoopMessage loop = 15;
- optional int32 int_with_default = 16 [default = 1776];
- optional double double_with_default = 17 [default = 9.99];
- optional string string_with_default = 18 [default = "hello world"];
- optional bool bool_with_default = 19 [default = false];
- optional Factory2Enum enum_with_default = 20 [default = FACTORY_2_VALUE_1];
- optional bytes bytes_with_default = 21 [default = "a\373\000c"];
-
-
- extend Factory1Message {
- optional string one_more_field = 1001;
- }
-
- oneof oneof_field {
- int32 oneof_int = 22;
- string oneof_string = 23;
- }
-}
-
-message LoopMessage {
- optional Factory2Message loop = 1;
-}
-
-message MessageWithNestedEnumOnly {
- enum NestedEnum {
- NESTED_MESSAGE_ENUM_0 = 0;
- }
-}
-
-extend Factory1Message {
- optional string another_field = 1002;
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/import_test_package/__init__.py b/third_party/protobuf/python/google/protobuf/internal/import_test_package/__init__.py
deleted file mode 100644
index 5121dd0ec5..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/import_test_package/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Sample module importing a nested proto from itself."""
-
-from google.protobuf.internal.import_test_package import outer_pb2 as myproto
diff --git a/third_party/protobuf/python/google/protobuf/internal/import_test_package/inner.proto b/third_party/protobuf/python/google/protobuf/internal/import_test_package/inner.proto
deleted file mode 100644
index 2887c1230e..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/import_test_package/inner.proto
+++ /dev/null
@@ -1,37 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-syntax = "proto2";
-
-package google.protobuf.python.internal.import_test_package;
-
-message Inner {
- optional int32 value = 1 [default = 57];
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/import_test_package/outer.proto b/third_party/protobuf/python/google/protobuf/internal/import_test_package/outer.proto
deleted file mode 100644
index a27fb5c8f4..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/import_test_package/outer.proto
+++ /dev/null
@@ -1,39 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-syntax = "proto2";
-
-package google.protobuf.python.internal.import_test_package;
-
-import "google/protobuf/internal/import_test_package/inner.proto";
-
-message Outer {
- optional Inner inner = 1;
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/message_listener.py b/third_party/protobuf/python/google/protobuf/internal/message_listener.py
deleted file mode 100755
index 0fc255a774..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/message_listener.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Defines a listener interface for observing certain
-state transitions on Message objects.
-
-Also defines a null implementation of this interface.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-
-class MessageListener(object):
-
- """Listens for modifications made to a message. Meant to be registered via
- Message._SetListener().
-
- Attributes:
- dirty: If True, then calling Modified() would be a no-op. This can be
- used to avoid these calls entirely in the common case.
- """
-
- def Modified(self):
- """Called every time the message is modified in such a way that the parent
- message may need to be updated. This currently means either:
- (a) The message was modified for the first time, so the parent message
- should henceforth mark the message as present.
- (b) The message's cached byte size became dirty -- i.e. the message was
- modified for the first time after a previous call to ByteSize().
- Therefore the parent should also mark its byte size as dirty.
-    Note that (a) implies (b), since new objects start out with a cached byte
-    size of zero.  However, we document (a) explicitly because it is important.
-
- Modified() will *only* be called in response to one of these two events --
- not every time the sub-message is modified.
-
- Note that if the listener's |dirty| attribute is true, then calling
- Modified at the moment would be a no-op, so it can be skipped. Performance-
- sensitive callers should check this attribute directly before calling since
- it will be true most of the time.
- """
-
- raise NotImplementedError
-
-
-class NullMessageListener(object):
-
- """No-op MessageListener implementation."""
-
- def Modified(self):
- pass
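
The null implementation above is essentially the whole contract: a working listener only needs a dirty attribute and a Modified() method. A toy sketch along those lines (not the listener the runtime actually installs):

class RecordingListener(object):
  """Toy MessageListener that just records that something changed."""

  def __init__(self):
    self.dirty = False

  def Modified(self):
    if self.dirty:      # callers may skip the call entirely when dirty is already True
      return
    self.dirty = True
    print('sub-message modified for the first time since the last ByteSize()')
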
diff --git a/third_party/protobuf/python/google/protobuf/internal/message_set_extensions.proto b/third_party/protobuf/python/google/protobuf/internal/message_set_extensions.proto
deleted file mode 100644
index 14e5f19375..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/message_set_extensions.proto
+++ /dev/null
@@ -1,74 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// This file contains messages that extend MessageSet.
-
-syntax = "proto2";
-package google.protobuf.internal;
-
-
-// A message with message_set_wire_format.
-message TestMessageSet {
- option message_set_wire_format = true;
- extensions 4 to max;
-}
-
-message TestMessageSetExtension1 {
- extend TestMessageSet {
- optional TestMessageSetExtension1 message_set_extension = 98418603;
- }
- optional int32 i = 15;
-}
-
-message TestMessageSetExtension2 {
- extend TestMessageSet {
- optional TestMessageSetExtension2 message_set_extension = 98418634;
- }
- optional string str = 25;
-}
-
-message TestMessageSetExtension3 {
- optional string text = 35;
-}
-
-extend TestMessageSet {
- optional TestMessageSetExtension3 message_set_extension3 = 98418655;
-}
-
-// This message was used to generate
-// //net/proto2/python/internal/testdata/message_set_message, but is commented
-// out since it must not actually exist in code, to simulate an "unknown"
-// extension.
-// message TestMessageSetUnknownExtension {
-// extend TestMessageSet {
-// optional TestMessageSetUnknownExtension message_set_extension = 56141421;
-// }
-// optional int64 a = 1;
-// }
diff --git a/third_party/protobuf/python/google/protobuf/internal/missing_enum_values.proto b/third_party/protobuf/python/google/protobuf/internal/missing_enum_values.proto
deleted file mode 100644
index 1850be5bb7..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/missing_enum_values.proto
+++ /dev/null
@@ -1,56 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-syntax = "proto2";
-
-package google.protobuf.python.internal;
-
-message TestEnumValues {
- enum NestedEnum {
- ZERO = 0;
- ONE = 1;
- }
- optional NestedEnum optional_nested_enum = 1;
- repeated NestedEnum repeated_nested_enum = 2;
- repeated NestedEnum packed_nested_enum = 3 [packed = true];
-}
-
-message TestMissingEnumValues {
- enum NestedEnum {
- TWO = 2;
- }
- optional NestedEnum optional_nested_enum = 1;
- repeated NestedEnum repeated_nested_enum = 2;
- repeated NestedEnum packed_nested_enum = 3 [packed = true];
-}
-
-message JustString {
- required string dummy = 1;
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/more_extensions.proto b/third_party/protobuf/python/google/protobuf/internal/more_extensions.proto
deleted file mode 100644
index 78f1467361..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/more_extensions.proto
+++ /dev/null
@@ -1,59 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: robinson@google.com (Will Robinson)
-
-syntax = "proto2";
-
-package google.protobuf.internal;
-
-
-message TopLevelMessage {
- optional ExtendedMessage submessage = 1;
-}
-
-
-message ExtendedMessage {
- extensions 1 to max;
-}
-
-
-message ForeignMessage {
- optional int32 foreign_message_int = 1;
-}
-
-
-extend ExtendedMessage {
- optional int32 optional_int_extension = 1;
- optional ForeignMessage optional_message_extension = 2;
-
- repeated int32 repeated_int_extension = 3;
- repeated ForeignMessage repeated_message_extension = 4;
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/more_extensions_dynamic.proto b/third_party/protobuf/python/google/protobuf/internal/more_extensions_dynamic.proto
deleted file mode 100644
index 11f85ef60c..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/more_extensions_dynamic.proto
+++ /dev/null
@@ -1,50 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: jasonh@google.com (Jason Hsueh)
-//
-// This file is used to test a corner case in the CPP implementation where the
-// generated C++ type is available for the extendee, but the extension is
-// defined in a file whose C++ type is not in the binary.
-
-syntax = "proto2";
-
-import "google/protobuf/internal/more_extensions.proto";
-
-package google.protobuf.internal;
-
-message DynamicMessageType {
- optional int32 a = 1;
-}
-
-extend ExtendedMessage {
- optional int32 dynamic_int32_extension = 100;
- optional DynamicMessageType dynamic_message_extension = 101;
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/more_messages.proto b/third_party/protobuf/python/google/protobuf/internal/more_messages.proto
deleted file mode 100644
index 2c6ab9efdf..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/more_messages.proto
+++ /dev/null
@@ -1,52 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: robinson@google.com (Will Robinson)
-
-syntax = "proto2";
-
-package google.protobuf.internal;
-
-// A message where tag numbers are listed out of order, to allow us to test our
-// canonicalization of serialized output, which should always be in tag order.
-// We also mix in some extensions for extra fun.
-message OutOfOrderFields {
- optional sint32 optional_sint32 = 5;
- extensions 4 to 4;
- optional uint32 optional_uint32 = 3;
- extensions 2 to 2;
- optional int32 optional_int32 = 1;
-};
-
-
-extend OutOfOrderFields {
- optional uint64 optional_uint64 = 4;
- optional int64 optional_int64 = 2;
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/packed_field_test.proto b/third_party/protobuf/python/google/protobuf/internal/packed_field_test.proto
deleted file mode 100644
index 0dfdc10a87..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/packed_field_test.proto
+++ /dev/null
@@ -1,73 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-syntax = "proto3";
-
-package google.protobuf.python.internal;
-
-message TestPackedTypes {
- enum NestedEnum {
- FOO = 0;
- BAR = 1;
- BAZ = 2;
- }
-
- repeated int32 repeated_int32 = 1;
- repeated int64 repeated_int64 = 2;
- repeated uint32 repeated_uint32 = 3;
- repeated uint64 repeated_uint64 = 4;
- repeated sint32 repeated_sint32 = 5;
- repeated sint64 repeated_sint64 = 6;
- repeated fixed32 repeated_fixed32 = 7;
- repeated fixed64 repeated_fixed64 = 8;
- repeated sfixed32 repeated_sfixed32 = 9;
- repeated sfixed64 repeated_sfixed64 = 10;
- repeated float repeated_float = 11;
- repeated double repeated_double = 12;
- repeated bool repeated_bool = 13;
- repeated NestedEnum repeated_nested_enum = 14;
-}
-
-message TestUnpackedTypes {
- repeated int32 repeated_int32 = 1 [packed = false];
- repeated int64 repeated_int64 = 2 [packed = false];
- repeated uint32 repeated_uint32 = 3 [packed = false];
- repeated uint64 repeated_uint64 = 4 [packed = false];
- repeated sint32 repeated_sint32 = 5 [packed = false];
- repeated sint64 repeated_sint64 = 6 [packed = false];
- repeated fixed32 repeated_fixed32 = 7 [packed = false];
- repeated fixed64 repeated_fixed64 = 8 [packed = false];
- repeated sfixed32 repeated_sfixed32 = 9 [packed = false];
- repeated sfixed64 repeated_sfixed64 = 10 [packed = false];
- repeated float repeated_float = 11 [packed = false];
- repeated double repeated_double = 12 [packed = false];
- repeated bool repeated_bool = 13 [packed = false];
- repeated TestPackedTypes.NestedEnum repeated_nested_enum = 14 [packed = false];
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/python_message.py b/third_party/protobuf/python/google/protobuf/internal/python_message.py
deleted file mode 100755
index 87f60666ab..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/python_message.py
+++ /dev/null
@@ -1,1520 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# This code is meant to work on Python 2.4 and above only.
-#
-# TODO(robinson): Helpers for verbose, common checks like seeing if a
-# descriptor's cpp_type is CPPTYPE_MESSAGE.
-
-"""Contains a metaclass and helper functions used to create
-protocol message classes from Descriptor objects at runtime.
-
-Recall that a metaclass is the "type" of a class.
-(A class is to a metaclass what an instance is to a class.)
-
-In this case, we use the GeneratedProtocolMessageType metaclass
-to inject all the useful functionality into the classes
-output by the protocol compiler at compile-time.
-
-The upshot of all this is that the real implementation
-details for ALL pure-Python protocol buffers are *here in
-this file*.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-from io import BytesIO
-import sys
-import struct
-import weakref
-
-import six
-import six.moves.copyreg as copyreg
-
-# We use "as" to avoid name collisions with variables.
-from google.protobuf.internal import containers
-from google.protobuf.internal import decoder
-from google.protobuf.internal import encoder
-from google.protobuf.internal import enum_type_wrapper
-from google.protobuf.internal import message_listener as message_listener_mod
-from google.protobuf.internal import type_checkers
-from google.protobuf.internal import well_known_types
-from google.protobuf.internal import wire_format
-from google.protobuf import descriptor as descriptor_mod
-from google.protobuf import message as message_mod
-from google.protobuf import symbol_database
-from google.protobuf import text_format
-
-_FieldDescriptor = descriptor_mod.FieldDescriptor
-_AnyFullTypeName = 'google.protobuf.Any'
-
-
-class GeneratedProtocolMessageType(type):
-
- """Metaclass for protocol message classes created at runtime from Descriptors.
-
- We add implementations for all methods described in the Message class. We
- also create properties to allow getting/setting all fields in the protocol
- message. Finally, we create slots to prevent users from accidentally
- "setting" nonexistent fields in the protocol message, which then wouldn't get
- serialized / deserialized properly.
-
- The protocol compiler currently uses this metaclass to create protocol
- message classes at runtime. Clients can also manually create their own
- classes at runtime, as in this example:
-
- mydescriptor = Descriptor(.....)
- class MyProtoClass(Message):
- __metaclass__ = GeneratedProtocolMessageType
- DESCRIPTOR = mydescriptor
- myproto_instance = MyProtoClass()
-    myproto_instance.foo_field = 23
- ...
-
- The above example will not work for nested types. If you wish to include them,
- use reflection.MakeClass() instead of manually instantiating the class in
- order to create the appropriate class structure.
- """
-
- # Must be consistent with the protocol-compiler code in
- # proto2/compiler/internal/generator.*.
- _DESCRIPTOR_KEY = 'DESCRIPTOR'
-
- def __new__(cls, name, bases, dictionary):
- """Custom allocation for runtime-generated class types.
-
- We override __new__ because this is apparently the only place
- where we can meaningfully set __slots__ on the class we're creating(?).
- (The interplay between metaclasses and slots is not very well-documented).
-
- Args:
- name: Name of the class (ignored, but required by the
- metaclass protocol).
- bases: Base classes of the class we're constructing.
- (Should be message.Message). We ignore this field, but
- it's required by the metaclass protocol
- dictionary: The class dictionary of the class we're
- constructing. dictionary[_DESCRIPTOR_KEY] must contain
- a Descriptor object describing this protocol message
- type.
-
- Returns:
- Newly-allocated class.
- """
- descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
- if descriptor.full_name in well_known_types.WKTBASES:
- bases += (well_known_types.WKTBASES[descriptor.full_name],)
- _AddClassAttributesForNestedExtensions(descriptor, dictionary)
- _AddSlots(descriptor, dictionary)
-
- superclass = super(GeneratedProtocolMessageType, cls)
- new_class = superclass.__new__(cls, name, bases, dictionary)
- return new_class
-
- def __init__(cls, name, bases, dictionary):
- """Here we perform the majority of our work on the class.
- We add enum getters, an __init__ method, implementations
- of all Message methods, and properties for all fields
- in the protocol type.
-
- Args:
- name: Name of the class (ignored, but required by the
- metaclass protocol).
- bases: Base classes of the class we're constructing.
- (Should be message.Message). We ignore this field, but
-        it's required by the metaclass protocol.
- dictionary: The class dictionary of the class we're
- constructing. dictionary[_DESCRIPTOR_KEY] must contain
- a Descriptor object describing this protocol message
- type.
- """
- descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
- cls._decoders_by_tag = {}
- cls._extensions_by_name = {}
- cls._extensions_by_number = {}
- if (descriptor.has_options and
- descriptor.GetOptions().message_set_wire_format):
- cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
- decoder.MessageSetItemDecoder(cls._extensions_by_number), None)
-
- # Attach stuff to each FieldDescriptor for quick lookup later on.
- for field in descriptor.fields:
- _AttachFieldHelpers(cls, field)
-
- descriptor._concrete_class = cls # pylint: disable=protected-access
- _AddEnumValues(descriptor, cls)
- _AddInitMethod(descriptor, cls)
- _AddPropertiesForFields(descriptor, cls)
- _AddPropertiesForExtensions(descriptor, cls)
- _AddStaticMethods(cls)
- _AddMessageMethods(descriptor, cls)
- _AddPrivateHelperMethods(descriptor, cls)
- copyreg.pickle(cls, lambda obj: (cls, (), obj.__getstate__()))
-
- superclass = super(GeneratedProtocolMessageType, cls)
- superclass.__init__(name, bases, dictionary)
-
-
-# Stateless helpers for GeneratedProtocolMessageType below.
-# Outside clients should not access these directly.
-#
-# I opted not to make any of these methods on the metaclass, to make it more
-# clear that I'm not really using any state there and to keep clients from
-# thinking that they have direct access to these construction helpers.
-
-
-def _PropertyName(proto_field_name):
- """Returns the name of the public property attribute which
- clients can use to get and (in some cases) set the value
- of a protocol message field.
-
- Args:
- proto_field_name: The protocol message field name, exactly
- as it appears (or would appear) in a .proto file.
- """
- # TODO(robinson): Escape Python keywords (e.g., yield), and test this support.
- # nnorwitz makes my day by writing:
- # """
- # FYI. See the keyword module in the stdlib. This could be as simple as:
- #
- # if keyword.iskeyword(proto_field_name):
- # return proto_field_name + "_"
- # return proto_field_name
- # """
- # Kenton says: The above is a BAD IDEA. People rely on being able to use
- # getattr() and setattr() to reflectively manipulate field values. If we
- # rename the properties, then every such user has to also make sure to apply
- # the same transformation. Note that currently if you name a field "yield",
- # you can still access it just fine using getattr/setattr -- it's not even
- # that cumbersome to do so.
- # TODO(kenton): Remove this method entirely if/when everyone agrees with my
- # position.
- return proto_field_name
-
-
-def _VerifyExtensionHandle(message, extension_handle):
- """Verify that the given extension handle is valid."""
-
- if not isinstance(extension_handle, _FieldDescriptor):
- raise KeyError('HasExtension() expects an extension handle, got: %s' %
- extension_handle)
-
- if not extension_handle.is_extension:
- raise KeyError('"%s" is not an extension.' % extension_handle.full_name)
-
- if not extension_handle.containing_type:
- raise KeyError('"%s" is missing a containing_type.'
- % extension_handle.full_name)
-
- if extension_handle.containing_type is not message.DESCRIPTOR:
- raise KeyError('Extension "%s" extends message type "%s", but this '
- 'message is of type "%s".' %
- (extension_handle.full_name,
- extension_handle.containing_type.full_name,
- message.DESCRIPTOR.full_name))
-
-
-def _AddSlots(message_descriptor, dictionary):
- """Adds a __slots__ entry to dictionary, containing the names of all valid
- attributes for this message type.
-
- Args:
- message_descriptor: A Descriptor instance describing this message type.
- dictionary: Class dictionary to which we'll add a '__slots__' entry.
- """
- dictionary['__slots__'] = ['_cached_byte_size',
- '_cached_byte_size_dirty',
- '_fields',
- '_unknown_fields',
- '_is_present_in_parent',
- '_listener',
- '_listener_for_children',
- '__weakref__',
- '_oneofs']
-
-
-def _IsMessageSetExtension(field):
- return (field.is_extension and
- field.containing_type.has_options and
- field.containing_type.GetOptions().message_set_wire_format and
- field.type == _FieldDescriptor.TYPE_MESSAGE and
- field.label == _FieldDescriptor.LABEL_OPTIONAL)
-
-
-def _IsMapField(field):
- return (field.type == _FieldDescriptor.TYPE_MESSAGE and
- field.message_type.has_options and
- field.message_type.GetOptions().map_entry)
-
-
-def _IsMessageMapField(field):
- value_type = field.message_type.fields_by_name["value"]
- return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
-
-
-def _AttachFieldHelpers(cls, field_descriptor):
- is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
- is_packable = (is_repeated and
- wire_format.IsTypePackable(field_descriptor.type))
- if not is_packable:
- is_packed = False
- elif field_descriptor.containing_type.syntax == "proto2":
- is_packed = (field_descriptor.has_options and
- field_descriptor.GetOptions().packed)
- else:
- has_packed_false = (field_descriptor.has_options and
- field_descriptor.GetOptions().HasField("packed") and
- field_descriptor.GetOptions().packed == False)
- is_packed = not has_packed_false
- is_map_entry = _IsMapField(field_descriptor)
-
- if is_map_entry:
- field_encoder = encoder.MapEncoder(field_descriptor)
- sizer = encoder.MapSizer(field_descriptor)
- elif _IsMessageSetExtension(field_descriptor):
- field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
- sizer = encoder.MessageSetItemSizer(field_descriptor.number)
- else:
- field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
- field_descriptor.number, is_repeated, is_packed)
- sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
- field_descriptor.number, is_repeated, is_packed)
-
- field_descriptor._encoder = field_encoder
- field_descriptor._sizer = sizer
- field_descriptor._default_constructor = _DefaultValueConstructorForField(
- field_descriptor)
-
- def AddDecoder(wiretype, is_packed):
- tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
- decode_type = field_descriptor.type
- if (decode_type == _FieldDescriptor.TYPE_ENUM and
- type_checkers.SupportsOpenEnums(field_descriptor)):
- decode_type = _FieldDescriptor.TYPE_INT32
-
- oneof_descriptor = None
- if field_descriptor.containing_oneof is not None:
- oneof_descriptor = field_descriptor
-
- if is_map_entry:
- is_message_map = _IsMessageMapField(field_descriptor)
-
- field_decoder = decoder.MapDecoder(
- field_descriptor, _GetInitializeDefaultForMap(field_descriptor),
- is_message_map)
- else:
- field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
- field_descriptor.number, is_repeated, is_packed,
- field_descriptor, field_descriptor._default_constructor)
-
- cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor)
-
- AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type],
- False)
-
- if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
- # To support wire compatibility of adding packed = true, add a decoder for
- # packed values regardless of the field's options.
- AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
-
-
-def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
- extension_dict = descriptor.extensions_by_name
- for extension_name, extension_field in extension_dict.items():
- assert extension_name not in dictionary
- dictionary[extension_name] = extension_field
-
-
-def _AddEnumValues(descriptor, cls):
- """Sets class-level attributes for all enum fields defined in this message.
-
-  Also exports a class-level object that can name enum values.
-
- Args:
- descriptor: Descriptor object for this message type.
- cls: Class we're constructing for this message type.
- """
- for enum_type in descriptor.enum_types:
- setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type))
- for enum_value in enum_type.values:
- setattr(cls, enum_value.name, enum_value.number)
-
-
-def _GetInitializeDefaultForMap(field):
- if field.label != _FieldDescriptor.LABEL_REPEATED:
- raise ValueError('map_entry set on non-repeated field %s' % (
- field.name))
- fields_by_name = field.message_type.fields_by_name
- key_checker = type_checkers.GetTypeChecker(fields_by_name['key'])
-
- value_field = fields_by_name['value']
- if _IsMessageMapField(field):
- def MakeMessageMapDefault(message):
- return containers.MessageMap(
- message._listener_for_children, value_field.message_type, key_checker)
- return MakeMessageMapDefault
- else:
- value_checker = type_checkers.GetTypeChecker(value_field)
- def MakePrimitiveMapDefault(message):
- return containers.ScalarMap(
- message._listener_for_children, key_checker, value_checker)
- return MakePrimitiveMapDefault
-
-def _DefaultValueConstructorForField(field):
- """Returns a function which returns a default value for a field.
-
- Args:
- field: FieldDescriptor object for this field.
-
- The returned function has one argument:
- message: Message instance containing this field, or a weakref proxy
- of same.
-
- That function in turn returns a default value for this field. The default
- value may refer back to |message| via a weak reference.
- """
-
- if _IsMapField(field):
- return _GetInitializeDefaultForMap(field)
-
- if field.label == _FieldDescriptor.LABEL_REPEATED:
- if field.has_default_value and field.default_value != []:
- raise ValueError('Repeated field default value not empty list: %s' % (
- field.default_value))
- if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- # We can't look at _concrete_class yet since it might not have
- # been set. (Depends on order in which we initialize the classes).
- message_type = field.message_type
- def MakeRepeatedMessageDefault(message):
- return containers.RepeatedCompositeFieldContainer(
- message._listener_for_children, field.message_type)
- return MakeRepeatedMessageDefault
- else:
- type_checker = type_checkers.GetTypeChecker(field)
- def MakeRepeatedScalarDefault(message):
- return containers.RepeatedScalarFieldContainer(
- message._listener_for_children, type_checker)
- return MakeRepeatedScalarDefault
-
- if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- # _concrete_class may not yet be initialized.
- message_type = field.message_type
- def MakeSubMessageDefault(message):
- result = message_type._concrete_class()
- result._SetListener(
- _OneofListener(message, field)
- if field.containing_oneof is not None
- else message._listener_for_children)
- return result
- return MakeSubMessageDefault
-
- def MakeScalarDefault(message):
- # TODO(protobuf-team): This may be broken since there may not be
- # default_value. Combine with has_default_value somehow.
- return field.default_value
- return MakeScalarDefault
-
-
-def _ReraiseTypeErrorWithFieldName(message_name, field_name):
- """Re-raise the currently-handled TypeError with the field name added."""
- exc = sys.exc_info()[1]
- if len(exc.args) == 1 and type(exc) is TypeError:
- # simple TypeError; add field name to exception message
- exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name))
-
- # re-raise possibly-amended exception with original traceback:
- six.reraise(type(exc), exc, sys.exc_info()[2])
-
-
-def _AddInitMethod(message_descriptor, cls):
- """Adds an __init__ method to cls."""
-
- def _GetIntegerEnumValue(enum_type, value):
- """Convert a string or integer enum value to an integer.
-
- If the value is a string, it is converted to the enum value in
- enum_type with the same name. If the value is not a string, it's
- returned as-is. (No conversion or bounds-checking is done.)
- """
- if isinstance(value, six.string_types):
- try:
- return enum_type.values_by_name[value].number
- except KeyError:
- raise ValueError('Enum type %s: unknown label "%s"' % (
- enum_type.full_name, value))
- return value
-
- def init(self, **kwargs):
- self._cached_byte_size = 0
- self._cached_byte_size_dirty = len(kwargs) > 0
- self._fields = {}
- # Contains a mapping from oneof field descriptors to the descriptor
- # of the currently set field in that oneof field.
- self._oneofs = {}
-
- # _unknown_fields is () when empty for efficiency, and will be turned into
- # a list if fields are added.
- self._unknown_fields = ()
- self._is_present_in_parent = False
- self._listener = message_listener_mod.NullMessageListener()
- self._listener_for_children = _Listener(self)
- for field_name, field_value in kwargs.items():
- field = _GetFieldByName(message_descriptor, field_name)
- if field is None:
- raise TypeError("%s() got an unexpected keyword argument '%s'" %
- (message_descriptor.name, field_name))
- if field.label == _FieldDescriptor.LABEL_REPEATED:
- copy = field._default_constructor(self)
- if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite
- if _IsMapField(field):
- if _IsMessageMapField(field):
- for key in field_value:
- copy[key].MergeFrom(field_value[key])
- else:
- copy.update(field_value)
- else:
- for val in field_value:
- if isinstance(val, dict):
- copy.add(**val)
- else:
- copy.add().MergeFrom(val)
- else: # Scalar
- if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
- field_value = [_GetIntegerEnumValue(field.enum_type, val)
- for val in field_value]
- copy.extend(field_value)
- self._fields[field] = copy
- elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- copy = field._default_constructor(self)
- new_val = field_value
- if isinstance(field_value, dict):
- new_val = field.message_type._concrete_class(**field_value)
- try:
- copy.MergeFrom(new_val)
- except TypeError:
- _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
- self._fields[field] = copy
- else:
- if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
- field_value = _GetIntegerEnumValue(field.enum_type, field_value)
- try:
- setattr(self, field_name, field_value)
- except TypeError:
- _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
-
- init.__module__ = None
- init.__doc__ = None
- cls.__init__ = init
-
-
-def _GetFieldByName(message_descriptor, field_name):
- """Returns a field descriptor by field name.
-
- Args:
- message_descriptor: A Descriptor describing all fields in message.
- field_name: The name of the field to retrieve.
- Returns:
- The field descriptor associated with the field name.
- """
- try:
- return message_descriptor.fields_by_name[field_name]
- except KeyError:
- raise ValueError('Protocol message %s has no "%s" field.' %
- (message_descriptor.name, field_name))
-
-
-def _AddPropertiesForFields(descriptor, cls):
- """Adds properties for all fields in this protocol message type."""
- for field in descriptor.fields:
- _AddPropertiesForField(field, cls)
-
- if descriptor.is_extendable:
- # _ExtensionDict is just an adaptor with no state so we allocate a new one
- # every time it is accessed.
- cls.Extensions = property(lambda self: _ExtensionDict(self))
-
-
-def _AddPropertiesForField(field, cls):
- """Adds a public property for a protocol message field.
- Clients can use this property to get and (in the case
- of non-repeated scalar fields) directly set the value
- of a protocol message field.
-
- Args:
- field: A FieldDescriptor for this field.
- cls: The class we're constructing.
- """
- # Catch it if we add other types that we should
- # handle specially here.
- assert _FieldDescriptor.MAX_CPPTYPE == 10
-
- constant_name = field.name.upper() + "_FIELD_NUMBER"
- setattr(cls, constant_name, field.number)
-
- if field.label == _FieldDescriptor.LABEL_REPEATED:
- _AddPropertiesForRepeatedField(field, cls)
- elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- _AddPropertiesForNonRepeatedCompositeField(field, cls)
- else:
- _AddPropertiesForNonRepeatedScalarField(field, cls)
-
-
-def _AddPropertiesForRepeatedField(field, cls):
- """Adds a public property for a "repeated" protocol message field. Clients
- can use this property to get the value of the field, which will be either a
- _RepeatedScalarFieldContainer or _RepeatedCompositeFieldContainer (see
- below).
-
- Note that when clients add values to these containers, we perform
- type-checking in the case of repeated scalar fields, and we also set any
- necessary "has" bits as a side-effect.
-
- Args:
- field: A FieldDescriptor for this field.
- cls: The class we're constructing.
- """
- proto_field_name = field.name
- property_name = _PropertyName(proto_field_name)
-
- def getter(self):
- field_value = self._fields.get(field)
- if field_value is None:
- # Construct a new object to represent this field.
- field_value = field._default_constructor(self)
-
- # Atomically check if another thread has preempted us and, if not, swap
- # in the new object we just created. If someone has preempted us, we
- # take that object and discard ours.
- # WARNING: We are relying on setdefault() being atomic. This is true
- # in CPython but we haven't investigated others. This warning appears
- # in several other locations in this file.
- field_value = self._fields.setdefault(field, field_value)
- return field_value
- getter.__module__ = None
- getter.__doc__ = 'Getter for %s.' % proto_field_name
-
- # We define a setter just so we can throw an exception with a more
- # helpful error message.
- def setter(self, new_value):
- raise AttributeError('Assignment not allowed to repeated field '
- '"%s" in protocol message object.' % proto_field_name)
-
- doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
- setattr(cls, property_name, property(getter, setter, doc=doc))
-
-
-def _AddPropertiesForNonRepeatedScalarField(field, cls):
- """Adds a public property for a nonrepeated, scalar protocol message field.
- Clients can use this property to get and directly set the value of the field.
- Note that when the client sets the value of a field by using this property,
- all necessary "has" bits are set as a side-effect, and we also perform
- type-checking.
-
- Args:
- field: A FieldDescriptor for this field.
- cls: The class we're constructing.
- """
- proto_field_name = field.name
- property_name = _PropertyName(proto_field_name)
- type_checker = type_checkers.GetTypeChecker(field)
- default_value = field.default_value
- valid_values = set()
- is_proto3 = field.containing_type.syntax == "proto3"
-
- def getter(self):
- # TODO(protobuf-team): This may be broken since there may not be
- # default_value. Combine with has_default_value somehow.
- return self._fields.get(field, default_value)
- getter.__module__ = None
- getter.__doc__ = 'Getter for %s.' % proto_field_name
-
- clear_when_set_to_default = is_proto3 and not field.containing_oneof
-
- def field_setter(self, new_value):
- # pylint: disable=protected-access
- # Testing the value for truthiness captures all of the proto3 defaults
- # (0, 0.0, enum 0, and False).
- new_value = type_checker.CheckValue(new_value)
- if clear_when_set_to_default and not new_value:
- self._fields.pop(field, None)
- else:
- self._fields[field] = new_value
- # Check _cached_byte_size_dirty inline to improve performance, since scalar
- # setters are called frequently.
- if not self._cached_byte_size_dirty:
- self._Modified()
-
- if field.containing_oneof:
- def setter(self, new_value):
- field_setter(self, new_value)
- self._UpdateOneofState(field)
- else:
- setter = field_setter
-
- setter.__module__ = None
- setter.__doc__ = 'Setter for %s.' % proto_field_name
-
- # Add a property to encapsulate the getter/setter.
- doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
- setattr(cls, property_name, property(getter, setter, doc=doc))
-
-
-def _AddPropertiesForNonRepeatedCompositeField(field, cls):
- """Adds a public property for a nonrepeated, composite protocol message field.
- A composite field is a "group" or "message" field.
-
- Clients can use this property to get the value of the field, but cannot
- assign to the property directly.
-
- Args:
- field: A FieldDescriptor for this field.
- cls: The class we're constructing.
- """
- # TODO(robinson): Remove duplication with similar method
- # for non-repeated scalars.
- proto_field_name = field.name
- property_name = _PropertyName(proto_field_name)
-
- def getter(self):
- field_value = self._fields.get(field)
- if field_value is None:
- # Construct a new object to represent this field.
- field_value = field._default_constructor(self)
-
- # Atomically check if another thread has preempted us and, if not, swap
- # in the new object we just created. If someone has preempted us, we
- # take that object and discard ours.
- # WARNING: We are relying on setdefault() being atomic. This is true
- # in CPython but we haven't investigated others. This warning appears
- # in several other locations in this file.
- field_value = self._fields.setdefault(field, field_value)
- return field_value
- getter.__module__ = None
- getter.__doc__ = 'Getter for %s.' % proto_field_name
-
- # We define a setter just so we can throw an exception with a more
- # helpful error message.
- def setter(self, new_value):
- raise AttributeError('Assignment not allowed to composite field '
- '"%s" in protocol message object.' % proto_field_name)
-
- # Add a property to encapsulate the getter.
- doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
- setattr(cls, property_name, property(getter, setter, doc=doc))
-
-
-def _AddPropertiesForExtensions(descriptor, cls):
- """Adds properties for all fields in this protocol message type."""
- extension_dict = descriptor.extensions_by_name
- for extension_name, extension_field in extension_dict.items():
- constant_name = extension_name.upper() + "_FIELD_NUMBER"
- setattr(cls, constant_name, extension_field.number)
-
-
-def _AddStaticMethods(cls):
- # TODO(robinson): This probably needs to be thread-safe(?)
- def RegisterExtension(extension_handle):
- extension_handle.containing_type = cls.DESCRIPTOR
- _AttachFieldHelpers(cls, extension_handle)
-
- # Try to insert our extension, failing if an extension with the same number
- # already exists.
- actual_handle = cls._extensions_by_number.setdefault(
- extension_handle.number, extension_handle)
- if actual_handle is not extension_handle:
- raise AssertionError(
- 'Extensions "%s" and "%s" both try to extend message type "%s" with '
- 'field number %d.' %
- (extension_handle.full_name, actual_handle.full_name,
- cls.DESCRIPTOR.full_name, extension_handle.number))
-
- cls._extensions_by_name[extension_handle.full_name] = extension_handle
-
- handle = extension_handle # avoid line wrapping
- if _IsMessageSetExtension(handle):
- # MessageSet extension. Also register under type name.
- cls._extensions_by_name[
- extension_handle.message_type.full_name] = extension_handle
-
- cls.RegisterExtension = staticmethod(RegisterExtension)
-
- def FromString(s):
- message = cls()
- message.MergeFromString(s)
- return message
- cls.FromString = staticmethod(FromString)
-
-
-def _IsPresent(item):
- """Given a (FieldDescriptor, value) tuple from _fields, return true if the
- value should be included in the list returned by ListFields()."""
-
- if item[0].label == _FieldDescriptor.LABEL_REPEATED:
- return bool(item[1])
- elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- return item[1]._is_present_in_parent
- else:
- return True
-
-
-def _AddListFieldsMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
-
- def ListFields(self):
- all_fields = [item for item in self._fields.items() if _IsPresent(item)]
- all_fields.sort(key = lambda item: item[0].number)
- return all_fields
-
- cls.ListFields = ListFields
-
-_Proto3HasError = 'Protocol message has no non-repeated submessage field "%s"'
-_Proto2HasError = 'Protocol message has no non-repeated field "%s"'
-
-def _AddHasFieldMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
-
- is_proto3 = (message_descriptor.syntax == "proto3")
- error_msg = _Proto3HasError if is_proto3 else _Proto2HasError
-
- hassable_fields = {}
- for field in message_descriptor.fields:
- if field.label == _FieldDescriptor.LABEL_REPEATED:
- continue
- # For proto3, only submessages and fields inside a oneof have presence.
- if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and
- not field.containing_oneof):
- continue
- hassable_fields[field.name] = field
-
- if not is_proto3:
- # Fields inside oneofs are never repeated (enforced by the compiler).
- for oneof in message_descriptor.oneofs:
- hassable_fields[oneof.name] = oneof
-
- def HasField(self, field_name):
- try:
- field = hassable_fields[field_name]
- except KeyError:
- raise ValueError(error_msg % field_name)
-
- if isinstance(field, descriptor_mod.OneofDescriptor):
- try:
- return HasField(self, self._oneofs[field].name)
- except KeyError:
- return False
- else:
- if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- value = self._fields.get(field)
- return value is not None and value._is_present_in_parent
- else:
- return field in self._fields
-
- cls.HasField = HasField
-
-
-def _AddClearFieldMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
- def ClearField(self, field_name):
- try:
- field = message_descriptor.fields_by_name[field_name]
- except KeyError:
- try:
- field = message_descriptor.oneofs_by_name[field_name]
- if field in self._oneofs:
- field = self._oneofs[field]
- else:
- return
- except KeyError:
- raise ValueError('Protocol message %s() has no "%s" field.' %
- (message_descriptor.name, field_name))
-
- if field in self._fields:
- # To match the C++ implementation, we need to invalidate iterators
- # for map fields when ClearField() happens.
- if hasattr(self._fields[field], 'InvalidateIterators'):
- self._fields[field].InvalidateIterators()
-
- # Note: If the field is a sub-message, its listener will still point
-      # at us.  That's fine, because the worst that can happen is that it
- # will call _Modified() and invalidate our byte size. Big deal.
- del self._fields[field]
-
- if self._oneofs.get(field.containing_oneof, None) is field:
- del self._oneofs[field.containing_oneof]
-
- # Always call _Modified() -- even if nothing was changed, this is
- # a mutating method, and thus calling it should cause the field to become
- # present in the parent message.
- self._Modified()
-
- cls.ClearField = ClearField
-
-
-def _AddClearExtensionMethod(cls):
- """Helper for _AddMessageMethods()."""
- def ClearExtension(self, extension_handle):
- _VerifyExtensionHandle(self, extension_handle)
-
- # Similar to ClearField(), above.
- if extension_handle in self._fields:
- del self._fields[extension_handle]
- self._Modified()
- cls.ClearExtension = ClearExtension
-
-
-def _AddClearMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
- def Clear(self):
- # Clear fields.
- self._fields = {}
- self._unknown_fields = ()
- self._oneofs = {}
- self._Modified()
- cls.Clear = Clear
-
-
-def _AddHasExtensionMethod(cls):
- """Helper for _AddMessageMethods()."""
- def HasExtension(self, extension_handle):
- _VerifyExtensionHandle(self, extension_handle)
- if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
- raise KeyError('"%s" is repeated.' % extension_handle.full_name)
-
- if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- value = self._fields.get(extension_handle)
- return value is not None and value._is_present_in_parent
- else:
- return extension_handle in self._fields
- cls.HasExtension = HasExtension
-
-def _InternalUnpackAny(msg):
- """Unpacks Any message and returns the unpacked message.
-
-  This internal method differs from the public Any Unpack method, which takes
-  the target message as an argument; _InternalUnpackAny must instead look up
-  the message type in the descriptor pool.
-
- Args:
- msg: An Any message to be unpacked.
-
- Returns:
- The unpacked message.
- """
- type_url = msg.type_url
- db = symbol_database.Default()
-
- if not type_url:
- return None
-
- # TODO(haberman): For now we just strip the hostname. Better logic will be
- # required.
- type_name = type_url.split("/")[-1]
- descriptor = db.pool.FindMessageTypeByName(type_name)
-
- if descriptor is None:
- return None
-
- message_class = db.GetPrototype(descriptor)
- message = message_class()
-
- message.ParseFromString(msg.value)
- return message
-
-def _AddEqualsMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
- def __eq__(self, other):
- if (not isinstance(other, message_mod.Message) or
- other.DESCRIPTOR != self.DESCRIPTOR):
- return False
-
- if self is other:
- return True
-
- if self.DESCRIPTOR.full_name == _AnyFullTypeName:
- any_a = _InternalUnpackAny(self)
- any_b = _InternalUnpackAny(other)
- if any_a and any_b:
- return any_a == any_b
-
- if not self.ListFields() == other.ListFields():
- return False
-
- # Sort unknown fields because their order shouldn't affect equality test.
- unknown_fields = list(self._unknown_fields)
- unknown_fields.sort()
- other_unknown_fields = list(other._unknown_fields)
- other_unknown_fields.sort()
-
- return unknown_fields == other_unknown_fields
-
- cls.__eq__ = __eq__
-
-
-def _AddStrMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
- def __str__(self):
- return text_format.MessageToString(self)
- cls.__str__ = __str__
-
-
-def _AddReprMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
- def __repr__(self):
- return text_format.MessageToString(self)
- cls.__repr__ = __repr__
-
-
-def _AddUnicodeMethod(unused_message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
-
- def __unicode__(self):
- return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
- cls.__unicode__ = __unicode__
-
-
-def _AddSetListenerMethod(cls):
- """Helper for _AddMessageMethods()."""
- def SetListener(self, listener):
- if listener is None:
- self._listener = message_listener_mod.NullMessageListener()
- else:
- self._listener = listener
- cls._SetListener = SetListener
-
-
-def _BytesForNonRepeatedElement(value, field_number, field_type):
- """Returns the number of bytes needed to serialize a non-repeated element.
- The returned byte count includes space for tag information and any
- other additional space associated with serializing value.
-
- Args:
- value: Value we're serializing.
- field_number: Field number of this value. (Since the field number
- is stored as part of a varint-encoded tag, this has an impact
- on the total bytes required to serialize the value).
- field_type: The type of the field. One of the TYPE_* constants
- within FieldDescriptor.
- """
- try:
- fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
- return fn(field_number, value)
- except KeyError:
- raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
-
-
-def _AddByteSizeMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
-
- def ByteSize(self):
- if not self._cached_byte_size_dirty:
- return self._cached_byte_size
-
- size = 0
- for field_descriptor, field_value in self.ListFields():
- size += field_descriptor._sizer(field_value)
-
- for tag_bytes, value_bytes in self._unknown_fields:
- size += len(tag_bytes) + len(value_bytes)
-
- self._cached_byte_size = size
- self._cached_byte_size_dirty = False
- self._listener_for_children.dirty = False
- return size
-
- cls.ByteSize = ByteSize
-
-
-def _AddSerializeToStringMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
-
- def SerializeToString(self):
- # Check if the message has all of its required fields set.
- errors = []
- if not self.IsInitialized():
- raise message_mod.EncodeError(
- 'Message %s is missing required fields: %s' % (
- self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
- return self.SerializePartialToString()
- cls.SerializeToString = SerializeToString
-
-
-def _AddSerializePartialToStringMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
-
- def SerializePartialToString(self):
- out = BytesIO()
- self._InternalSerialize(out.write)
- return out.getvalue()
- cls.SerializePartialToString = SerializePartialToString
-
- def InternalSerialize(self, write_bytes):
- for field_descriptor, field_value in self.ListFields():
- field_descriptor._encoder(write_bytes, field_value)
- for tag_bytes, value_bytes in self._unknown_fields:
- write_bytes(tag_bytes)
- write_bytes(value_bytes)
- cls._InternalSerialize = InternalSerialize
-
-
-def _AddMergeFromStringMethod(message_descriptor, cls):
- """Helper for _AddMessageMethods()."""
- def MergeFromString(self, serialized):
- length = len(serialized)
- try:
- if self._InternalParse(serialized, 0, length) != length:
- # The only reason _InternalParse would return early is if it
- # encountered an end-group tag.
- raise message_mod.DecodeError('Unexpected end-group tag.')
- except (IndexError, TypeError):
- # Now ord(buf[p:p+1]) == ord('') gets TypeError.
- raise message_mod.DecodeError('Truncated message.')
- except struct.error as e:
- raise message_mod.DecodeError(e)
- return length # Return this for legacy reasons.
- cls.MergeFromString = MergeFromString
-
- local_ReadTag = decoder.ReadTag
- local_SkipField = decoder.SkipField
- decoders_by_tag = cls._decoders_by_tag
- is_proto3 = message_descriptor.syntax == "proto3"
-
- def InternalParse(self, buffer, pos, end):
- self._Modified()
- field_dict = self._fields
- unknown_field_list = self._unknown_fields
- while pos != end:
- (tag_bytes, new_pos) = local_ReadTag(buffer, pos)
- field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None))
- if field_decoder is None:
- value_start_pos = new_pos
- new_pos = local_SkipField(buffer, new_pos, end, tag_bytes)
- if new_pos == -1:
- return pos
- if not is_proto3:
- if not unknown_field_list:
- unknown_field_list = self._unknown_fields = []
- unknown_field_list.append(
- (tag_bytes, buffer[value_start_pos:new_pos]))
- pos = new_pos
- else:
- pos = field_decoder(buffer, new_pos, end, self, field_dict)
- if field_desc:
- self._UpdateOneofState(field_desc)
- return pos
- cls._InternalParse = InternalParse
-
-
-def _AddIsInitializedMethod(message_descriptor, cls):
- """Adds the IsInitialized and FindInitializationError methods to the
- protocol message class."""
-
- required_fields = [field for field in message_descriptor.fields
- if field.label == _FieldDescriptor.LABEL_REQUIRED]
-
- def IsInitialized(self, errors=None):
- """Checks if all required fields of a message are set.
-
- Args:
- errors: A list which, if provided, will be populated with the field
- paths of all missing required fields.
-
- Returns:
- True iff the specified message has all required fields set.
- """
-
- # Performance is critical so we avoid HasField() and ListFields().
-
- for field in required_fields:
- if (field not in self._fields or
- (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
- not self._fields[field]._is_present_in_parent)):
- if errors is not None:
- errors.extend(self.FindInitializationErrors())
- return False
-
- for field, value in list(self._fields.items()): # dict can change size!
- if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- if field.label == _FieldDescriptor.LABEL_REPEATED:
- if (field.message_type.has_options and
- field.message_type.GetOptions().map_entry):
- continue
- for element in value:
- if not element.IsInitialized():
- if errors is not None:
- errors.extend(self.FindInitializationErrors())
- return False
- elif value._is_present_in_parent and not value.IsInitialized():
- if errors is not None:
- errors.extend(self.FindInitializationErrors())
- return False
-
- return True
-
- cls.IsInitialized = IsInitialized
-
- def FindInitializationErrors(self):
- """Finds required fields which are not initialized.
-
- Returns:
- A list of strings. Each string is a path to an uninitialized field from
- the top-level message, e.g. "foo.bar[5].baz".
- """
-
- errors = [] # simplify things
-
- for field in required_fields:
- if not self.HasField(field.name):
- errors.append(field.name)
-
- for field, value in self.ListFields():
- if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- if field.is_extension:
- name = "(%s)" % field.full_name
- else:
- name = field.name
-
- if _IsMapField(field):
- if _IsMessageMapField(field):
- for key in value:
- element = value[key]
- prefix = "%s[%s]." % (name, key)
- sub_errors = element.FindInitializationErrors()
- errors += [prefix + error for error in sub_errors]
- else:
- # ScalarMaps can't have any initialization errors.
- pass
- elif field.label == _FieldDescriptor.LABEL_REPEATED:
- for i in range(len(value)):
- element = value[i]
- prefix = "%s[%d]." % (name, i)
- sub_errors = element.FindInitializationErrors()
- errors += [prefix + error for error in sub_errors]
- else:
- prefix = name + "."
- sub_errors = value.FindInitializationErrors()
- errors += [prefix + error for error in sub_errors]
-
- return errors
-
- cls.FindInitializationErrors = FindInitializationErrors
-
-
-def _AddMergeFromMethod(cls):
- LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED
- CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE
-
- def MergeFrom(self, msg):
- if not isinstance(msg, cls):
- raise TypeError(
- "Parameter to MergeFrom() must be instance of same class: "
- "expected %s got %s." % (cls.__name__, type(msg).__name__))
-
- assert msg is not self
- self._Modified()
-
- fields = self._fields
-
- for field, value in msg._fields.items():
- if field.label == LABEL_REPEATED:
- field_value = fields.get(field)
- if field_value is None:
- # Construct a new object to represent this field.
- field_value = field._default_constructor(self)
- fields[field] = field_value
- field_value.MergeFrom(value)
- elif field.cpp_type == CPPTYPE_MESSAGE:
- if value._is_present_in_parent:
- field_value = fields.get(field)
- if field_value is None:
- # Construct a new object to represent this field.
- field_value = field._default_constructor(self)
- fields[field] = field_value
- field_value.MergeFrom(value)
- else:
- self._fields[field] = value
- if field.containing_oneof:
- self._UpdateOneofState(field)
-
- if msg._unknown_fields:
- if not self._unknown_fields:
- self._unknown_fields = []
- self._unknown_fields.extend(msg._unknown_fields)
-
- cls.MergeFrom = MergeFrom
-
-
-def _AddWhichOneofMethod(message_descriptor, cls):
- def WhichOneof(self, oneof_name):
- """Returns the name of the currently set field inside a oneof, or None."""
- try:
- field = message_descriptor.oneofs_by_name[oneof_name]
- except KeyError:
- raise ValueError(
- 'Protocol message has no oneof "%s" field.' % oneof_name)
-
- nested_field = self._oneofs.get(field, None)
- if nested_field is not None and self.HasField(nested_field.name):
- return nested_field.name
- else:
- return None
-
- cls.WhichOneof = WhichOneof
-
-
-def _AddMessageMethods(message_descriptor, cls):
- """Adds implementations of all Message methods to cls."""
- _AddListFieldsMethod(message_descriptor, cls)
- _AddHasFieldMethod(message_descriptor, cls)
- _AddClearFieldMethod(message_descriptor, cls)
- if message_descriptor.is_extendable:
- _AddClearExtensionMethod(cls)
- _AddHasExtensionMethod(cls)
- _AddClearMethod(message_descriptor, cls)
- _AddEqualsMethod(message_descriptor, cls)
- _AddStrMethod(message_descriptor, cls)
- _AddReprMethod(message_descriptor, cls)
- _AddUnicodeMethod(message_descriptor, cls)
- _AddSetListenerMethod(cls)
- _AddByteSizeMethod(message_descriptor, cls)
- _AddSerializeToStringMethod(message_descriptor, cls)
- _AddSerializePartialToStringMethod(message_descriptor, cls)
- _AddMergeFromStringMethod(message_descriptor, cls)
- _AddIsInitializedMethod(message_descriptor, cls)
- _AddMergeFromMethod(cls)
- _AddWhichOneofMethod(message_descriptor, cls)
-
-
-def _AddPrivateHelperMethods(message_descriptor, cls):
- """Adds implementation of private helper methods to cls."""
-
- def Modified(self):
- """Sets the _cached_byte_size_dirty bit to true,
- and propagates this to our listener iff this was a state change.
- """
-
- # Note: Some callers check _cached_byte_size_dirty before calling
- # _Modified() as an extra optimization. So, if this method is ever
- # changed such that it does stuff even when _cached_byte_size_dirty is
- # already true, the callers need to be updated.
- if not self._cached_byte_size_dirty:
- self._cached_byte_size_dirty = True
- self._listener_for_children.dirty = True
- self._is_present_in_parent = True
- self._listener.Modified()
-
- def _UpdateOneofState(self, field):
- """Sets field as the active field in its containing oneof.
-
-    Will also delete the currently active field in the oneof, if it is
-    different from the argument. Does not mark the message as modified.
- """
- other_field = self._oneofs.setdefault(field.containing_oneof, field)
- if other_field is not field:
- del self._fields[other_field]
- self._oneofs[field.containing_oneof] = field
-
- cls._Modified = Modified
- cls.SetInParent = Modified
- cls._UpdateOneofState = _UpdateOneofState
-
-
-class _Listener(object):
-
- """MessageListener implementation that a parent message registers with its
- child message.
-
- In order to support semantics like:
-
- foo.bar.baz.qux = 23
- assert foo.HasField('bar')
-
- ...child objects must have back references to their parents.
- This helper class is at the heart of this support.
- """
-
- def __init__(self, parent_message):
- """Args:
- parent_message: The message whose _Modified() method we should call when
- we receive Modified() messages.
- """
- # This listener establishes a back reference from a child (contained) object
- # to its parent (containing) object. We make this a weak reference to avoid
- # creating cyclic garbage when the client finishes with the 'parent' object
- # in the tree.
- if isinstance(parent_message, weakref.ProxyType):
- self._parent_message_weakref = parent_message
- else:
- self._parent_message_weakref = weakref.proxy(parent_message)
-
- # As an optimization, we also indicate directly on the listener whether
- # or not the parent message is dirty. This way we can avoid traversing
- # up the tree in the common case.
- self.dirty = False
-
- def Modified(self):
- if self.dirty:
- return
- try:
- # Propagate the signal to our parents iff this is the first field set.
- self._parent_message_weakref._Modified()
- except ReferenceError:
- # We can get here if a client has kept a reference to a child object,
- # and is now setting a field on it, but the child's parent has been
- # garbage-collected. This is not an error.
- pass
-
-
-class _OneofListener(_Listener):
- """Special listener implementation for setting composite oneof fields."""
-
- def __init__(self, parent_message, field):
- """Args:
- parent_message: The message whose _Modified() method we should call when
- we receive Modified() messages.
- field: The descriptor of the field being set in the parent message.
- """
- super(_OneofListener, self).__init__(parent_message)
- self._field = field
-
- def Modified(self):
- """Also updates the state of the containing oneof in the parent message."""
- try:
- self._parent_message_weakref._UpdateOneofState(self._field)
- super(_OneofListener, self).Modified()
- except ReferenceError:
- pass
-
-
-# TODO(robinson): Move elsewhere? This file is getting pretty ridiculous...
-# TODO(robinson): Unify error handling of "unknown extension" crap.
-# TODO(robinson): Support iteritems()-style iteration over all
-# extensions with the "has" bits turned on?
-class _ExtensionDict(object):
-
- """Dict-like container for supporting an indexable "Extensions"
- field on proto instances.
-
- Note that in all cases we expect extension handles to be
- FieldDescriptors.
- """
-
- def __init__(self, extended_message):
- """extended_message: Message instance for which we are the Extensions dict.
- """
-
- self._extended_message = extended_message
-
- def __getitem__(self, extension_handle):
- """Returns the current value of the given extension handle."""
-
- _VerifyExtensionHandle(self._extended_message, extension_handle)
-
- result = self._extended_message._fields.get(extension_handle)
- if result is not None:
- return result
-
- if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
- result = extension_handle._default_constructor(self._extended_message)
- elif extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- result = extension_handle.message_type._concrete_class()
- try:
- result._SetListener(self._extended_message._listener_for_children)
- except ReferenceError:
- pass
- else:
- # Singular scalar -- just return the default without inserting into the
- # dict.
- return extension_handle.default_value
-
- # Atomically check if another thread has preempted us and, if not, swap
- # in the new object we just created. If someone has preempted us, we
- # take that object and discard ours.
- # WARNING: We are relying on setdefault() being atomic. This is true
- # in CPython but we haven't investigated others. This warning appears
- # in several other locations in this file.
- result = self._extended_message._fields.setdefault(
- extension_handle, result)
-
- return result
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return False
-
- my_fields = self._extended_message.ListFields()
- other_fields = other._extended_message.ListFields()
-
- # Get rid of non-extension fields.
- my_fields = [ field for field in my_fields if field.is_extension ]
- other_fields = [ field for field in other_fields if field.is_extension ]
-
- return my_fields == other_fields
-
- def __ne__(self, other):
- return not self == other
-
- def __hash__(self):
- raise TypeError('unhashable object')
-
- # Note that this is only meaningful for non-repeated, scalar extension
- # fields. Note also that we may have to call _Modified() when we do
-  # successfully set a field this way, to set any necessary "has" bits in the
- # ancestors of the extended message.
- def __setitem__(self, extension_handle, value):
- """If extension_handle specifies a non-repeated, scalar extension
- field, sets the value of that field.
- """
-
- _VerifyExtensionHandle(self._extended_message, extension_handle)
-
- if (extension_handle.label == _FieldDescriptor.LABEL_REPEATED or
- extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE):
- raise TypeError(
- 'Cannot assign to extension "%s" because it is a repeated or '
- 'composite type.' % extension_handle.full_name)
-
- # It's slightly wasteful to lookup the type checker each time,
- # but we expect this to be a vanishingly uncommon case anyway.
- type_checker = type_checkers.GetTypeChecker(extension_handle)
- # pylint: disable=protected-access
- self._extended_message._fields[extension_handle] = (
- type_checker.CheckValue(value))
- self._extended_message._Modified()
-
- def _FindExtensionByName(self, name):
- """Tries to find a known extension with the specified name.
-
- Args:
- name: Extension full name.
-
- Returns:
- Extension field descriptor.
- """
- return self._extended_message._extensions_by_name.get(name, None)
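
As a rough illustration of what the GeneratedProtocolMessageType machinery above ultimately powers, the sketch below builds a message class at runtime through the public descriptor_pool / message_factory API instead of instantiating the metaclass directly. It assumes only the google.protobuf runtime of this era; the file name example.proto, the message example.MyProto, and the field foo_field are invented for the example.

    # Sketch: create a message class at runtime (hypothetical example.MyProto).
    from google.protobuf import descriptor_pb2
    from google.protobuf import descriptor_pool
    from google.protobuf import message_factory

    # Describe a file with one message containing a single optional int32 field.
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example.proto'
    file_proto.package = 'example'
    msg_proto = file_proto.message_type.add()
    msg_proto.name = 'MyProto'
    field = msg_proto.field.add()
    field.name = 'foo_field'
    field.number = 1
    field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT32
    field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

    # Register the file and ask the factory for a concrete class; in the
    # pure-Python implementation this goes through the metaclass shown above.
    pool = descriptor_pool.DescriptorPool()
    pool.Add(file_proto)
    MyProto = message_factory.MessageFactory(pool).GetPrototype(
        pool.FindMessageTypeByName('example.MyProto'))

    msg = MyProto(foo_field=23)
    assert msg.foo_field == 23
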
diff --git a/third_party/protobuf/python/google/protobuf/internal/test_bad_identifiers.proto b/third_party/protobuf/python/google/protobuf/internal/test_bad_identifiers.proto
deleted file mode 100644
index c4860ea88a..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/test_bad_identifiers.proto
+++ /dev/null
@@ -1,53 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: kenton@google.com (Kenton Varda)
-
-syntax = "proto2";
-
-package protobuf_unittest;
-
-option py_generic_services = true;
-
-message TestBadIdentifiers {
- extensions 100 to max;
-}
-
-// Make sure these reasonable extension names don't conflict with internal
-// variables.
-extend TestBadIdentifiers {
- optional string message = 100 [default="foo"];
- optional string descriptor = 101 [default="bar"];
- optional string reflection = 102 [default="baz"];
- optional string service = 103 [default="qux"];
-}
-
-message AnotherMessage {}
-service AnotherService {}
diff --git a/third_party/protobuf/python/google/protobuf/internal/type_checkers.py b/third_party/protobuf/python/google/protobuf/internal/type_checkers.py
deleted file mode 100755
index f30ca6a80e..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/type_checkers.py
+++ /dev/null
@@ -1,341 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Provides type checking routines.
-
-This module defines type checking utilities in the forms of dictionaries:
-
-VALUE_CHECKERS: A dictionary of field types and a value validation object.
-TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
- function.
-TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
- function.
-FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their
-  corresponding wire types.
-TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
- function.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-import six
-
-if six.PY3:
- long = int
-
-from google.protobuf.internal import api_implementation
-from google.protobuf.internal import decoder
-from google.protobuf.internal import encoder
-from google.protobuf.internal import wire_format
-from google.protobuf import descriptor
-
-_FieldDescriptor = descriptor.FieldDescriptor
-
-def SupportsOpenEnums(field_descriptor):
- return field_descriptor.containing_type.syntax == "proto3"
-
-def GetTypeChecker(field):
- """Returns a type checker for a message field of the specified types.
-
- Args:
- field: FieldDescriptor object for this field.
-
- Returns:
- An instance of TypeChecker which can be used to verify the types
- of values assigned to a field of the specified type.
- """
- if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
- field.type == _FieldDescriptor.TYPE_STRING):
- return UnicodeValueChecker()
- if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
- if SupportsOpenEnums(field):
- # When open enums are supported, any int32 can be assigned.
- return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
- else:
- return EnumValueChecker(field.enum_type)
- return _VALUE_CHECKERS[field.cpp_type]
-
-
-# None of the typecheckers below make any attempt to guard against people
-# subclassing builtin types and doing weird things. We're not trying to
-# protect against malicious clients here, just people accidentally shooting
-# themselves in the foot in obvious ways.
-
-class TypeChecker(object):
-
- """Type checker used to catch type errors as early as possible
- when the client is setting scalar fields in protocol messages.
- """
-
- def __init__(self, *acceptable_types):
- self._acceptable_types = acceptable_types
-
- def CheckValue(self, proposed_value):
- """Type check the provided value and return it.
-
- The returned value might have been normalized to another type.
- """
- if not isinstance(proposed_value, self._acceptable_types):
- message = ('%.1024r has type %s, but expected one of: %s' %
- (proposed_value, type(proposed_value), self._acceptable_types))
- raise TypeError(message)
- return proposed_value
-
-
-# IntValueChecker and its subclasses perform integer type-checks
-# and bounds-checks.
-class IntValueChecker(object):
-
- """Checker used for integer fields. Performs type-check and range check."""
-
- def CheckValue(self, proposed_value):
- if not isinstance(proposed_value, six.integer_types):
- message = ('%.1024r has type %s, but expected one of: %s' %
- (proposed_value, type(proposed_value), six.integer_types))
- raise TypeError(message)
- if not self._MIN <= proposed_value <= self._MAX:
- raise ValueError('Value out of range: %d' % proposed_value)
- # We force 32-bit values to int and 64-bit values to long to make
- # alternate implementations where the distinction is more significant
- # (e.g. the C++ implementation) simpler.
- proposed_value = self._TYPE(proposed_value)
- return proposed_value
-
- def DefaultValue(self):
- return 0
-
-
-class EnumValueChecker(object):
-
- """Checker used for enum fields. Performs type-check and range check."""
-
- def __init__(self, enum_type):
- self._enum_type = enum_type
-
- def CheckValue(self, proposed_value):
- if not isinstance(proposed_value, six.integer_types):
- message = ('%.1024r has type %s, but expected one of: %s' %
- (proposed_value, type(proposed_value), six.integer_types))
- raise TypeError(message)
- if proposed_value not in self._enum_type.values_by_number:
- raise ValueError('Unknown enum value: %d' % proposed_value)
- return proposed_value
-
- def DefaultValue(self):
- return self._enum_type.values[0].number
-
-
-class UnicodeValueChecker(object):
-
- """Checker used for string fields.
-
- Always returns a unicode value, even if the input is of type str.
- """
-
- def CheckValue(self, proposed_value):
- if not isinstance(proposed_value, (bytes, six.text_type)):
- message = ('%.1024r has type %s, but expected one of: %s' %
- (proposed_value, type(proposed_value), (bytes, six.text_type)))
- raise TypeError(message)
-
- # If the value is of type 'bytes' make sure that it is valid UTF-8 data.
- if isinstance(proposed_value, bytes):
- try:
- proposed_value = proposed_value.decode('utf-8')
- except UnicodeDecodeError:
- raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
- 'encoding. Non-UTF-8 strings must be converted to '
- 'unicode objects before being added.' %
- (proposed_value))
- return proposed_value
-
- def DefaultValue(self):
- return u""
-
-
-class Int32ValueChecker(IntValueChecker):
- # We're sure to use ints instead of longs here since comparison may be more
- # efficient.
- _MIN = -2147483648
- _MAX = 2147483647
- _TYPE = int
-
-
-class Uint32ValueChecker(IntValueChecker):
- _MIN = 0
- _MAX = (1 << 32) - 1
- _TYPE = int
-
-
-class Int64ValueChecker(IntValueChecker):
- _MIN = -(1 << 63)
- _MAX = (1 << 63) - 1
- _TYPE = long
-
-
-class Uint64ValueChecker(IntValueChecker):
- _MIN = 0
- _MAX = (1 << 64) - 1
- _TYPE = long
-
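-# A minimal illustration of the bounds checking above: values outside
-# [-2**31, 2**31 - 1] are rejected by Int32ValueChecker, while in-range
-# values are returned coerced to the checker's _TYPE.
-assert Int32ValueChecker().CheckValue(2147483647) == 2147483647
-assert Int32ValueChecker().CheckValue(-2147483648) == -2147483648
-# Int32ValueChecker().CheckValue(2147483648) would raise ValueError.
-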
-
-# Type-checkers for all scalar CPPTYPEs.
-_VALUE_CHECKERS = {
- _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
- _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
- _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
- _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
- _FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker(
- float, int, long),
- _FieldDescriptor.CPPTYPE_FLOAT: TypeChecker(
- float, int, long),
- _FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int),
- _FieldDescriptor.CPPTYPE_STRING: TypeChecker(bytes),
- }
-
-
-# Map from field type to a function F, such that F(field_num, value)
-# gives the total byte size for a value of the given type. This
-# byte size includes tag information and any other additional space
-# associated with serializing "value".
-TYPE_TO_BYTE_SIZE_FN = {
- _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
- _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
- _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
- _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
- _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
- _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
- _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
- _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
- _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
- _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
- _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
- _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
- _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
- _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
- _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
- _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
- _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
- _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
- }
-
-
-# Maps from field types to encoder constructors.
-TYPE_TO_ENCODER = {
- _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
- _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
- _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
- _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
- _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
- _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
- _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
- _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
- _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
- _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
- _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
- _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
- _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
- _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
- _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
- _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
- _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
- _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
- }
-
-
-# Maps from field types to sizer constructors.
-TYPE_TO_SIZER = {
- _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
- _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
- _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
- _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
- _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
- _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
- _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
- _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
- _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
- _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
- _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
- _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
- _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
- _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
- _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
- _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
- _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
- _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
- }
-
-
-# Maps from field type to a decoder constructor.
-TYPE_TO_DECODER = {
- _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
- _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
- _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
- _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
- _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
- _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
- _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
- _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
- _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
- _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
- _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
- _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
- _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
- _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
- _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
- _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
- _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
- _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
- }
-
-# Maps from field type to expected wiretype.
-FIELD_TYPE_TO_WIRE_TYPE = {
- _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
- _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
- _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
- _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
- _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
- _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
- _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
- _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
- _FieldDescriptor.TYPE_STRING:
- wire_format.WIRETYPE_LENGTH_DELIMITED,
- _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
- _FieldDescriptor.TYPE_MESSAGE:
- wire_format.WIRETYPE_LENGTH_DELIMITED,
- _FieldDescriptor.TYPE_BYTES:
- wire_format.WIRETYPE_LENGTH_DELIMITED,
- _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
- _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
- _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
- _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
- _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
- _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
- }
diff --git a/third_party/protobuf/python/google/protobuf/internal/well_known_types.py b/third_party/protobuf/python/google/protobuf/internal/well_known_types.py
deleted file mode 100644
index d35fcc5f45..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/well_known_types.py
+++ /dev/null
@@ -1,720 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Contains well known classes.
-
-This file defines well-known classes which need extra maintenance, including:
- - Any
- - Duration
- - FieldMask
- - Struct
- - Timestamp
-"""
-
-__author__ = 'jieluo@google.com (Jie Luo)'
-
-from datetime import datetime
-from datetime import timedelta
-import six
-
-from google.protobuf.descriptor import FieldDescriptor
-
-_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
-_NANOS_PER_SECOND = 1000000000
-_NANOS_PER_MILLISECOND = 1000000
-_NANOS_PER_MICROSECOND = 1000
-_MILLIS_PER_SECOND = 1000
-_MICROS_PER_SECOND = 1000000
-_SECONDS_PER_DAY = 24 * 3600
-
-
-class Error(Exception):
- """Top-level module error."""
-
-
-class ParseError(Error):
- """Thrown in case of parsing error."""
-
-
-class Any(object):
- """Class for Any Message type."""
-
- def Pack(self, msg, type_url_prefix='type.googleapis.com/'):
- """Packs the specified message into current Any message."""
- if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/':
- self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
- else:
- self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
- self.value = msg.SerializeToString()
-
- def Unpack(self, msg):
- """Unpacks the current Any message into specified message."""
- descriptor = msg.DESCRIPTOR
- if not self.Is(descriptor):
- return False
- msg.ParseFromString(self.value)
- return True
-
- def Is(self, descriptor):
- """Checks if this Any represents the given protobuf type."""
- # Only last part is to be used: b/25630112
- return self.type_url.split('/')[-1] == descriptor.full_name
-
-
-class Timestamp(object):
- """Class for Timestamp message type."""
-
- def ToJsonString(self):
- """Converts Timestamp to RFC 3339 date string format.
-
- Returns:
- A string converted from timestamp. The string is always Z-normalized
- and uses 3, 6 or 9 fractional digits as required to represent the
- exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
- """
- nanos = self.nanos % _NANOS_PER_SECOND
- total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND
- seconds = total_sec % _SECONDS_PER_DAY
- days = (total_sec - seconds) // _SECONDS_PER_DAY
- dt = datetime(1970, 1, 1) + timedelta(days, seconds)
-
- result = dt.isoformat()
- if (nanos % 1e9) == 0:
- # If there are 0 fractional digits, the fractional
- # point '.' should be omitted when serializing.
- return result + 'Z'
- if (nanos % 1e6) == 0:
- # Serialize 3 fractional digits.
- return result + '.%03dZ' % (nanos / 1e6)
- if (nanos % 1e3) == 0:
- # Serialize 6 fractional digits.
- return result + '.%06dZ' % (nanos / 1e3)
- # Serialize 9 fractional digits.
- return result + '.%09dZ' % nanos
-
- def FromJsonString(self, value):
- """Parse a RFC 3339 date string format to Timestamp.
-
- Args:
- value: A date string. Any fractional digits (or none) and any offset are
- accepted as long as they fit into nano-seconds precision.
- Example of accepted format: '1972-01-01T10:00:20.021-05:00'
-
- Raises:
- ParseError: On parsing problems.
- """
- timezone_offset = value.find('Z')
- if timezone_offset == -1:
- timezone_offset = value.find('+')
- if timezone_offset == -1:
- timezone_offset = value.rfind('-')
- if timezone_offset == -1:
- raise ParseError(
- 'Failed to parse timestamp: missing valid timezone offset.')
- time_value = value[0:timezone_offset]
- # Parse datetime and nanos.
- point_position = time_value.find('.')
- if point_position == -1:
- second_value = time_value
- nano_value = ''
- else:
- second_value = time_value[:point_position]
- nano_value = time_value[point_position + 1:]
- date_object = datetime.strptime(second_value, _TIMESTAMPFOMAT)
- td = date_object - datetime(1970, 1, 1)
- seconds = td.seconds + td.days * _SECONDS_PER_DAY
- if len(nano_value) > 9:
- raise ParseError(
- 'Failed to parse Timestamp: nanos {0} more than '
- '9 fractional digits.'.format(nano_value))
- if nano_value:
- nanos = round(float('0.' + nano_value) * 1e9)
- else:
- nanos = 0
- # Parse timezone offsets.
- if value[timezone_offset] == 'Z':
- if len(value) != timezone_offset + 1:
- raise ParseError('Failed to parse timestamp: invalid trailing'
- ' data {0}.'.format(value))
- else:
- timezone = value[timezone_offset:]
- pos = timezone.find(':')
- if pos == -1:
- raise ParseError(
- 'Invalid timezone offset value: {0}.'.format(timezone))
- if timezone[0] == '+':
- seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
- else:
- seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
- # Set seconds and nanos
- self.seconds = int(seconds)
- self.nanos = int(nanos)
-
- def GetCurrentTime(self):
- """Get the current UTC into Timestamp."""
- self.FromDatetime(datetime.utcnow())
-
- def ToNanoseconds(self):
- """Converts Timestamp to nanoseconds since epoch."""
- return self.seconds * _NANOS_PER_SECOND + self.nanos
-
- def ToMicroseconds(self):
- """Converts Timestamp to microseconds since epoch."""
- return (self.seconds * _MICROS_PER_SECOND +
- self.nanos // _NANOS_PER_MICROSECOND)
-
- def ToMilliseconds(self):
- """Converts Timestamp to milliseconds since epoch."""
- return (self.seconds * _MILLIS_PER_SECOND +
- self.nanos // _NANOS_PER_MILLISECOND)
-
- def ToSeconds(self):
- """Converts Timestamp to seconds since epoch."""
- return self.seconds
-
- def FromNanoseconds(self, nanos):
- """Converts nanoseconds since epoch to Timestamp."""
- self.seconds = nanos // _NANOS_PER_SECOND
- self.nanos = nanos % _NANOS_PER_SECOND
-
- def FromMicroseconds(self, micros):
- """Converts microseconds since epoch to Timestamp."""
- self.seconds = micros // _MICROS_PER_SECOND
- self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND
-
- def FromMilliseconds(self, millis):
- """Converts milliseconds since epoch to Timestamp."""
- self.seconds = millis // _MILLIS_PER_SECOND
- self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND
-
- def FromSeconds(self, seconds):
- """Converts seconds since epoch to Timestamp."""
- self.seconds = seconds
- self.nanos = 0
-
- def ToDatetime(self):
- """Converts Timestamp to datetime."""
- return datetime.utcfromtimestamp(
- self.seconds + self.nanos / float(_NANOS_PER_SECOND))
-
- def FromDatetime(self, dt):
- """Converts datetime to Timestamp."""
- td = dt - datetime(1970, 1, 1)
- self.seconds = td.seconds + td.days * _SECONDS_PER_DAY
- self.nanos = td.microseconds * _NANOS_PER_MICROSECOND
-
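-# A minimal illustration of the mixin above. The generated Timestamp message
-# normally mixes this class in; since the methods only read and write the
-# plain `seconds` and `nanos` attributes, they can be exercised directly:
-_ts_example = Timestamp()
-_ts_example.FromJsonString('1972-01-01T10:00:20.021Z')
-assert (_ts_example.seconds, _ts_example.nanos) == (63108020, 21000000)
-assert _ts_example.ToJsonString() == '1972-01-01T10:00:20.021Z'
-assert _ts_example.ToMilliseconds() == 63108020021
-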
-
-class Duration(object):
- """Class for Duration message type."""
-
- def ToJsonString(self):
- """Converts Duration to string format.
-
- Returns:
- A string converted from self. The string format will contain
- 3, 6, or 9 fractional digits depending on the precision required to
- represent the exact Duration value. For example: "1s", "1.010s",
- "1.000000100s", "-3.100s"
- """
- if self.seconds < 0 or self.nanos < 0:
- result = '-'
- seconds = - self.seconds + int((0 - self.nanos) // 1e9)
- nanos = (0 - self.nanos) % 1e9
- else:
- result = ''
- seconds = self.seconds + int(self.nanos // 1e9)
- nanos = self.nanos % 1e9
- result += '%d' % seconds
- if (nanos % 1e9) == 0:
- # If there are 0 fractional digits, the fractional
- # point '.' should be omitted when serializing.
- return result + 's'
- if (nanos % 1e6) == 0:
- # Serialize 3 fractional digits.
- return result + '.%03ds' % (nanos / 1e6)
- if (nanos % 1e3) == 0:
- # Serialize 6 fractional digits.
- return result + '.%06ds' % (nanos / 1e3)
- # Serialize 9 fractional digits.
- return result + '.%09ds' % nanos
-
- def FromJsonString(self, value):
- """Converts a string to Duration.
-
- Args:
- value: A string to be converted. The string must end with 's'. Any
- fractional digits (or none) are accepted as long as they fit into
- precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s"
-
- Raises:
- ParseError: On parsing problems.
- """
- if len(value) < 1 or value[-1] != 's':
- raise ParseError(
- 'Duration must end with letter "s": {0}.'.format(value))
- try:
- pos = value.find('.')
- if pos == -1:
- self.seconds = int(value[:-1])
- self.nanos = 0
- else:
- self.seconds = int(value[:pos])
- if value[0] == '-':
- self.nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
- else:
- self.nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
- except ValueError:
- raise ParseError(
- 'Couldn\'t parse duration: {0}.'.format(value))
-
- def ToNanoseconds(self):
- """Converts a Duration to nanoseconds."""
- return self.seconds * _NANOS_PER_SECOND + self.nanos
-
- def ToMicroseconds(self):
- """Converts a Duration to microseconds."""
- micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
- return self.seconds * _MICROS_PER_SECOND + micros
-
- def ToMilliseconds(self):
- """Converts a Duration to milliseconds."""
- millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
- return self.seconds * _MILLIS_PER_SECOND + millis
-
- def ToSeconds(self):
- """Converts a Duration to seconds."""
- return self.seconds
-
- def FromNanoseconds(self, nanos):
- """Converts nanoseconds to Duration."""
- self._NormalizeDuration(nanos // _NANOS_PER_SECOND,
- nanos % _NANOS_PER_SECOND)
-
- def FromMicroseconds(self, micros):
- """Converts microseconds to Duration."""
- self._NormalizeDuration(
- micros // _MICROS_PER_SECOND,
- (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND)
-
- def FromMilliseconds(self, millis):
- """Converts milliseconds to Duration."""
- self._NormalizeDuration(
- millis // _MILLIS_PER_SECOND,
- (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND)
-
- def FromSeconds(self, seconds):
- """Converts seconds to Duration."""
- self.seconds = seconds
- self.nanos = 0
-
- def ToTimedelta(self):
- """Converts Duration to timedelta."""
- return timedelta(
- seconds=self.seconds, microseconds=_RoundTowardZero(
- self.nanos, _NANOS_PER_MICROSECOND))
-
- def FromTimedelta(self, td):
- """Convertd timedelta to Duration."""
- self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY,
- td.microseconds * _NANOS_PER_MICROSECOND)
-
- def _NormalizeDuration(self, seconds, nanos):
- """Set Duration by seconds and nonas."""
- # Force nanos to be negative if the duration is negative.
- if seconds < 0 and nanos > 0:
- seconds += 1
- nanos -= _NANOS_PER_SECOND
- self.seconds = seconds
- self.nanos = nanos
-
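-# A minimal illustration of the Duration mixin above, exercised as a plain
-# object (the JSON methods only touch the `seconds` and `nanos` attributes):
-_dur_example = Duration()
-_dur_example.FromJsonString('-3.100s')
-assert (_dur_example.seconds, _dur_example.nanos) == (-3, -100000000)
-assert _dur_example.ToJsonString() == '-3.100s'
-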
-
-def _RoundTowardZero(value, divider):
- """Truncates the remainder part after division."""
- # For some languages, the sign of the remainder is implementation
- # dependent if any of the operands is negative. Here we enforce
- # "rounded toward zero" semantics. For example, for (-5) / 2 an
- # implementation may give -3 as the result with the remainder being
- # 1. This function ensures we always return -2 (closer to zero).
- result = value // divider
- remainder = value % divider
- if result < 0 and remainder > 0:
- return result + 1
- else:
- return result
-
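-# Example of the semantics described above: Python's floor division rounds
-# toward negative infinity, while this helper rounds toward zero.
-assert -5 // 2 == -3
-assert _RoundTowardZero(-5, 2) == -2
-assert _RoundTowardZero(5, 2) == 2
-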
-
-class FieldMask(object):
- """Class for FieldMask message type."""
-
- def ToJsonString(self):
- """Converts FieldMask to string according to proto3 JSON spec."""
- return ','.join(self.paths)
-
- def FromJsonString(self, value):
- """Converts string to FieldMask according to proto3 JSON spec."""
- self.Clear()
- for path in value.split(','):
- self.paths.append(path)
-
- def IsValidForDescriptor(self, message_descriptor):
- """Checks whether the FieldMask is valid for Message Descriptor."""
- for path in self.paths:
- if not _IsValidPath(message_descriptor, path):
- return False
- return True
-
- def AllFieldsFromDescriptor(self, message_descriptor):
- """Gets all direct fields of Message Descriptor to FieldMask."""
- self.Clear()
- for field in message_descriptor.fields:
- self.paths.append(field.name)
-
- def CanonicalFormFromMask(self, mask):
- """Converts a FieldMask to the canonical form.
-
- Removes paths that are covered by another path. For example,
- "foo.bar" is covered by "foo" and will be removed if "foo"
- is also in the FieldMask. Then sorts all paths in alphabetical order.
-
- Args:
- mask: The original FieldMask to be converted.
- """
- tree = _FieldMaskTree(mask)
- tree.ToFieldMask(self)
-
- def Union(self, mask1, mask2):
- """Merges mask1 and mask2 into this FieldMask."""
- _CheckFieldMaskMessage(mask1)
- _CheckFieldMaskMessage(mask2)
- tree = _FieldMaskTree(mask1)
- tree.MergeFromFieldMask(mask2)
- tree.ToFieldMask(self)
-
- def Intersect(self, mask1, mask2):
- """Intersects mask1 and mask2 into this FieldMask."""
- _CheckFieldMaskMessage(mask1)
- _CheckFieldMaskMessage(mask2)
- tree = _FieldMaskTree(mask1)
- intersection = _FieldMaskTree()
- for path in mask2.paths:
- tree.IntersectPath(path, intersection)
- intersection.ToFieldMask(self)
-
- def MergeMessage(
- self, source, destination,
- replace_message_field=False, replace_repeated_field=False):
- """Merges fields specified in FieldMask from source to destination.
-
- Args:
- source: Source message.
- destination: The destination message to be merged into.
- replace_message_field: Replace message field if True. Merge message
- field if False.
- replace_repeated_field: Replace repeated field if True. Append
- elements of repeated field if False.
- """
- tree = _FieldMaskTree(self)
- tree.MergeMessage(
- source, destination, replace_message_field, replace_repeated_field)
-
-
-def _IsValidPath(message_descriptor, path):
- """Checks whether the path is valid for Message Descriptor."""
- parts = path.split('.')
- last = parts.pop()
- for name in parts:
- field = message_descriptor.fields_by_name[name]
- if (field is None or
- field.label == FieldDescriptor.LABEL_REPEATED or
- field.type != FieldDescriptor.TYPE_MESSAGE):
- return False
- message_descriptor = field.message_type
- return last in message_descriptor.fields_by_name
-
-
-def _CheckFieldMaskMessage(message):
- """Raises ValueError if message is not a FieldMask."""
- message_descriptor = message.DESCRIPTOR
- if (message_descriptor.name != 'FieldMask' or
- message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
- raise ValueError('Message {0} is not a FieldMask.'.format(
- message_descriptor.full_name))
-
-
-class _FieldMaskTree(object):
- """Represents a FieldMask in a tree structure.
-
- For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
- the FieldMaskTree will be:
-      [_root] -+- foo -+- bar
-               |       |
-               |       +- baz
-               |
-               +- bar --- baz
- In the tree, each leaf node represents a field path.
- """
-
- def __init__(self, field_mask=None):
- """Initializes the tree by FieldMask."""
- self._root = {}
- if field_mask:
- self.MergeFromFieldMask(field_mask)
-
- def MergeFromFieldMask(self, field_mask):
- """Merges a FieldMask to the tree."""
- for path in field_mask.paths:
- self.AddPath(path)
-
- def AddPath(self, path):
- """Adds a field path into the tree.
-
- If the field path to add is a sub-path of an existing field path
- in the tree (i.e., a leaf node), it means the tree already matches
- the given path so nothing will be added to the tree. If the path
- matches an existing non-leaf node in the tree, that non-leaf node
- will be turned into a leaf node with all its children removed because
- the path matches all the node's children. Otherwise, a new path will
- be added.
-
- Args:
- path: The field path to add.
- """
- node = self._root
- for name in path.split('.'):
- if name not in node:
- node[name] = {}
- elif not node[name]:
- # Pre-existing empty node implies we already have this entire tree.
- return
- node = node[name]
- # Remove any sub-trees we might have had.
- node.clear()
-
- def ToFieldMask(self, field_mask):
- """Converts the tree to a FieldMask."""
- field_mask.Clear()
- _AddFieldPaths(self._root, '', field_mask)
-
- def IntersectPath(self, path, intersection):
- """Calculates the intersection part of a field path with this tree.
-
- Args:
- path: The field path to calculate.
- intersection: The output tree recording the intersection part.
- """
- node = self._root
- for name in path.split('.'):
- if name not in node:
- return
- elif not node[name]:
- intersection.AddPath(path)
- return
- node = node[name]
- intersection.AddLeafNodes(path, node)
-
- def AddLeafNodes(self, prefix, node):
- """Adds leaf nodes begin with prefix to this tree."""
- if not node:
- self.AddPath(prefix)
- for name in node:
- child_path = prefix + '.' + name
- self.AddLeafNodes(child_path, node[name])
-
- def MergeMessage(
- self, source, destination,
- replace_message, replace_repeated):
- """Merge all fields specified by this tree from source to destination."""
- _MergeMessage(
- self._root, source, destination, replace_message, replace_repeated)
-
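-# A minimal illustration of the collapsing behavior described in AddPath
-# (inspecting the internal _root dict for brevity): adding a parent path
-# collapses existing child paths, and sub-paths of an existing leaf are
-# ignored.
-_tree_example = _FieldMaskTree()
-_tree_example.AddPath('foo.bar')
-_tree_example.AddPath('bar.baz')
-_tree_example.AddPath('foo')       # collapses 'foo.bar' into 'foo'
-assert _tree_example._root == {'foo': {}, 'bar': {'baz': {}}}
-_tree_example.AddPath('foo.quux')  # already covered by 'foo'; nothing added
-assert _tree_example._root == {'foo': {}, 'bar': {'baz': {}}}
-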
-
-def _StrConvert(value):
- """Converts value to str if it is not."""
- # This file is imported by the C extension, and some methods like ClearField
- # require a string for the field name. Python 2 and 3 have different text
- # types and may use unicode.
- if not isinstance(value, str):
- return value.encode('utf-8')
- return value
-
-
-def _MergeMessage(
- node, source, destination, replace_message, replace_repeated):
- """Merge all fields specified by a sub-tree from source to destination."""
- source_descriptor = source.DESCRIPTOR
- for name in node:
- child = node[name]
- field = source_descriptor.fields_by_name[name]
- if field is None:
- raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
- name, source_descriptor.full_name))
- if child:
- # Sub-paths are only allowed for singular message fields.
- if (field.label == FieldDescriptor.LABEL_REPEATED or
- field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
- raise ValueError('Error: Field {0} in message {1} is not a singular '
- 'message field and cannot have sub-fields.'.format(
- name, source_descriptor.full_name))
- _MergeMessage(
- child, getattr(source, name), getattr(destination, name),
- replace_message, replace_repeated)
- continue
- if field.label == FieldDescriptor.LABEL_REPEATED:
- if replace_repeated:
- destination.ClearField(_StrConvert(name))
- repeated_source = getattr(source, name)
- repeated_destination = getattr(destination, name)
- if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
- for item in repeated_source:
- repeated_destination.add().MergeFrom(item)
- else:
- repeated_destination.extend(repeated_source)
- else:
- if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
- if replace_message:
- destination.ClearField(_StrConvert(name))
- if source.HasField(name):
- getattr(destination, name).MergeFrom(getattr(source, name))
- else:
- setattr(destination, name, getattr(source, name))
-
-
-def _AddFieldPaths(node, prefix, field_mask):
- """Adds the field paths descended from node to field_mask."""
- if not node:
- field_mask.paths.append(prefix)
- return
- for name in sorted(node):
- if prefix:
- child_path = prefix + '.' + name
- else:
- child_path = name
- _AddFieldPaths(node[name], child_path, field_mask)
-
-
-_INT_OR_FLOAT = six.integer_types + (float,)
-
-
-def _SetStructValue(struct_value, value):
- if value is None:
- struct_value.null_value = 0
- elif isinstance(value, bool):
- # Note: this check must come before the number check because in Python
- # True and False are also considered numbers.
- struct_value.bool_value = value
- elif isinstance(value, six.string_types):
- struct_value.string_value = value
- elif isinstance(value, _INT_OR_FLOAT):
- struct_value.number_value = value
- else:
- raise ValueError('Unexpected type')
-
-
-def _GetStructValue(struct_value):
- which = struct_value.WhichOneof('kind')
- if which == 'struct_value':
- return struct_value.struct_value
- elif which == 'null_value':
- return None
- elif which == 'number_value':
- return struct_value.number_value
- elif which == 'string_value':
- return struct_value.string_value
- elif which == 'bool_value':
- return struct_value.bool_value
- elif which == 'list_value':
- return struct_value.list_value
- elif which is None:
- raise ValueError('Value not set')
-
-
-class Struct(object):
- """Class for Struct message type."""
-
- __slots__ = []
-
- def __getitem__(self, key):
- return _GetStructValue(self.fields[key])
-
- def __setitem__(self, key, value):
- _SetStructValue(self.fields[key], value)
-
- def get_or_create_list(self, key):
- """Returns a list for this key, creating if it didn't exist already."""
- return self.fields[key].list_value
-
- def get_or_create_struct(self, key):
- """Returns a struct for this key, creating if it didn't exist already."""
- return self.fields[key].struct_value
-
- # TODO(haberman): allow constructing/merging from dict.
-
-
-class ListValue(object):
- """Class for ListValue message type."""
-
- def __len__(self):
- return len(self.values)
-
- def append(self, value):
- _SetStructValue(self.values.add(), value)
-
- def extend(self, elem_seq):
- for value in elem_seq:
- self.append(value)
-
- def __getitem__(self, index):
- """Retrieves item by the specified index."""
- return _GetStructValue(self.values.__getitem__(index))
-
- def __setitem__(self, index, value):
- _SetStructValue(self.values.__getitem__(index), value)
-
- def items(self):
- for i in range(len(self)):
- yield self[i]
-
- def add_struct(self):
- """Appends and returns a struct value as the next value in the list."""
- return self.values.add().struct_value
-
- def add_list(self):
- """Appends and returns a list value as the next value in the list."""
- return self.values.add().list_value
-
-
-WKTBASES = {
- 'google.protobuf.Any': Any,
- 'google.protobuf.Duration': Duration,
- 'google.protobuf.FieldMask': FieldMask,
- 'google.protobuf.ListValue': ListValue,
- 'google.protobuf.Struct': Struct,
- 'google.protobuf.Timestamp': Timestamp,
-}
diff --git a/third_party/protobuf/python/google/protobuf/internal/wire_format.py b/third_party/protobuf/python/google/protobuf/internal/wire_format.py
deleted file mode 100755
index 883f525585..0000000000
--- a/third_party/protobuf/python/google/protobuf/internal/wire_format.py
+++ /dev/null
@@ -1,268 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Constants and static functions to support protocol buffer wire format."""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-import struct
-from google.protobuf import descriptor
-from google.protobuf import message
-
-
-TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag.
-TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7
-
-# These numbers identify the wire type of a protocol buffer value.
-# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
-# tag-and-type to store one of these WIRETYPE_* constants.
-# These values must match WireType enum in google/protobuf/wire_format.h.
-WIRETYPE_VARINT = 0
-WIRETYPE_FIXED64 = 1
-WIRETYPE_LENGTH_DELIMITED = 2
-WIRETYPE_START_GROUP = 3
-WIRETYPE_END_GROUP = 4
-WIRETYPE_FIXED32 = 5
-_WIRETYPE_MAX = 5
-
-
-# Bounds for various integer types.
-INT32_MAX = int((1 << 31) - 1)
-INT32_MIN = int(-(1 << 31))
-UINT32_MAX = (1 << 32) - 1
-
-INT64_MAX = (1 << 63) - 1
-INT64_MIN = -(1 << 63)
-UINT64_MAX = (1 << 64) - 1
-
-# "struct" format strings that will encode/decode the specified formats.
-FORMAT_UINT32_LITTLE_ENDIAN = '<I'
-FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
-FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
-FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
-
-
-# We'll have to provide alternate implementations of AppendLittleEndian*() on
-# any architectures where these checks fail.
-if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
- raise AssertionError('Format "I" is not a 32-bit number.')
-if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
- raise AssertionError('Format "Q" is not a 64-bit number.')
-
-
-def PackTag(field_number, wire_type):
- """Returns an unsigned 32-bit integer that encodes the field number and
- wire type information in standard protocol message wire format.
-
- Args:
- field_number: Expected to be an integer in the range [1, 1 << 29)
- wire_type: One of the WIRETYPE_* constants.
- """
- if not 0 <= wire_type <= _WIRETYPE_MAX:
- raise message.EncodeError('Unknown wire type: %d' % wire_type)
- return (field_number << TAG_TYPE_BITS) | wire_type
-
-
-def UnpackTag(tag):
- """The inverse of PackTag(). Given an unsigned 32-bit number,
- returns a (field_number, wire_type) tuple.
- """
- return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
-
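-# Example of the tag layout above: field number 1 with WIRETYPE_VARINT (0)
-# packs to (1 << 3) | 0 == 8, and UnpackTag recovers both pieces.
-assert PackTag(1, WIRETYPE_VARINT) == 8
-assert PackTag(16, WIRETYPE_LENGTH_DELIMITED) == 130
-assert UnpackTag(130) == (16, WIRETYPE_LENGTH_DELIMITED)
-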
-
-def ZigZagEncode(value):
- """ZigZag Transform: Encodes signed integers so that they can be
- effectively used with varint encoding. See wire_format.h for
- more details.
- """
- if value >= 0:
- return value << 1
- return (value << 1) ^ (~0)
-
-
-def ZigZagDecode(value):
- """Inverse of ZigZagEncode()."""
- if not value & 0x1:
- return value >> 1
- return (value >> 1) ^ (~0)
-
-
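-# Example of the ZigZag mapping: small magnitudes, positive or negative,
-# stay small so they varint-encode compactly, and ZigZagDecode inverts it.
-assert [ZigZagEncode(v) for v in (0, -1, 1, -2, 2)] == [0, 1, 2, 3, 4]
-assert ZigZagDecode(ZigZagEncode(-123456789)) == -123456789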
-
-# The *ByteSize() functions below return the number of bytes required to
-# serialize "field number + type" information and then serialize the value.
-
-
-def Int32ByteSize(field_number, int32):
- return Int64ByteSize(field_number, int32)
-
-
-def Int32ByteSizeNoTag(int32):
- return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
-
-
-def Int64ByteSize(field_number, int64):
- # Have to convert to uint before calling UInt64ByteSize().
- return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
-
-
-def UInt32ByteSize(field_number, uint32):
- return UInt64ByteSize(field_number, uint32)
-
-
-def UInt64ByteSize(field_number, uint64):
- return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
-
-
-def SInt32ByteSize(field_number, int32):
- return UInt32ByteSize(field_number, ZigZagEncode(int32))
-
-
-def SInt64ByteSize(field_number, int64):
- return UInt64ByteSize(field_number, ZigZagEncode(int64))
-
-
-def Fixed32ByteSize(field_number, fixed32):
- return TagByteSize(field_number) + 4
-
-
-def Fixed64ByteSize(field_number, fixed64):
- return TagByteSize(field_number) + 8
-
-
-def SFixed32ByteSize(field_number, sfixed32):
- return TagByteSize(field_number) + 4
-
-
-def SFixed64ByteSize(field_number, sfixed64):
- return TagByteSize(field_number) + 8
-
-
-def FloatByteSize(field_number, flt):
- return TagByteSize(field_number) + 4
-
-
-def DoubleByteSize(field_number, double):
- return TagByteSize(field_number) + 8
-
-
-def BoolByteSize(field_number, b):
- return TagByteSize(field_number) + 1
-
-
-def EnumByteSize(field_number, enum):
- return UInt32ByteSize(field_number, enum)
-
-
-def StringByteSize(field_number, string):
- return BytesByteSize(field_number, string.encode('utf-8'))
-
-
-def BytesByteSize(field_number, b):
- return (TagByteSize(field_number)
- + _VarUInt64ByteSizeNoTag(len(b))
- + len(b))
-
-
-def GroupByteSize(field_number, message):
- return (2 * TagByteSize(field_number) # START and END group.
- + message.ByteSize())
-
-
-def MessageByteSize(field_number, message):
- return (TagByteSize(field_number)
- + _VarUInt64ByteSizeNoTag(message.ByteSize())
- + message.ByteSize())
-
-
-def MessageSetItemByteSize(field_number, msg):
- # First compute the sizes of the tags.
- # There are 2 tags for the beginning and ending of the repeated group, that
- # is field number 1, one with field number 2 (type_id) and one with field
- # number 3 (message).
- total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))
-
- # Add the number of bytes for type_id.
- total_size += _VarUInt64ByteSizeNoTag(field_number)
-
- message_size = msg.ByteSize()
-
- # The number of bytes for encoding the length of the message.
- total_size += _VarUInt64ByteSizeNoTag(message_size)
-
- # The size of the message.
- total_size += message_size
- return total_size
-
-
-def TagByteSize(field_number):
- """Returns the bytes required to serialize a tag with this field number."""
- # Just pass in type 0, since the type won't affect the tag+type size.
- return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
-
-
-# Private helper function for the *ByteSize() functions above.
-
-def _VarUInt64ByteSizeNoTag(uint64):
- """Returns the number of bytes required to serialize a single varint
- using boundary value comparisons. (unrolled loop optimization -WPierce)
- uint64 must be unsigned.
- """
- if uint64 <= 0x7f: return 1
- if uint64 <= 0x3fff: return 2
- if uint64 <= 0x1fffff: return 3
- if uint64 <= 0xfffffff: return 4
- if uint64 <= 0x7ffffffff: return 5
- if uint64 <= 0x3ffffffffff: return 6
- if uint64 <= 0x1ffffffffffff: return 7
- if uint64 <= 0xffffffffffffff: return 8
- if uint64 <= 0x7fffffffffffffff: return 9
- if uint64 > UINT64_MAX:
- raise message.EncodeError('Value out of range: %d' % uint64)
- return 10
-
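-# Example of the boundaries above: each additional byte carries 7 bits of
-# payload, so the size steps up at 2**7, 2**14, 2**21, and so on.
-assert _VarUInt64ByteSizeNoTag(127) == 1
-assert _VarUInt64ByteSizeNoTag(128) == 2
-assert _VarUInt64ByteSizeNoTag(16384) == 3
-assert _VarUInt64ByteSizeNoTag(UINT64_MAX) == 10
-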
-
-NON_PACKABLE_TYPES = (
- descriptor.FieldDescriptor.TYPE_STRING,
- descriptor.FieldDescriptor.TYPE_GROUP,
- descriptor.FieldDescriptor.TYPE_MESSAGE,
- descriptor.FieldDescriptor.TYPE_BYTES
-)
-
-
-def IsTypePackable(field_type):
- """Return true iff packable = true is valid for fields of this type.
-
- Args:
- field_type: a FieldDescriptor::Type value.
-
- Returns:
- True iff fields of this type are packable.
- """
- return field_type not in NON_PACKABLE_TYPES
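-
-
-# Example: varint and fixed-width scalar types may be declared packed, while
-# length-delimited and group types may not.
-assert IsTypePackable(descriptor.FieldDescriptor.TYPE_INT32)
-assert not IsTypePackable(descriptor.FieldDescriptor.TYPE_STRING)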
diff --git a/third_party/protobuf/python/google/protobuf/json_format.py b/third_party/protobuf/python/google/protobuf/json_format.py
deleted file mode 100644
index 23382bdb05..0000000000
--- a/third_party/protobuf/python/google/protobuf/json_format.py
+++ /dev/null
@@ -1,645 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Contains routines for printing protocol messages in JSON format.
-
-Simple usage example:
-
- # Create a proto object and serialize it to a json format string.
- message = my_proto_pb2.MyMessage(foo='bar')
- json_string = json_format.MessageToJson(message)
-
- # Parse a json format string to proto object.
- message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
-"""
-
-__author__ = 'jieluo@google.com (Jie Luo)'
-
-import base64
-import json
-import math
-import six
-import sys
-
-from google.protobuf import descriptor
-from google.protobuf import symbol_database
-
-_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
-_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32,
- descriptor.FieldDescriptor.CPPTYPE_UINT32,
- descriptor.FieldDescriptor.CPPTYPE_INT64,
- descriptor.FieldDescriptor.CPPTYPE_UINT64])
-_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64,
- descriptor.FieldDescriptor.CPPTYPE_UINT64])
-_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
- descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
-_INFINITY = 'Infinity'
-_NEG_INFINITY = '-Infinity'
-_NAN = 'NaN'
-
-
-class Error(Exception):
- """Top-level module error for json_format."""
-
-
-class SerializeToJsonError(Error):
- """Thrown if serialization to JSON fails."""
-
-
-class ParseError(Error):
- """Thrown in case of parsing error."""
-
-
-def MessageToJson(message, including_default_value_fields=False):
- """Converts protobuf message to JSON format.
-
- Args:
- message: The protocol buffers message instance to serialize.
- including_default_value_fields: If True, singular primitive fields,
- repeated fields, and map fields will always be serialized. If
- False, only serialize non-empty fields. Singular message fields
- and oneof fields are not affected by this option.
-
- Returns:
- A string containing the JSON formatted protocol buffer message.
- """
- js = _MessageToJsonObject(message, including_default_value_fields)
- return json.dumps(js, indent=2)
-
-
-def _MessageToJsonObject(message, including_default_value_fields):
- """Converts message to an object according to Proto3 JSON Specification."""
- message_descriptor = message.DESCRIPTOR
- full_name = message_descriptor.full_name
- if _IsWrapperMessage(message_descriptor):
- return _WrapperMessageToJsonObject(message)
- if full_name in _WKTJSONMETHODS:
- return _WKTJSONMETHODS[full_name][0](
- message, including_default_value_fields)
- js = {}
- return _RegularMessageToJsonObject(
- message, js, including_default_value_fields)
-
-
-def _IsMapEntry(field):
- return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
- field.message_type.has_options and
- field.message_type.GetOptions().map_entry)
-
-
-def _RegularMessageToJsonObject(message, js, including_default_value_fields):
- """Converts normal message according to Proto3 JSON Specification."""
- fields = message.ListFields()
- include_default = including_default_value_fields
-
- try:
- for field, value in fields:
- name = field.camelcase_name
- if _IsMapEntry(field):
- # Convert a map field.
- v_field = field.message_type.fields_by_name['value']
- js_map = {}
- for key in value:
- if isinstance(key, bool):
- if key:
- recorded_key = 'true'
- else:
- recorded_key = 'false'
- else:
- recorded_key = key
- js_map[recorded_key] = _FieldToJsonObject(
- v_field, value[key], including_default_value_fields)
- js[name] = js_map
- elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
- # Convert a repeated field.
- js[name] = [_FieldToJsonObject(field, k, include_default)
- for k in value]
- else:
- js[name] = _FieldToJsonObject(field, value, include_default)
-
- # Serialize default value if including_default_value_fields is True.
- if including_default_value_fields:
- message_descriptor = message.DESCRIPTOR
- for field in message_descriptor.fields:
- # Singular message fields and oneof fields will not be affected.
- if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and
- field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or
- field.containing_oneof):
- continue
- name = field.camelcase_name
- if name in js:
- # Skip the field which has been serialized already.
- continue
- if _IsMapEntry(field):
- js[name] = {}
- elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
- js[name] = []
- else:
- js[name] = _FieldToJsonObject(field, field.default_value)
-
- except ValueError as e:
- raise SerializeToJsonError(
- 'Failed to serialize {0} field: {1}.'.format(field.name, e))
-
- return js
-
-
-def _FieldToJsonObject(
- field, value, including_default_value_fields=False):
- """Converts field value according to Proto3 JSON Specification."""
- if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
- return _MessageToJsonObject(value, including_default_value_fields)
- elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
- enum_value = field.enum_type.values_by_number.get(value, None)
- if enum_value is not None:
- return enum_value.name
- else:
- raise SerializeToJsonError('Enum field contains an integer value '
- 'which cannot be mapped to an enum value.')
- elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
- if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
- # Use base64 Data encoding for bytes
- return base64.b64encode(value).decode('utf-8')
- else:
- return value
- elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
- return bool(value)
- elif field.cpp_type in _INT64_TYPES:
- return str(value)
- elif field.cpp_type in _FLOAT_TYPES:
- if math.isinf(value):
- if value < 0.0:
- return _NEG_INFINITY
- else:
- return _INFINITY
- if math.isnan(value):
- return _NAN
- return value
-
-
-def _AnyMessageToJsonObject(message, including_default):
- """Converts Any message according to Proto3 JSON Specification."""
- if not message.ListFields():
- return {}
- js = {}
- type_url = message.type_url
- js['@type'] = type_url
- sub_message = _CreateMessageFromTypeUrl(type_url)
- sub_message.ParseFromString(message.value)
- message_descriptor = sub_message.DESCRIPTOR
- full_name = message_descriptor.full_name
- if _IsWrapperMessage(message_descriptor):
- js['value'] = _WrapperMessageToJsonObject(sub_message)
- return js
- if full_name in _WKTJSONMETHODS:
- js['value'] = _WKTJSONMETHODS[full_name][0](sub_message, including_default)
- return js
- return _RegularMessageToJsonObject(sub_message, js, including_default)
-
-
-def _CreateMessageFromTypeUrl(type_url):
- # TODO(jieluo): Should add a way that users can register the type resolver
- # instead of the default one.
- db = symbol_database.Default()
- type_name = type_url.split('/')[-1]
- try:
- message_descriptor = db.pool.FindMessageTypeByName(type_name)
- except KeyError:
- raise TypeError(
- 'Can not find message descriptor by type_url: {0}.'.format(type_url))
- message_class = db.GetPrototype(message_descriptor)
- return message_class()
-
-
-def _GenericMessageToJsonObject(message, unused_including_default):
- """Converts message by ToJsonString according to Proto3 JSON Specification."""
- # Duration, Timestamp and FieldMask have a ToJsonString method to do the
- # conversion. Users can also call the method directly.
- return message.ToJsonString()
-
-
-def _ValueMessageToJsonObject(message, unused_including_default=False):
- """Converts Value message according to Proto3 JSON Specification."""
- which = message.WhichOneof('kind')
- # If the Value message is not set, treat it as null_value when serializing
- # to JSON. The parsed-back result will then differ from the original message.
- if which is None or which == 'null_value':
- return None
- if which == 'list_value':
- return _ListValueMessageToJsonObject(message.list_value)
- if which == 'struct_value':
- value = message.struct_value
- else:
- value = getattr(message, which)
- oneof_descriptor = message.DESCRIPTOR.fields_by_name[which]
- return _FieldToJsonObject(oneof_descriptor, value)
-
-
-def _ListValueMessageToJsonObject(message, unused_including_default=False):
- """Converts ListValue message according to Proto3 JSON Specification."""
- return [_ValueMessageToJsonObject(value)
- for value in message.values]
-
-
-def _StructMessageToJsonObject(message, unused_including_default=False):
- """Converts Struct message according to Proto3 JSON Specification."""
- fields = message.fields
- js = {}
- for key in fields.keys():
- js[key] = _ValueMessageToJsonObject(fields[key])
- return js
-
-
-def _IsWrapperMessage(message_descriptor):
- return message_descriptor.file.name == 'google/protobuf/wrappers.proto'
-
-
-def _WrapperMessageToJsonObject(message):
- return _FieldToJsonObject(
- message.DESCRIPTOR.fields_by_name['value'], message.value)
-
-
-def _DuplicateChecker(js):
- result = {}
- for name, value in js:
- if name in result:
- raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name))
- result[name] = value
- return result
-
-
-def Parse(text, message):
- """Parses a JSON representation of a protocol message into a message.
-
- Args:
- text: Message JSON representation.
- message: A protocol buffer message to merge into.
-
- Returns:
- The same message passed as argument.
-
- Raises:
- ParseError: On JSON parsing problems.
- """
- if not isinstance(text, six.text_type): text = text.decode('utf-8')
- try:
- if sys.version_info < (2, 7):
- # object_pairs_hook is not supported before Python 2.7
- js = json.loads(text)
- else:
- js = json.loads(text, object_pairs_hook=_DuplicateChecker)
- except ValueError as e:
- raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
- _ConvertMessage(js, message)
- return message
-
-
-def _ConvertFieldValuePair(js, message):
- """Convert field value pairs into regular message.
-
- Args:
- js: A JSON object containing the field/value pairs to convert.
- message: A regular protocol message to record the data.
-
- Raises:
- ParseError: In case of problems converting.
- """
- names = []
- message_descriptor = message.DESCRIPTOR
- for name in js:
- try:
- field = message_descriptor.fields_by_camelcase_name.get(name, None)
- if not field:
- raise ParseError(
- 'Message type "{0}" has no field named "{1}".'.format(
- message_descriptor.full_name, name))
- if name in names:
- raise ParseError(
- 'Message type "{0}" should not have multiple "{1}" fields.'.format(
- message.DESCRIPTOR.full_name, name))
- names.append(name)
- # Check no other oneof field is parsed.
- if field.containing_oneof is not None:
- oneof_name = field.containing_oneof.name
- if oneof_name in names:
- raise ParseError('Message type "{0}" should not have multiple "{1}" '
- 'oneof fields.'.format(
- message.DESCRIPTOR.full_name, oneof_name))
- names.append(oneof_name)
-
- value = js[name]
- if value is None:
- message.ClearField(field.name)
- continue
-
- # Parse field value.
- if _IsMapEntry(field):
- message.ClearField(field.name)
- _ConvertMapFieldValue(value, message, field)
- elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
- message.ClearField(field.name)
- if not isinstance(value, list):
- raise ParseError('repeated field {0} must be in [] which is '
- '{1}.'.format(name, value))
- if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
- # Repeated message field.
- for item in value:
- sub_message = getattr(message, field.name).add()
- # None is a null_value in Value.
- if (item is None and
- sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'):
- raise ParseError('null is not allowed to be used as an element'
- ' in a repeated field.')
- _ConvertMessage(item, sub_message)
- else:
- # Repeated scalar field.
- for item in value:
- if item is None:
- raise ParseError('null is not allowed to be used as an element'
- ' in a repeated field.')
- getattr(message, field.name).append(
- _ConvertScalarFieldValue(item, field))
- elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
- sub_message = getattr(message, field.name)
- _ConvertMessage(value, sub_message)
- else:
- setattr(message, field.name, _ConvertScalarFieldValue(value, field))
- except ParseError as e:
- if field and field.containing_oneof is None:
- raise ParseError('Failed to parse {0} field: {1}'.format(name, e))
- else:
- raise ParseError(str(e))
- except ValueError as e:
- raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
- except TypeError as e:
- raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
-
-
-def _ConvertMessage(value, message):
- """Convert a JSON object into a message.
-
- Args:
- value: A JSON object.
- message: A WKT or regular protocol message to record the data.
-
- Raises:
- ParseError: In case of conversion problems.
- """
- message_descriptor = message.DESCRIPTOR
- full_name = message_descriptor.full_name
- if _IsWrapperMessage(message_descriptor):
- _ConvertWrapperMessage(value, message)
- elif full_name in _WKTJSONMETHODS:
- _WKTJSONMETHODS[full_name][1](value, message)
- else:
- _ConvertFieldValuePair(value, message)
-
-
-def _ConvertAnyMessage(value, message):
- """Convert a JSON representation into Any message."""
- if isinstance(value, dict) and not value:
- return
- try:
- type_url = value['@type']
- except KeyError:
- raise ParseError('@type is missing when parsing any message.')
-
- sub_message = _CreateMessageFromTypeUrl(type_url)
- message_descriptor = sub_message.DESCRIPTOR
- full_name = message_descriptor.full_name
- if _IsWrapperMessage(message_descriptor):
- _ConvertWrapperMessage(value['value'], sub_message)
- elif full_name in _WKTJSONMETHODS:
- _WKTJSONMETHODS[full_name][1](value['value'], sub_message)
- else:
- del value['@type']
- _ConvertFieldValuePair(value, sub_message)
- # Sets Any message
- message.value = sub_message.SerializeToString()
- message.type_url = type_url
-
-
-def _ConvertGenericMessage(value, message):
- """Convert a JSON representation into message with FromJsonString."""
- # Duration, Timestamp and FieldMask have a FromJsonString method to do the
- # conversion. Users can also call the method directly.
- message.FromJsonString(value)
-
-
-_INT_OR_FLOAT = six.integer_types + (float,)
-
-
-def _ConvertValueMessage(value, message):
- """Convert a JSON representation into Value message."""
- if isinstance(value, dict):
- _ConvertStructMessage(value, message.struct_value)
- elif isinstance(value, list):
- _ConvertListValueMessage(value, message.list_value)
- elif value is None:
- message.null_value = 0
- elif isinstance(value, bool):
- message.bool_value = value
- elif isinstance(value, six.string_types):
- message.string_value = value
- elif isinstance(value, _INT_OR_FLOAT):
- message.number_value = value
- else:
- raise ParseError('Unexpected type for Value message.')
-
-
-def _ConvertListValueMessage(value, message):
- """Convert a JSON representation into ListValue message."""
- if not isinstance(value, list):
- raise ParseError(
- 'ListValue must be in [] which is {0}.'.format(value))
- message.ClearField('values')
- for item in value:
- _ConvertValueMessage(item, message.values.add())
-
-
-def _ConvertStructMessage(value, message):
- """Convert a JSON representation into Struct message."""
- if not isinstance(value, dict):
- raise ParseError(
- 'Struct must be in a dict which is {0}.'.format(value))
- for key in value:
- _ConvertValueMessage(value[key], message.fields[key])
- return
-
-
-def _ConvertWrapperMessage(value, message):
- """Convert a JSON representation into Wrapper message."""
- field = message.DESCRIPTOR.fields_by_name['value']
- setattr(message, 'value', _ConvertScalarFieldValue(value, field))
-
-
-def _ConvertMapFieldValue(value, message, field):
- """Convert map field value for a message map field.
-
- Args:
- value: A JSON object to convert into the map field value.
- message: A protocol message to record the converted data.
- field: The descriptor of the map field to be converted.
-
- Raises:
- ParseError: In case of conversion problems.
- """
- if not isinstance(value, dict):
- raise ParseError(
- 'Map field {0} must be in a dict which is {1}.'.format(
- field.name, value))
- key_field = field.message_type.fields_by_name['key']
- value_field = field.message_type.fields_by_name['value']
- for key in value:
- key_value = _ConvertScalarFieldValue(key, key_field, True)
- if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
- _ConvertMessage(value[key], getattr(message, field.name)[key_value])
- else:
- getattr(message, field.name)[key_value] = _ConvertScalarFieldValue(
- value[key], value_field)
-
-
-def _ConvertScalarFieldValue(value, field, require_str=False):
- """Convert a single scalar field value.
-
- Args:
- value: The scalar value to convert.
- field: The descriptor of the field to convert.
- require_str: If True, the field value must be a str.
-
- Returns:
- The converted scalar field value.
-
- Raises:
- ParseError: In case of conversion problems.
- """
- if field.cpp_type in _INT_TYPES:
- return _ConvertInteger(value)
- elif field.cpp_type in _FLOAT_TYPES:
- return _ConvertFloat(value)
- elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
- return _ConvertBool(value, require_str)
- elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
- if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
- return base64.b64decode(value)
- else:
- return value
- elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
- # Convert an enum value.
- enum_value = field.enum_type.values_by_name.get(value, None)
- if enum_value is None:
- raise ParseError(
- 'Enum value must be a string literal with double quotes. '
- 'Type "{0}" has no value named {1}.'.format(
- field.enum_type.full_name, value))
- return enum_value.number
-
-
-def _ConvertInteger(value):
- """Convert an integer.
-
- Args:
- value: A scalar value to convert.
-
- Returns:
- The integer value.
-
- Raises:
- ParseError: If an integer couldn't be consumed.
- """
- if isinstance(value, float):
- raise ParseError('Couldn\'t parse integer: {0}.'.format(value))
-
- if isinstance(value, six.text_type) and value.find(' ') != -1:
- raise ParseError('Couldn\'t parse integer: "{0}".'.format(value))
-
- return int(value)
-
-
-def _ConvertFloat(value):
- """Convert an floating point number."""
- if value == 'nan':
- raise ParseError('Couldn\'t parse float "nan", use "NaN" instead.')
- try:
- # Assume Python compatible syntax.
- return float(value)
- except ValueError:
- # Check alternative spellings.
- if value == _NEG_INFINITY:
- return float('-inf')
- elif value == _INFINITY:
- return float('inf')
- elif value == _NAN:
- return float('nan')
- else:
- raise ParseError('Couldn\'t parse float: {0}.'.format(value))
-
-
-def _ConvertBool(value, require_str):
- """Convert a boolean value.
-
- Args:
- value: A scalar value to convert.
- require_str: If True, value must be a str.
-
- Returns:
- The bool parsed.
-
- Raises:
- ParseError: If a boolean value couldn't be consumed.
- """
- if require_str:
- if value == 'true':
- return True
- elif value == 'false':
- return False
- else:
- raise ParseError('Expected "true" or "false", not {0}.'.format(value))
-
- if not isinstance(value, bool):
- raise ParseError('Expected true or false without quotes.')
- return value
-
-_WKTJSONMETHODS = {
- 'google.protobuf.Any': [_AnyMessageToJsonObject,
- _ConvertAnyMessage],
- 'google.protobuf.Duration': [_GenericMessageToJsonObject,
- _ConvertGenericMessage],
- 'google.protobuf.FieldMask': [_GenericMessageToJsonObject,
- _ConvertGenericMessage],
- 'google.protobuf.ListValue': [_ListValueMessageToJsonObject,
- _ConvertListValueMessage],
- 'google.protobuf.Struct': [_StructMessageToJsonObject,
- _ConvertStructMessage],
- 'google.protobuf.Timestamp': [_GenericMessageToJsonObject,
- _ConvertGenericMessage],
- 'google.protobuf.Value': [_ValueMessageToJsonObject,
- _ConvertValueMessage]
-}
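
The _WKTJSONMETHODS table above is the dispatch point for the removed parser: _ConvertMessage looks up the message's full name and hands the JSON value to the matching conversion helper. A minimal sketch of how those paths are reached through the public json_format.Parse entry point, assuming only the stock duration_pb2 and struct_pb2 modules:

from google.protobuf import duration_pb2
from google.protobuf import json_format
from google.protobuf import struct_pb2

# A top-level Duration is a bare JSON string; Parse routes it through
# _ConvertGenericMessage, i.e. Duration.FromJsonString().
d = json_format.Parse('"3.5s"', duration_pb2.Duration())
assert (d.seconds, d.nanos) == (3, 500000000)

# A Struct is a JSON object; each member is converted recursively by
# _ConvertStructMessage and _ConvertValueMessage.
s = json_format.Parse('{"name": "bazel", "pi": 3.14, "ok": true}',
                      struct_pb2.Struct())
assert s.fields['name'].string_value == 'bazel'
assert s.fields['ok'].bool_value is True
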
diff --git a/third_party/protobuf/python/google/protobuf/message.py b/third_party/protobuf/python/google/protobuf/message.py
deleted file mode 100755
index de2f5697e2..0000000000
--- a/third_party/protobuf/python/google/protobuf/message.py
+++ /dev/null
@@ -1,292 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# TODO(robinson): We should just make these methods all "pure-virtual" and move
-# all implementation out, into reflection.py for now.
-
-
-"""Contains an abstract base class for protocol messages."""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-class Error(Exception): pass
-class DecodeError(Error): pass
-class EncodeError(Error): pass
-
-
-class Message(object):
-
- """Abstract base class for protocol messages.
-
- Protocol message classes are almost always generated by the protocol
- compiler. These generated types subclass Message and implement the methods
- shown below.
-
- TODO(robinson): Link to an HTML document here.
-
- TODO(robinson): Document that instances of this class will also
- have an Extensions attribute with __getitem__ and __setitem__.
- Again, not sure how to best convey this.
-
- TODO(robinson): Document that the class must also have a static
- RegisterExtension(extension_field) method.
- Not sure how to best express at this point.
- """
-
- # TODO(robinson): Document these fields and methods.
-
- __slots__ = []
-
- DESCRIPTOR = None
-
- def __deepcopy__(self, memo=None):
- clone = type(self)()
- clone.MergeFrom(self)
- return clone
-
- def __eq__(self, other_msg):
- """Recursively compares two messages by value and structure."""
- raise NotImplementedError
-
- def __ne__(self, other_msg):
- # Can't just say self != other_msg, since that would infinitely recurse. :)
- return not self == other_msg
-
- def __hash__(self):
- raise TypeError('unhashable object')
-
- def __str__(self):
- """Outputs a human-readable representation of the message."""
- raise NotImplementedError
-
- def __unicode__(self):
- """Outputs a human-readable representation of the message."""
- raise NotImplementedError
-
- def MergeFrom(self, other_msg):
- """Merges the contents of the specified message into current message.
-
- This method merges the contents of the specified message into the current
- message. Singular fields that are set in the specified message overwrite
- the corresponding fields in the current message. Repeated fields are
- appended. Singular sub-messages and groups are recursively merged.
-
- Args:
- other_msg: Message to merge into the current message.
- """
- raise NotImplementedError
-
- def CopyFrom(self, other_msg):
- """Copies the content of the specified message into the current message.
-
- The method clears the current message and then merges the specified
- message using MergeFrom.
-
- Args:
- other_msg: Message to copy into the current one.
- """
- if self is other_msg:
- return
- self.Clear()
- self.MergeFrom(other_msg)
-
- def Clear(self):
- """Clears all data that was set in the message."""
- raise NotImplementedError
-
- def SetInParent(self):
- """Mark this as present in the parent.
-
- This normally happens automatically when you assign a field of a
- sub-message, but sometimes you want to make the sub-message
- present while keeping it empty. If you find yourself using this,
- you may want to reconsider your design."""
- raise NotImplementedError
-
- def IsInitialized(self):
- """Checks if the message is initialized.
-
- Returns:
- The method returns True if the message is initialized (i.e. all of its
- required fields are set).
- """
- raise NotImplementedError
-
- # TODO(robinson): MergeFromString() should probably return None and be
- # implemented in terms of a helper that returns the # of bytes read. Our
- # deserialization routines would use the helper when recursively
- # deserializing, but the end user would almost always just want the no-return
- # MergeFromString().
-
- def MergeFromString(self, serialized):
- """Merges serialized protocol buffer data into this message.
-
- When we find a field in |serialized| that is already present
- in this message:
- - If it's a "repeated" field, we append to the end of our list.
- - Else, if it's a scalar, we overwrite our field.
- - Else, (it's a nonrepeated composite), we recursively merge
- into the existing composite.
-
- TODO(robinson): Document handling of unknown fields.
-
- Args:
- serialized: Any object that allows us to call buffer(serialized)
- to access a string of bytes using the buffer interface.
-
- TODO(robinson): When we switch to a helper, this will return None.
-
- Returns:
- The number of bytes read from |serialized|.
- For non-group messages, this will always be len(serialized),
- but for messages which are actually groups, this will
- generally be less than len(serialized), since we must
- stop when we reach an END_GROUP tag. Note that if
- we *do* stop because of an END_GROUP tag, the number
- of bytes returned does not include the bytes
- for the END_GROUP tag information.
- """
- raise NotImplementedError
-
- def ParseFromString(self, serialized):
- """Parse serialized protocol buffer data into this message.
-
- Like MergeFromString(), except we clear the object first and
- do not return the value that MergeFromString returns.
- """
- self.Clear()
- self.MergeFromString(serialized)
-
- def SerializeToString(self):
- """Serializes the protocol message to a binary string.
-
- Returns:
- A binary string representation of the message if all of the required
- fields in the message are set (i.e. the message is initialized).
-
- Raises:
- message.EncodeError if the message isn't initialized.
- """
- raise NotImplementedError
-
- def SerializePartialToString(self):
- """Serializes the protocol message to a binary string.
-
- This method is similar to SerializeToString but doesn't check if the
- message is initialized.
-
- Returns:
- A string representation of the partial message.
- """
- raise NotImplementedError
-
- # TODO(robinson): Decide whether we like these better
- # than auto-generated has_foo() and clear_foo() methods
- # on the instances themselves. This way is less consistent
- # with C++, but it makes reflection-type access easier and
- # reduces the number of magically autogenerated things.
- #
- # TODO(robinson): Be sure to document (and test) exactly
- # which field names are accepted here. Are we case-sensitive?
- # What do we do with fields that share names with Python keywords
- # like 'lambda' and 'yield'?
- #
- # nnorwitz says:
- # """
- # Typically (in python), an underscore is appended to names that are
- # keywords. So they would become lambda_ or yield_.
- # """
- def ListFields(self):
- """Returns a list of (FieldDescriptor, value) tuples for all
- fields in the message which are not empty. A singular field is non-empty
- if HasField() would return true, and a repeated field is non-empty if
- it contains at least one element. The fields are ordered by field
- number"""
- raise NotImplementedError
-
- def HasField(self, field_name):
- """Checks if a certain field is set for the message, or if any field inside
- a oneof group is set. Note that if the field_name is not defined in the
- message descriptor, ValueError will be raised."""
- raise NotImplementedError
-
- def ClearField(self, field_name):
- """Clears the contents of a given field, or the field set inside a oneof
- group. If the name refers to neither a defined field nor a oneof group,
- ValueError is raised."""
- raise NotImplementedError
-
- def WhichOneof(self, oneof_group):
- """Returns the name of the field that is set inside a oneof group, or
- None if no field is set. If no group with the given name exists, ValueError
- will be raised."""
- raise NotImplementedError
-
- def HasExtension(self, extension_handle):
- raise NotImplementedError
-
- def ClearExtension(self, extension_handle):
- raise NotImplementedError
-
- def ByteSize(self):
- """Returns the serialized size of this message.
- Recursively calls ByteSize() on all contained messages.
- """
- raise NotImplementedError
-
- def _SetListener(self, message_listener):
- """Internal method used by the protocol message implementation.
- Clients should not call this directly.
-
- Sets a listener that this message will call on certain state transitions.
-
- The purpose of this method is to register back-edges from children to
- parents at runtime, for the purpose of setting "has" bits and
- byte-size-dirty bits in the parent and ancestor objects whenever a child or
- descendant object is modified.
-
- If the client wants to disconnect this Message from the object tree, she
- explicitly sets callback to None.
-
- If message_listener is None, unregisters any existing listener. Otherwise,
- message_listener must implement the MessageListener interface in
- internal/message_listener.py, and we discard any listener registered
- via a previous _SetListener() call.
- """
- raise NotImplementedError
-
- def __getstate__(self):
- """Support the pickle protocol."""
- return dict(serialized=self.SerializePartialToString())
-
- def __setstate__(self, state):
- """Support the pickle protocol."""
- self.__init__()
- self.ParseFromString(state['serialized'])
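
message.py only declares the contract; the behavior lives in the generated classes and in reflection.py. A small sketch of the round-trip, deep-copy and pickle semantics spelled out in the docstrings above, using the stock duration_pb2.Duration purely as a stand-in for any generated Message subclass:

import copy
import pickle

from google.protobuf import duration_pb2

original = duration_pb2.Duration(seconds=3, nanos=500000000)

# ParseFromString() is Clear() followed by MergeFromString().
wire = original.SerializeToString()
parsed = duration_pb2.Duration()
parsed.ParseFromString(wire)
assert parsed == original

# __deepcopy__ builds a fresh instance and MergeFrom()s the original into it.
clone = copy.deepcopy(original)
assert clone == original and clone is not original

# Pickle support round-trips through the serialized bytes
# (__getstate__/__setstate__ above in the pure-Python case).
assert pickle.loads(pickle.dumps(original)) == original
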
diff --git a/third_party/protobuf/python/google/protobuf/message_factory.py b/third_party/protobuf/python/google/protobuf/message_factory.py
deleted file mode 100644
index 1b059d130b..0000000000
--- a/third_party/protobuf/python/google/protobuf/message_factory.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Provides a factory class for generating dynamic messages.
-
-The easiest way to use this class is if you have access to the FileDescriptor
-protos containing the messages you want to create; in that case you can just do
-the following:
-
-message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
-my_proto_instance = message_classes['some.proto.package.MessageName']()
-"""
-
-__author__ = 'matthewtoia@google.com (Matt Toia)'
-
-from google.protobuf import descriptor_pool
-from google.protobuf import message
-from google.protobuf import reflection
-
-
-class MessageFactory(object):
- """Factory for creating Proto2 messages from descriptors in a pool."""
-
- def __init__(self, pool=None):
- """Initializes a new factory."""
- self.pool = pool or descriptor_pool.DescriptorPool()
-
- # local cache of all classes built from protobuf descriptors
- self._classes = {}
-
- def GetPrototype(self, descriptor):
- """Builds a proto2 message class based on the passed in descriptor.
-
- Passing a descriptor with a fully qualified name matching a previous
- invocation will cause the same class to be returned.
-
- Args:
- descriptor: The descriptor to build from.
-
- Returns:
- A class describing the passed in descriptor.
- """
- if descriptor.full_name not in self._classes:
- descriptor_name = descriptor.name
- if str is bytes: # PY2
- descriptor_name = descriptor.name.encode('ascii', 'ignore')
- result_class = reflection.GeneratedProtocolMessageType(
- descriptor_name,
- (message.Message,),
- {'DESCRIPTOR': descriptor, '__module__': None})
- # If module not set, it wrongly points to the reflection.py module.
- self._classes[descriptor.full_name] = result_class
- for field in descriptor.fields:
- if field.message_type:
- self.GetPrototype(field.message_type)
- for extension in result_class.DESCRIPTOR.extensions:
- if extension.containing_type.full_name not in self._classes:
- self.GetPrototype(extension.containing_type)
- extended_class = self._classes[extension.containing_type.full_name]
- extended_class.RegisterExtension(extension)
- return self._classes[descriptor.full_name]
-
- def GetMessages(self, files):
- """Gets all the messages from a specified file.
-
- This will find and resolve dependencies, failing if the descriptor
- pool cannot satisfy them.
-
- Args:
- files: The file names to extract messages from.
-
- Returns:
- A dictionary mapping proto names to the message classes. This will include
- any dependent messages as well as any messages defined in the same file as
- a specified message.
- """
- result = {}
- for file_name in files:
- file_desc = self.pool.FindFileByName(file_name)
- for name, msg in file_desc.message_types_by_name.items():
- if file_desc.package:
- full_name = '.'.join([file_desc.package, name])
- else:
- full_name = msg.name
- result[full_name] = self.GetPrototype(
- self.pool.FindMessageTypeByName(full_name))
-
- # While the extension FieldDescriptors are created by the descriptor pool,
- # the python classes created in the factory need them to be registered
- # explicitly, which is done below.
- #
- # The call to RegisterExtension will specifically check if the
- # extension was already registered on the object and either
- # ignore the registration if the original was the same, or raise
- # an error if they were different.
-
- for name, extension in file_desc.extensions_by_name.items():
- if extension.containing_type.full_name not in self._classes:
- self.GetPrototype(extension.containing_type)
- extended_class = self._classes[extension.containing_type.full_name]
- extended_class.RegisterExtension(extension)
- return result
-
-
-_FACTORY = MessageFactory()
-
-
-def GetMessages(file_protos):
- """Builds a dictionary of all the messages available in a set of files.
-
- Args:
- file_protos: A sequence of file protos to build messages out of.
-
- Returns:
- A dictionary mapping proto names to the message classes. This will include
- any dependent messages as well as any messages defined in the same file as
- a specified message.
- """
- for file_proto in file_protos:
- _FACTORY.pool.Add(file_proto)
- return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos])
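
A sketch of driving the module-level GetMessages() helper above with a hand-built FileDescriptorProto; the file, package, message and field names are made up for the example:

from google.protobuf import descriptor_pb2
from google.protobuf import message_factory

file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example/dynamic.proto'   # hypothetical file name
file_proto.package = 'example'
msg_proto = file_proto.message_type.add()
msg_proto.name = 'Pair'
field = msg_proto.field.add()
field.name = 'key'
field.number = 1
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING

# GetMessages() adds the file to the factory's pool and returns classes
# keyed by fully qualified name, as built in MessageFactory.GetMessages().
classes = message_factory.GetMessages([file_proto])
Pair = classes['example.Pair']
msg = Pair(key='answer')
assert msg.key == 'answer'
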
diff --git a/third_party/protobuf/python/google/protobuf/proto_builder.py b/third_party/protobuf/python/google/protobuf/proto_builder.py
deleted file mode 100644
index 736caed385..0000000000
--- a/third_party/protobuf/python/google/protobuf/proto_builder.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Dynamic Protobuf class creator."""
-
-try:
- from collections import OrderedDict
-except ImportError:
- from ordereddict import OrderedDict #PY26
-import hashlib
-import os
-
-from google.protobuf import descriptor_pb2
-from google.protobuf import message_factory
-
-
-def _GetMessageFromFactory(factory, full_name):
- """Get a proto class from the MessageFactory by name.
-
- Args:
- factory: a MessageFactory instance.
- full_name: str, the fully qualified name of the proto type.
- Returns:
- A class, for the type identified by full_name.
- Raises:
- KeyError, if the proto is not found in the factory's descriptor pool.
- """
- proto_descriptor = factory.pool.FindMessageTypeByName(full_name)
- proto_cls = factory.GetPrototype(proto_descriptor)
- return proto_cls
-
-
-def MakeSimpleProtoClass(fields, full_name=None, pool=None):
- """Create a Protobuf class whose fields are basic types.
-
- Note: this doesn't validate field names!
-
- Args:
- fields: dict of {name: field_type} mappings for each field in the proto. If
- this is an OrderedDict the order will be maintained, otherwise the
- fields will be sorted by name.
- full_name: optional str, the fully-qualified name of the proto type.
- pool: optional DescriptorPool instance.
- Returns:
- a class, the new protobuf class with a FileDescriptor.
- """
- factory = message_factory.MessageFactory(pool=pool)
-
- if full_name is not None:
- try:
- proto_cls = _GetMessageFromFactory(factory, full_name)
- return proto_cls
- except KeyError:
- # The factory's DescriptorPool doesn't know about this class yet.
- pass
-
- # Get a list of (name, field_type) tuples from the fields dict. If fields was
- # an OrderedDict we keep the order, but otherwise we sort the fields to ensure
- # consistent ordering.
- field_items = fields.items()
- if not isinstance(fields, OrderedDict):
- field_items = sorted(field_items)
-
- # Use a consistent file name that is unlikely to conflict with any imported
- # proto files.
- fields_hash = hashlib.sha1()
- for f_name, f_type in field_items:
- fields_hash.update(f_name.encode('utf-8'))
- fields_hash.update(str(f_type).encode('utf-8'))
- proto_file_name = fields_hash.hexdigest() + '.proto'
-
- # If the proto is anonymous, use the same hash to name it.
- if full_name is None:
- full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
- fields_hash.hexdigest())
- try:
- proto_cls = _GetMessageFromFactory(factory, full_name)
- return proto_cls
- except KeyError:
- # The factory's DescriptorPool doesn't know about this class yet.
- pass
-
- # This is the first time we see this proto: add a new descriptor to the pool.
- factory.pool.Add(
- _MakeFileDescriptorProto(proto_file_name, full_name, field_items))
- return _GetMessageFromFactory(factory, full_name)
-
-
-def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
- """Populate FileDescriptorProto for MessageFactory's DescriptorPool."""
- package, name = full_name.rsplit('.', 1)
- file_proto = descriptor_pb2.FileDescriptorProto()
- file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
- file_proto.package = package
- desc_proto = file_proto.message_type.add()
- desc_proto.name = name
- for f_number, (f_name, f_type) in enumerate(field_items, 1):
- field_proto = desc_proto.field.add()
- field_proto.name = f_name
- field_proto.number = f_number
- field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
- field_proto.type = f_type
- return file_proto
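
A sketch of the proto_builder entry point above; the 'example.Endpoint' name and the field layout are illustrative:

from collections import OrderedDict

from google.protobuf import descriptor_pb2
from google.protobuf import proto_builder

# An OrderedDict keeps the declared order, which also fixes the field numbers;
# a plain dict would be sorted by field name instead.
fields = OrderedDict([
    ('host', descriptor_pb2.FieldDescriptorProto.TYPE_STRING),
    ('port', descriptor_pb2.FieldDescriptorProto.TYPE_INT32),
])
Endpoint = proto_builder.MakeSimpleProtoClass(
    fields, full_name='example.Endpoint')

ep = Endpoint(host='localhost', port=8080)
assert ep.port == 8080
assert ep.SerializeToString()  # behaves like any generated message
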
diff --git a/third_party/protobuf/python/google/protobuf/pyext/README b/third_party/protobuf/python/google/protobuf/pyext/README
deleted file mode 100644
index 6d61cb45bf..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/README
+++ /dev/null
@@ -1,6 +0,0 @@
-This is the 'v2' C++ implementation for python proto2.
-
-It is active when:
-
-PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp
-PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION=2
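
The switch described in this README is read by google.protobuf.internal.api_implementation at import time, so it has to be set before the first protobuf import. A sketch, assuming the _message C++ extension has actually been built (otherwise forcing 'cpp' will typically make later protobuf imports fail rather than fall back):

import os

# Must happen before anything under google.protobuf is imported.
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'cpp'
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION'] = '2'

from google.protobuf.internal import api_implementation

# Reports the selected implementation: 'cpp' or 'python'.
print(api_implementation.Type())
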
diff --git a/third_party/protobuf/python/google/protobuf/pyext/__init__.py b/third_party/protobuf/python/google/protobuf/pyext/__init__.py
deleted file mode 100644
index 5585614122..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-try:
- __import__('pkg_resources').declare_namespace(__name__)
-except ImportError:
- __path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/third_party/protobuf/python/google/protobuf/pyext/cpp_message.py b/third_party/protobuf/python/google/protobuf/pyext/cpp_message.py
deleted file mode 100644
index b215211ee5..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/cpp_message.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Protocol message implementation hooks for C++ implementation.
-
-Contains helper functions used to create protocol message classes from
-Descriptor objects at runtime backed by the protocol buffer C++ API.
-"""
-
-__author__ = 'tibell@google.com (Johan Tibell)'
-
-from google.protobuf.pyext import _message
-
-
-class GeneratedProtocolMessageType(_message.MessageMeta):
-
- """Metaclass for protocol message classes created at runtime from Descriptors.
-
- The protocol compiler currently uses this metaclass to create protocol
- message classes at runtime. Clients can also manually create their own
- classes at runtime, as in this example:
-
- mydescriptor = Descriptor(.....)
- class MyProtoClass(Message):
- __metaclass__ = GeneratedProtocolMessageType
- DESCRIPTOR = mydescriptor
- myproto_instance = MyProtoClass()
- myproto.foo_field = 23
- ...
-
- The above example will not work for nested types. If you wish to include them,
- use reflection.MakeClass() instead of manually instantiating the class in
- order to create the appropriate class structure.
- """
-
- # Must be consistent with the protocol-compiler code in
- # proto2/compiler/internal/generator.*.
- _DESCRIPTOR_KEY = 'DESCRIPTOR'
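
The example in the docstring above uses the Python 2 __metaclass__ spelling; generated _pb2 modules instead call the metaclass directly via reflection.GeneratedProtocolMessageType, which aliases this class when the C++ implementation is active. A quick way to observe that relationship on an existing generated class (timestamp_pb2 is only a convenient stand-in):

from google.protobuf import reflection
from google.protobuf import timestamp_pb2

# The generated class was produced by the active GeneratedProtocolMessageType
# metaclass (this one for 'cpp', python_message's for 'python').
assert isinstance(timestamp_pb2.Timestamp,
                  reflection.GeneratedProtocolMessageType)
assert (timestamp_pb2.Timestamp.DESCRIPTOR.full_name
        == 'google.protobuf.Timestamp')

For building new classes at runtime, reflection.MakeClass() or message_factory (above) wrap this metaclass and also take care of nested types.
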
diff --git a/third_party/protobuf/python/google/protobuf/pyext/descriptor.cc b/third_party/protobuf/python/google/protobuf/pyext/descriptor.cc
deleted file mode 100644
index a875a7be86..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/descriptor.cc
+++ /dev/null
@@ -1,1583 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: petar@google.com (Petar Petrov)
-
-#include <Python.h>
-#include <frameobject.h>
-#include <string>
-
-#include <google/protobuf/io/coded_stream.h>
-#include <google/protobuf/descriptor.pb.h>
-#include <google/protobuf/dynamic_message.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_containers.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-
-#if PY_MAJOR_VERSION >= 3
- #define PyString_FromStringAndSize PyUnicode_FromStringAndSize
- #define PyString_Check PyUnicode_Check
- #define PyString_InternFromString PyUnicode_InternFromString
- #define PyInt_FromLong PyLong_FromLong
- #define PyInt_FromSize_t PyLong_FromSize_t
- #if PY_VERSION_HEX < 0x03030000
- #error "Python 3.0 - 3.2 are not supported."
- #endif
- #define PyString_AsStringAndSize(ob, charpp, sizep) \
- (PyUnicode_Check(ob)? \
- ((*(charpp) = PyUnicode_AsUTF8AndSize(ob, (sizep))) == NULL? -1: 0): \
- PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
-#endif
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-// Store interned descriptors, so that the same C++ descriptor yields the same
-// Python object. Objects are not immortal: this map does not own the
-// references, and items are deleted when the last reference to the object is
-// released.
-// This is enough to support the "is" operator on live objects.
-// All descriptors are stored here.
-hash_map<const void*, PyObject*> interned_descriptors;
-
-PyObject* PyString_FromCppString(const string& str) {
- return PyString_FromStringAndSize(str.c_str(), str.size());
-}
-
-// Check that the calling Python code is the global scope of a _pb2.py module.
-// This function is used to support the current code generated by the proto
-// compiler, which creates descriptors, then updates some properties.
-// For example:
-// message_descriptor = Descriptor(
-// name='Message',
-// fields = [FieldDescriptor(name='field')]
-// message_descriptor.fields[0].containing_type = message_descriptor
-//
-// This code is still executed, but the descriptors now have no other storage
-// than the (const) C++ pointer, and are immutable.
-// So we let this code pass, by simply ignoring the new value.
-//
-// From user code, descriptors still look immutable.
-//
-// TODO(amauryfa): Change the proto2 compiler to remove the assignments, and
-// remove this hack.
-bool _CalledFromGeneratedFile(int stacklevel) {
- PyThreadState *state = PyThreadState_GET();
- if (state == NULL) {
- return false;
- }
- PyFrameObject* frame = state->frame;
- if (frame == NULL) {
- return false;
- }
- while (stacklevel-- > 0) {
- frame = frame->f_back;
- if (frame == NULL) {
- return false;
- }
- }
- if (frame->f_globals != frame->f_locals) {
- // Not at global module scope
- return false;
- }
-
- if (frame->f_code->co_filename == NULL) {
- return false;
- }
- char* filename;
- Py_ssize_t filename_size;
- if (PyString_AsStringAndSize(frame->f_code->co_filename,
- &filename, &filename_size) < 0) {
- // filename is not a string.
- PyErr_Clear();
- return false;
- }
- if (filename_size < 7) {
- // filename is too short.
- return false;
- }
- if (strcmp(&filename[filename_size - 7], "_pb2.py") != 0) {
- // Filename does not end with _pb2.py.
- return false;
- }
- return true;
-}
-
-// If the calling code is not a _pb2.py file, raise AttributeError.
-// To be used in attribute setters.
-static int CheckCalledFromGeneratedFile(const char* attr_name) {
- if (_CalledFromGeneratedFile(0)) {
- return 0;
- }
- PyErr_Format(PyExc_AttributeError,
- "attribute is not writable: %s", attr_name);
- return -1;
-}
-
-
-#ifndef PyVarObject_HEAD_INIT
-#define PyVarObject_HEAD_INIT(type, size) PyObject_HEAD_INIT(type) size,
-#endif
-#ifndef Py_TYPE
-#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
-#endif
-
-
-// Helper functions for descriptor objects.
-
-// A set of templates to retrieve the C++ FileDescriptor of any descriptor.
-template<class DescriptorClass>
-const FileDescriptor* GetFileDescriptor(const DescriptorClass* descriptor) {
- return descriptor->file();
-}
-template<>
-const FileDescriptor* GetFileDescriptor(const FileDescriptor* descriptor) {
- return descriptor;
-}
-template<>
-const FileDescriptor* GetFileDescriptor(const EnumValueDescriptor* descriptor) {
- return descriptor->type()->file();
-}
-template<>
-const FileDescriptor* GetFileDescriptor(const OneofDescriptor* descriptor) {
- return descriptor->containing_type()->file();
-}
-
-// Converts options into a Python protobuf, and caches the result.
-//
-// This is a bit tricky because options can contain extension fields defined in
-// the same proto file. In this case the options parsed from the serialized_pb
-// have unknown fields, and we need to parse them again.
-//
-// Always returns a new reference.
-template<class DescriptorClass>
-static PyObject* GetOrBuildOptions(const DescriptorClass *descriptor) {
- // Options (and their extensions) are completely resolved in the proto file
- // containing the descriptor.
- PyDescriptorPool* pool = GetDescriptorPool_FromPool(
- GetFileDescriptor(descriptor)->pool());
-
- hash_map<const void*, PyObject*>* descriptor_options =
- pool->descriptor_options;
- // First search in the cache.
- if (descriptor_options->find(descriptor) != descriptor_options->end()) {
- PyObject *value = (*descriptor_options)[descriptor];
- Py_INCREF(value);
- return value;
- }
-
- // Build the Options object: get its Python class, and make a copy of the C++
- // read-only instance.
- const Message& options(descriptor->options());
- const Descriptor *message_type = options.GetDescriptor();
- PyObject* message_class(cdescriptor_pool::GetMessageClass(
- pool, message_type));
- if (message_class == NULL) {
- // The Options message was not found in the current DescriptorPool.
- // In this case, there cannot be extensions to these options, and we can
- // try to use the basic pool instead.
- PyErr_Clear();
- message_class = cdescriptor_pool::GetMessageClass(
- GetDefaultDescriptorPool(), message_type);
- }
- if (message_class == NULL) {
- PyErr_Format(PyExc_TypeError, "Could not retrieve class for Options: %s",
- message_type->full_name().c_str());
- return NULL;
- }
- ScopedPyObjectPtr value(PyEval_CallObject(message_class, NULL));
- if (value == NULL) {
- return NULL;
- }
- if (!PyObject_TypeCheck(value.get(), &CMessage_Type)) {
- PyErr_Format(PyExc_TypeError, "Invalid class for %s: %s",
- message_type->full_name().c_str(),
- Py_TYPE(value.get())->tp_name);
- return NULL;
- }
- CMessage* cmsg = reinterpret_cast<CMessage*>(value.get());
-
- const Reflection* reflection = options.GetReflection();
- const UnknownFieldSet& unknown_fields(reflection->GetUnknownFields(options));
- if (unknown_fields.empty()) {
- cmsg->message->CopyFrom(options);
- } else {
- // Reparse options string! XXX call cmessage::MergeFromString
- string serialized;
- options.SerializeToString(&serialized);
- io::CodedInputStream input(
- reinterpret_cast<const uint8*>(serialized.c_str()), serialized.size());
- input.SetExtensionRegistry(pool->pool, pool->message_factory);
- bool success = cmsg->message->MergePartialFromCodedStream(&input);
- if (!success) {
- PyErr_Format(PyExc_ValueError, "Error parsing Options message");
- return NULL;
- }
- }
-
- // Cache the result.
- Py_INCREF(value.get());
- (*pool->descriptor_options)[descriptor] = value.get();
-
- return value.release();
-}
-
-// Copy the C++ descriptor to a Python message.
-// The Python message is an instance of descriptor_pb2.DescriptorProto
-// or similar.
-template<class DescriptorProtoClass, class DescriptorClass>
-static PyObject* CopyToPythonProto(const DescriptorClass *descriptor,
- PyObject *target) {
- const Descriptor* self_descriptor =
- DescriptorProtoClass::default_instance().GetDescriptor();
- CMessage* message = reinterpret_cast<CMessage*>(target);
- if (!PyObject_TypeCheck(target, &CMessage_Type) ||
- message->message->GetDescriptor() != self_descriptor) {
- PyErr_Format(PyExc_TypeError, "Not a %s message",
- self_descriptor->full_name().c_str());
- return NULL;
- }
- cmessage::AssureWritable(message);
- DescriptorProtoClass* descriptor_message =
- static_cast<DescriptorProtoClass*>(message->message);
- descriptor->CopyTo(descriptor_message);
- Py_RETURN_NONE;
-}
-
-// All Descriptors classes share the same memory layout.
-typedef struct PyBaseDescriptor {
- PyObject_HEAD
-
- // Pointer to the C++ proto2 descriptor.
- // Like all descriptors, it is owned by the global DescriptorPool.
- const void* descriptor;
-
- // Owned reference to the DescriptorPool, to ensure it is kept alive.
- PyDescriptorPool* pool;
-} PyBaseDescriptor;
-
-
-// FileDescriptor structure "inherits" from the base descriptor.
-typedef struct PyFileDescriptor {
- PyBaseDescriptor base;
-
- // The cached version of serialized pb. Either NULL, or a Bytes string.
- // We own the reference.
- PyObject *serialized_pb;
-} PyFileDescriptor;
-
-
-namespace descriptor {
-
-// Creates or retrieves a Python descriptor of the specified type.
-// Objects are interned: the same descriptor will return the same object if it
-// was kept alive.
-// 'was_created' is an optional pointer to a bool, and is set to true if a new
-// object was allocated.
-// Always returns a new reference.
-template<class DescriptorClass>
-PyObject* NewInternedDescriptor(PyTypeObject* type,
- const DescriptorClass* descriptor,
- bool* was_created) {
- if (was_created) {
- *was_created = false;
- }
- if (descriptor == NULL) {
- PyErr_BadInternalCall();
- return NULL;
- }
-
- // See if the object is in the map of interned descriptors
- hash_map<const void*, PyObject*>::iterator it =
- interned_descriptors.find(descriptor);
- if (it != interned_descriptors.end()) {
- GOOGLE_DCHECK(Py_TYPE(it->second) == type);
- Py_INCREF(it->second);
- return it->second;
- }
- // Create a new descriptor object
- PyBaseDescriptor* py_descriptor = PyObject_New(
- PyBaseDescriptor, type);
- if (py_descriptor == NULL) {
- return NULL;
- }
- py_descriptor->descriptor = descriptor;
-
- // and cache it.
- interned_descriptors.insert(
- std::make_pair(descriptor, reinterpret_cast<PyObject*>(py_descriptor)));
-
- // Ensures that the DescriptorPool stays alive.
- PyDescriptorPool* pool = GetDescriptorPool_FromPool(
- GetFileDescriptor(descriptor)->pool());
- if (pool == NULL) {
- // Don't DECREF, the object is not fully initialized.
- PyObject_Del(py_descriptor);
- return NULL;
- }
- Py_INCREF(pool);
- py_descriptor->pool = pool;
-
- if (was_created) {
- *was_created = true;
- }
- return reinterpret_cast<PyObject*>(py_descriptor);
-}
-
-static void Dealloc(PyBaseDescriptor* self) {
- // Remove from interned dictionary
- interned_descriptors.erase(self->descriptor);
- Py_CLEAR(self->pool);
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
-}
-
-static PyGetSetDef Getters[] = {
- {NULL}
-};
-
-PyTypeObject PyBaseDescriptor_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".DescriptorBase", // tp_name
- sizeof(PyBaseDescriptor), // tp_basicsize
- 0, // tp_itemsize
- (destructor)Dealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "Descriptors base class", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- 0, // tp_methods
- 0, // tp_members
- Getters, // tp_getset
-};
-
-} // namespace descriptor
-
-const void* PyDescriptor_AsVoidPtr(PyObject* obj) {
- if (!PyObject_TypeCheck(obj, &descriptor::PyBaseDescriptor_Type)) {
- PyErr_SetString(PyExc_TypeError, "Not a BaseDescriptor");
- return NULL;
- }
- return reinterpret_cast<PyBaseDescriptor*>(obj)->descriptor;
-}
-
-namespace message_descriptor {
-
-// Unchecked accessor to the C++ pointer.
-static const Descriptor* _GetDescriptor(PyBaseDescriptor* self) {
- return reinterpret_cast<const Descriptor*>(self->descriptor);
-}
-
-static PyObject* GetName(PyBaseDescriptor* self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->name());
-}
-
-static PyObject* GetFullName(PyBaseDescriptor* self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->full_name());
-}
-
-static PyObject* GetFile(PyBaseDescriptor *self, void *closure) {
- return PyFileDescriptor_FromDescriptor(_GetDescriptor(self)->file());
-}
-
-static PyObject* GetConcreteClass(PyBaseDescriptor* self, void *closure) {
- // Returns the canonical class for the given descriptor.
- // This is the class that was registered with the primary descriptor pool
- // which contains this descriptor.
- // This might not be the one you expect! For example the returned object does
- // not know about extensions defined in a custom pool.
- PyObject* concrete_class(cdescriptor_pool::GetMessageClass(
- GetDescriptorPool_FromPool(_GetDescriptor(self)->file()->pool()),
- _GetDescriptor(self)));
- Py_XINCREF(concrete_class);
- return concrete_class;
-}
-
-static PyObject* GetFieldsByName(PyBaseDescriptor* self, void *closure) {
- return NewMessageFieldsByName(_GetDescriptor(self));
-}
-
-static PyObject* GetFieldsByCamelcaseName(PyBaseDescriptor* self,
- void *closure) {
- return NewMessageFieldsByCamelcaseName(_GetDescriptor(self));
-}
-
-static PyObject* GetFieldsByNumber(PyBaseDescriptor* self, void *closure) {
- return NewMessageFieldsByNumber(_GetDescriptor(self));
-}
-
-static PyObject* GetFieldsSeq(PyBaseDescriptor* self, void *closure) {
- return NewMessageFieldsSeq(_GetDescriptor(self));
-}
-
-static PyObject* GetNestedTypesByName(PyBaseDescriptor* self, void *closure) {
- return NewMessageNestedTypesByName(_GetDescriptor(self));
-}
-
-static PyObject* GetNestedTypesSeq(PyBaseDescriptor* self, void *closure) {
- return NewMessageNestedTypesSeq(_GetDescriptor(self));
-}
-
-static PyObject* GetExtensionsByName(PyBaseDescriptor* self, void *closure) {
- return NewMessageExtensionsByName(_GetDescriptor(self));
-}
-
-static PyObject* GetExtensions(PyBaseDescriptor* self, void *closure) {
- return NewMessageExtensionsSeq(_GetDescriptor(self));
-}
-
-static PyObject* GetEnumsSeq(PyBaseDescriptor* self, void *closure) {
- return NewMessageEnumsSeq(_GetDescriptor(self));
-}
-
-static PyObject* GetEnumTypesByName(PyBaseDescriptor* self, void *closure) {
- return NewMessageEnumsByName(_GetDescriptor(self));
-}
-
-static PyObject* GetEnumValuesByName(PyBaseDescriptor* self, void *closure) {
- return NewMessageEnumValuesByName(_GetDescriptor(self));
-}
-
-static PyObject* GetOneofsByName(PyBaseDescriptor* self, void *closure) {
- return NewMessageOneofsByName(_GetDescriptor(self));
-}
-
-static PyObject* GetOneofsSeq(PyBaseDescriptor* self, void *closure) {
- return NewMessageOneofsSeq(_GetDescriptor(self));
-}
-
-static PyObject* IsExtendable(PyBaseDescriptor *self, void *closure) {
- if (_GetDescriptor(self)->extension_range_count() > 0) {
- Py_RETURN_TRUE;
- } else {
- Py_RETURN_FALSE;
- }
-}
-
-static PyObject* GetExtensionRanges(PyBaseDescriptor *self, void *closure) {
- const Descriptor* descriptor = _GetDescriptor(self);
- PyObject* range_list = PyList_New(descriptor->extension_range_count());
-
- for (int i = 0; i < descriptor->extension_range_count(); i++) {
- const Descriptor::ExtensionRange* range = descriptor->extension_range(i);
- PyObject* start = PyInt_FromLong(range->start);
- PyObject* end = PyInt_FromLong(range->end);
- PyList_SetItem(range_list, i, PyTuple_Pack(2, start, end));
- }
-
- return range_list;
-}
-
-static PyObject* GetContainingType(PyBaseDescriptor *self, void *closure) {
- const Descriptor* containing_type =
- _GetDescriptor(self)->containing_type();
- if (containing_type) {
- return PyMessageDescriptor_FromDescriptor(containing_type);
- } else {
- Py_RETURN_NONE;
- }
-}
-
-static int SetContainingType(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("containing_type");
-}
-
-static PyObject* GetHasOptions(PyBaseDescriptor *self, void *closure) {
- const MessageOptions& options(_GetDescriptor(self)->options());
- if (&options != &MessageOptions::default_instance()) {
- Py_RETURN_TRUE;
- } else {
- Py_RETURN_FALSE;
- }
-}
-static int SetHasOptions(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("has_options");
-}
-
-static PyObject* GetOptions(PyBaseDescriptor *self) {
- return GetOrBuildOptions(_GetDescriptor(self));
-}
-
-static int SetOptions(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("_options");
-}
-
-static PyObject* CopyToProto(PyBaseDescriptor *self, PyObject *target) {
- return CopyToPythonProto<DescriptorProto>(_GetDescriptor(self), target);
-}
-
-static PyObject* EnumValueName(PyBaseDescriptor *self, PyObject *args) {
- const char *enum_name;
- int number;
- if (!PyArg_ParseTuple(args, "si", &enum_name, &number))
- return NULL;
- const EnumDescriptor *enum_type =
- _GetDescriptor(self)->FindEnumTypeByName(enum_name);
- if (enum_type == NULL) {
- PyErr_SetString(PyExc_KeyError, enum_name);
- return NULL;
- }
- const EnumValueDescriptor *enum_value =
- enum_type->FindValueByNumber(number);
- if (enum_value == NULL) {
- PyErr_Format(PyExc_KeyError, "%d", number);
- return NULL;
- }
- return PyString_FromCppString(enum_value->name());
-}
-
-static PyObject* GetSyntax(PyBaseDescriptor *self, void *closure) {
- return PyString_InternFromString(
- FileDescriptor::SyntaxName(_GetDescriptor(self)->file()->syntax()));
-}
-
-static PyGetSetDef Getters[] = {
- { "name", (getter)GetName, NULL, "Last name"},
- { "full_name", (getter)GetFullName, NULL, "Full name"},
- { "_concrete_class", (getter)GetConcreteClass, NULL, "concrete class"},
- { "file", (getter)GetFile, NULL, "File descriptor"},
-
- { "fields", (getter)GetFieldsSeq, NULL, "Fields sequence"},
- { "fields_by_name", (getter)GetFieldsByName, NULL, "Fields by name"},
- { "fields_by_camelcase_name", (getter)GetFieldsByCamelcaseName, NULL,
- "Fields by camelCase name"},
- { "fields_by_number", (getter)GetFieldsByNumber, NULL, "Fields by number"},
- { "nested_types", (getter)GetNestedTypesSeq, NULL, "Nested types sequence"},
- { "nested_types_by_name", (getter)GetNestedTypesByName, NULL,
- "Nested types by name"},
- { "extensions", (getter)GetExtensions, NULL, "Extensions Sequence"},
- { "extensions_by_name", (getter)GetExtensionsByName, NULL,
- "Extensions by name"},
- { "extension_ranges", (getter)GetExtensionRanges, NULL, "Extension ranges"},
- { "enum_types", (getter)GetEnumsSeq, NULL, "Enum sequence"},
- { "enum_types_by_name", (getter)GetEnumTypesByName, NULL,
- "Enum types by name"},
- { "enum_values_by_name", (getter)GetEnumValuesByName, NULL,
- "Enum values by name"},
- { "oneofs_by_name", (getter)GetOneofsByName, NULL, "Oneofs by name"},
- { "oneofs", (getter)GetOneofsSeq, NULL, "Oneofs by name"},
- { "containing_type", (getter)GetContainingType, (setter)SetContainingType,
- "Containing type"},
- { "is_extendable", (getter)IsExtendable, (setter)NULL},
- { "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
- { "_options", (getter)NULL, (setter)SetOptions, "Options"},
- { "syntax", (getter)GetSyntax, (setter)NULL, "Syntax"},
- {NULL}
-};
-
-static PyMethodDef Methods[] = {
- { "GetOptions", (PyCFunction)GetOptions, METH_NOARGS, },
- { "CopyToProto", (PyCFunction)CopyToProto, METH_O, },
- { "EnumValueName", (PyCFunction)EnumValueName, METH_VARARGS, },
- {NULL}
-};
-
-} // namespace message_descriptor
-
-PyTypeObject PyMessageDescriptor_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".MessageDescriptor", // tp_name
- sizeof(PyBaseDescriptor), // tp_basicsize
- 0, // tp_itemsize
- 0, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A Message Descriptor", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- message_descriptor::Methods, // tp_methods
- 0, // tp_members
- message_descriptor::Getters, // tp_getset
- &descriptor::PyBaseDescriptor_Type, // tp_base
-};
-
-PyObject* PyMessageDescriptor_FromDescriptor(
- const Descriptor* message_descriptor) {
- return descriptor::NewInternedDescriptor(
- &PyMessageDescriptor_Type, message_descriptor, NULL);
-}
-
-const Descriptor* PyMessageDescriptor_AsDescriptor(PyObject* obj) {
- if (!PyObject_TypeCheck(obj, &PyMessageDescriptor_Type)) {
- PyErr_SetString(PyExc_TypeError, "Not a MessageDescriptor");
- return NULL;
- }
- return reinterpret_cast<const Descriptor*>(
- reinterpret_cast<PyBaseDescriptor*>(obj)->descriptor);
-}
-
-namespace field_descriptor {
-
-// Unchecked accessor to the C++ pointer.
-static const FieldDescriptor* _GetDescriptor(
- PyBaseDescriptor *self) {
- return reinterpret_cast<const FieldDescriptor*>(self->descriptor);
-}
-
-static PyObject* GetFullName(PyBaseDescriptor* self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->full_name());
-}
-
-static PyObject* GetName(PyBaseDescriptor *self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->name());
-}
-
-static PyObject* GetCamelcaseName(PyBaseDescriptor* self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->camelcase_name());
-}
-
-static PyObject* GetType(PyBaseDescriptor *self, void *closure) {
- return PyInt_FromLong(_GetDescriptor(self)->type());
-}
-
-static PyObject* GetCppType(PyBaseDescriptor *self, void *closure) {
- return PyInt_FromLong(_GetDescriptor(self)->cpp_type());
-}
-
-static PyObject* GetLabel(PyBaseDescriptor *self, void *closure) {
- return PyInt_FromLong(_GetDescriptor(self)->label());
-}
-
-static PyObject* GetNumber(PyBaseDescriptor *self, void *closure) {
- return PyInt_FromLong(_GetDescriptor(self)->number());
-}
-
-static PyObject* GetIndex(PyBaseDescriptor *self, void *closure) {
- return PyInt_FromLong(_GetDescriptor(self)->index());
-}
-
-static PyObject* GetID(PyBaseDescriptor *self, void *closure) {
- return PyLong_FromVoidPtr(self);
-}
-
-static PyObject* IsExtension(PyBaseDescriptor *self, void *closure) {
- return PyBool_FromLong(_GetDescriptor(self)->is_extension());
-}
-
-static PyObject* HasDefaultValue(PyBaseDescriptor *self, void *closure) {
- return PyBool_FromLong(_GetDescriptor(self)->has_default_value());
-}
-
-static PyObject* GetDefaultValue(PyBaseDescriptor *self, void *closure) {
- PyObject *result;
-
- switch (_GetDescriptor(self)->cpp_type()) {
- case FieldDescriptor::CPPTYPE_INT32: {
- int32 value = _GetDescriptor(self)->default_value_int32();
- result = PyInt_FromLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_INT64: {
- int64 value = _GetDescriptor(self)->default_value_int64();
- result = PyLong_FromLongLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT32: {
- uint32 value = _GetDescriptor(self)->default_value_uint32();
- result = PyInt_FromSize_t(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT64: {
- uint64 value = _GetDescriptor(self)->default_value_uint64();
- result = PyLong_FromUnsignedLongLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_FLOAT: {
- float value = _GetDescriptor(self)->default_value_float();
- result = PyFloat_FromDouble(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_DOUBLE: {
- double value = _GetDescriptor(self)->default_value_double();
- result = PyFloat_FromDouble(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_BOOL: {
- bool value = _GetDescriptor(self)->default_value_bool();
- result = PyBool_FromLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_STRING: {
- string value = _GetDescriptor(self)->default_value_string();
- result = ToStringObject(_GetDescriptor(self), value);
- break;
- }
- case FieldDescriptor::CPPTYPE_ENUM: {
- const EnumValueDescriptor* value =
- _GetDescriptor(self)->default_value_enum();
- result = PyInt_FromLong(value->number());
- break;
- }
- default:
- PyErr_Format(PyExc_NotImplementedError, "default value for %s",
- _GetDescriptor(self)->full_name().c_str());
- return NULL;
- }
- return result;
-}
-
-static PyObject* GetCDescriptor(PyObject *self, void *closure) {
- Py_INCREF(self);
- return self;
-}
-
-static PyObject *GetEnumType(PyBaseDescriptor *self, void *closure) {
- const EnumDescriptor* enum_type = _GetDescriptor(self)->enum_type();
- if (enum_type) {
- return PyEnumDescriptor_FromDescriptor(enum_type);
- } else {
- Py_RETURN_NONE;
- }
-}
-
-static int SetEnumType(PyBaseDescriptor *self, PyObject *value, void *closure) {
- return CheckCalledFromGeneratedFile("enum_type");
-}
-
-static PyObject *GetMessageType(PyBaseDescriptor *self, void *closure) {
- const Descriptor* message_type = _GetDescriptor(self)->message_type();
- if (message_type) {
- return PyMessageDescriptor_FromDescriptor(message_type);
- } else {
- Py_RETURN_NONE;
- }
-}
-
-static int SetMessageType(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("message_type");
-}
-
-static PyObject* GetContainingType(PyBaseDescriptor *self, void *closure) {
- const Descriptor* containing_type =
- _GetDescriptor(self)->containing_type();
- if (containing_type) {
- return PyMessageDescriptor_FromDescriptor(containing_type);
- } else {
- Py_RETURN_NONE;
- }
-}
-
-static int SetContainingType(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("containing_type");
-}
-
-static PyObject* GetExtensionScope(PyBaseDescriptor *self, void *closure) {
- const Descriptor* extension_scope =
- _GetDescriptor(self)->extension_scope();
- if (extension_scope) {
- return PyMessageDescriptor_FromDescriptor(extension_scope);
- } else {
- Py_RETURN_NONE;
- }
-}
-
-static PyObject* GetContainingOneof(PyBaseDescriptor *self, void *closure) {
- const OneofDescriptor* containing_oneof =
- _GetDescriptor(self)->containing_oneof();
- if (containing_oneof) {
- return PyOneofDescriptor_FromDescriptor(containing_oneof);
- } else {
- Py_RETURN_NONE;
- }
-}
-
-static int SetContainingOneof(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("containing_oneof");
-}
-
-static PyObject* GetHasOptions(PyBaseDescriptor *self, void *closure) {
- const FieldOptions& options(_GetDescriptor(self)->options());
- if (&options != &FieldOptions::default_instance()) {
- Py_RETURN_TRUE;
- } else {
- Py_RETURN_FALSE;
- }
-}
-static int SetHasOptions(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("has_options");
-}
-
-static PyObject* GetOptions(PyBaseDescriptor *self) {
- return GetOrBuildOptions(_GetDescriptor(self));
-}
-
-static int SetOptions(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("_options");
-}
-
-
-static PyGetSetDef Getters[] = {
- { "full_name", (getter)GetFullName, NULL, "Full name"},
- { "name", (getter)GetName, NULL, "Unqualified name"},
- { "camelcase_name", (getter)GetCamelcaseName, NULL, "Camelcase name"},
- { "type", (getter)GetType, NULL, "C++ Type"},
- { "cpp_type", (getter)GetCppType, NULL, "C++ Type"},
- { "label", (getter)GetLabel, NULL, "Label"},
- { "number", (getter)GetNumber, NULL, "Number"},
- { "index", (getter)GetIndex, NULL, "Index"},
- { "default_value", (getter)GetDefaultValue, NULL, "Default Value"},
- { "has_default_value", (getter)HasDefaultValue},
- { "is_extension", (getter)IsExtension, NULL, "ID"},
- { "id", (getter)GetID, NULL, "ID"},
- { "_cdescriptor", (getter)GetCDescriptor, NULL, "HAACK REMOVE ME"},
-
- { "message_type", (getter)GetMessageType, (setter)SetMessageType,
- "Message type"},
- { "enum_type", (getter)GetEnumType, (setter)SetEnumType, "Enum type"},
- { "containing_type", (getter)GetContainingType, (setter)SetContainingType,
- "Containing type"},
- { "extension_scope", (getter)GetExtensionScope, (setter)NULL,
- "Extension scope"},
- { "containing_oneof", (getter)GetContainingOneof, (setter)SetContainingOneof,
- "Containing oneof"},
- { "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
- { "_options", (getter)NULL, (setter)SetOptions, "Options"},
- {NULL}
-};
-
-static PyMethodDef Methods[] = {
- { "GetOptions", (PyCFunction)GetOptions, METH_NOARGS, },
- {NULL}
-};
-
-} // namespace field_descriptor
-
-PyTypeObject PyFieldDescriptor_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".FieldDescriptor", // tp_name
- sizeof(PyBaseDescriptor), // tp_basicsize
- 0, // tp_itemsize
- 0, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A Field Descriptor", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- field_descriptor::Methods, // tp_methods
- 0, // tp_members
- field_descriptor::Getters, // tp_getset
- &descriptor::PyBaseDescriptor_Type, // tp_base
-};
-
-PyObject* PyFieldDescriptor_FromDescriptor(
- const FieldDescriptor* field_descriptor) {
- return descriptor::NewInternedDescriptor(
- &PyFieldDescriptor_Type, field_descriptor, NULL);
-}
-
-const FieldDescriptor* PyFieldDescriptor_AsDescriptor(PyObject* obj) {
- if (!PyObject_TypeCheck(obj, &PyFieldDescriptor_Type)) {
- PyErr_SetString(PyExc_TypeError, "Not a FieldDescriptor");
- return NULL;
- }
- return reinterpret_cast<const FieldDescriptor*>(
- reinterpret_cast<PyBaseDescriptor*>(obj)->descriptor);
-}
-
-namespace enum_descriptor {
-
-// Unchecked accessor to the C++ pointer.
-static const EnumDescriptor* _GetDescriptor(
- PyBaseDescriptor *self) {
- return reinterpret_cast<const EnumDescriptor*>(self->descriptor);
-}
-
-static PyObject* GetFullName(PyBaseDescriptor* self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->full_name());
-}
-
-static PyObject* GetName(PyBaseDescriptor *self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->name());
-}
-
-static PyObject* GetFile(PyBaseDescriptor *self, void *closure) {
- return PyFileDescriptor_FromDescriptor(_GetDescriptor(self)->file());
-}
-
-static PyObject* GetEnumvaluesByName(PyBaseDescriptor* self, void *closure) {
- return NewEnumValuesByName(_GetDescriptor(self));
-}
-
-static PyObject* GetEnumvaluesByNumber(PyBaseDescriptor* self, void *closure) {
- return NewEnumValuesByNumber(_GetDescriptor(self));
-}
-
-static PyObject* GetEnumvaluesSeq(PyBaseDescriptor* self, void *closure) {
- return NewEnumValuesSeq(_GetDescriptor(self));
-}
-
-static PyObject* GetContainingType(PyBaseDescriptor *self, void *closure) {
- const Descriptor* containing_type =
- _GetDescriptor(self)->containing_type();
- if (containing_type) {
- return PyMessageDescriptor_FromDescriptor(containing_type);
- } else {
- Py_RETURN_NONE;
- }
-}
-
-static int SetContainingType(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("containing_type");
-}
-
-
-static PyObject* GetHasOptions(PyBaseDescriptor *self, void *closure) {
- const EnumOptions& options(_GetDescriptor(self)->options());
- if (&options != &EnumOptions::default_instance()) {
- Py_RETURN_TRUE;
- } else {
- Py_RETURN_FALSE;
- }
-}
-static int SetHasOptions(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("has_options");
-}
-
-static PyObject* GetOptions(PyBaseDescriptor *self) {
- return GetOrBuildOptions(_GetDescriptor(self));
-}
-
-static int SetOptions(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("_options");
-}
-
-static PyObject* CopyToProto(PyBaseDescriptor *self, PyObject *target) {
- return CopyToPythonProto<EnumDescriptorProto>(_GetDescriptor(self), target);
-}
-
-static PyMethodDef Methods[] = {
- { "GetOptions", (PyCFunction)GetOptions, METH_NOARGS, },
- { "CopyToProto", (PyCFunction)CopyToProto, METH_O, },
- {NULL}
-};
-
-static PyGetSetDef Getters[] = {
- { "full_name", (getter)GetFullName, NULL, "Full name"},
- { "name", (getter)GetName, NULL, "last name"},
- { "file", (getter)GetFile, NULL, "File descriptor"},
- { "values", (getter)GetEnumvaluesSeq, NULL, "values"},
- { "values_by_name", (getter)GetEnumvaluesByName, NULL,
- "Enum values by name"},
- { "values_by_number", (getter)GetEnumvaluesByNumber, NULL,
- "Enum values by number"},
-
- { "containing_type", (getter)GetContainingType, (setter)SetContainingType,
- "Containing type"},
- { "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
- { "_options", (getter)NULL, (setter)SetOptions, "Options"},
- {NULL}
-};
-
-} // namespace enum_descriptor
-
-PyTypeObject PyEnumDescriptor_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".EnumDescriptor", // tp_name
- sizeof(PyBaseDescriptor), // tp_basicsize
- 0, // tp_itemsize
- 0, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A Enum Descriptor", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- enum_descriptor::Methods, // tp_methods
- 0, // tp_members
- enum_descriptor::Getters, // tp_getset
- &descriptor::PyBaseDescriptor_Type, // tp_base
-};
-
-PyObject* PyEnumDescriptor_FromDescriptor(
- const EnumDescriptor* enum_descriptor) {
- return descriptor::NewInternedDescriptor(
- &PyEnumDescriptor_Type, enum_descriptor, NULL);
-}
-
-const EnumDescriptor* PyEnumDescriptor_AsDescriptor(PyObject* obj) {
- if (!PyObject_TypeCheck(obj, &PyEnumDescriptor_Type)) {
- PyErr_SetString(PyExc_TypeError, "Not an EnumDescriptor");
- return NULL;
- }
- return reinterpret_cast<const EnumDescriptor*>(
- reinterpret_cast<PyBaseDescriptor*>(obj)->descriptor);
-}
-
-namespace enumvalue_descriptor {
-
-// Unchecked accessor to the C++ pointer.
-static const EnumValueDescriptor* _GetDescriptor(
- PyBaseDescriptor *self) {
- return reinterpret_cast<const EnumValueDescriptor*>(self->descriptor);
-}
-
-static PyObject* GetName(PyBaseDescriptor *self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->name());
-}
-
-static PyObject* GetNumber(PyBaseDescriptor *self, void *closure) {
- return PyInt_FromLong(_GetDescriptor(self)->number());
-}
-
-static PyObject* GetIndex(PyBaseDescriptor *self, void *closure) {
- return PyInt_FromLong(_GetDescriptor(self)->index());
-}
-
-static PyObject* GetType(PyBaseDescriptor *self, void *closure) {
- return PyEnumDescriptor_FromDescriptor(_GetDescriptor(self)->type());
-}
-
-static PyObject* GetHasOptions(PyBaseDescriptor *self, void *closure) {
- const EnumValueOptions& options(_GetDescriptor(self)->options());
- if (&options != &EnumValueOptions::default_instance()) {
- Py_RETURN_TRUE;
- } else {
- Py_RETURN_FALSE;
- }
-}
-static int SetHasOptions(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("has_options");
-}
-
-static PyObject* GetOptions(PyBaseDescriptor *self) {
- return GetOrBuildOptions(_GetDescriptor(self));
-}
-
-static int SetOptions(PyBaseDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("_options");
-}
-
-
-static PyGetSetDef Getters[] = {
- { "name", (getter)GetName, NULL, "name"},
- { "number", (getter)GetNumber, NULL, "number"},
- { "index", (getter)GetIndex, NULL, "index"},
- { "type", (getter)GetType, NULL, "index"},
-
- { "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
- { "_options", (getter)NULL, (setter)SetOptions, "Options"},
- {NULL}
-};
-
-static PyMethodDef Methods[] = {
- { "GetOptions", (PyCFunction)GetOptions, METH_NOARGS, },
- {NULL}
-};
-
-} // namespace enumvalue_descriptor
-
-PyTypeObject PyEnumValueDescriptor_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".EnumValueDescriptor", // tp_name
- sizeof(PyBaseDescriptor), // tp_basicsize
- 0, // tp_itemsize
- 0, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A EnumValue Descriptor", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- enumvalue_descriptor::Methods, // tp_methods
- 0, // tp_members
- enumvalue_descriptor::Getters, // tp_getset
- &descriptor::PyBaseDescriptor_Type, // tp_base
-};
-
-PyObject* PyEnumValueDescriptor_FromDescriptor(
- const EnumValueDescriptor* enumvalue_descriptor) {
- return descriptor::NewInternedDescriptor(
- &PyEnumValueDescriptor_Type, enumvalue_descriptor, NULL);
-}
-
-namespace file_descriptor {
-
-// Unchecked accessor to the C++ pointer.
-static const FileDescriptor* _GetDescriptor(PyFileDescriptor *self) {
- return reinterpret_cast<const FileDescriptor*>(self->base.descriptor);
-}
-
-static void Dealloc(PyFileDescriptor* self) {
- Py_XDECREF(self->serialized_pb);
- descriptor::Dealloc(&self->base);
-}
-
-static PyObject* GetPool(PyFileDescriptor *self, void *closure) {
- PyObject* pool = reinterpret_cast<PyObject*>(
- GetDescriptorPool_FromPool(_GetDescriptor(self)->pool()));
- Py_XINCREF(pool);
- return pool;
-}
-
-static PyObject* GetName(PyFileDescriptor *self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->name());
-}
-
-static PyObject* GetPackage(PyFileDescriptor *self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->package());
-}
-
-static PyObject* GetSerializedPb(PyFileDescriptor *self, void *closure) {
- PyObject *serialized_pb = self->serialized_pb;
- if (serialized_pb != NULL) {
- Py_INCREF(serialized_pb);
- return serialized_pb;
- }
- FileDescriptorProto file_proto;
- _GetDescriptor(self)->CopyTo(&file_proto);
- string contents;
- file_proto.SerializePartialToString(&contents);
- self->serialized_pb = PyBytes_FromStringAndSize(
- contents.c_str(), contents.size());
- if (self->serialized_pb == NULL) {
- return NULL;
- }
- Py_INCREF(self->serialized_pb);
- return self->serialized_pb;
-}
-
-static PyObject* GetMessageTypesByName(PyFileDescriptor* self, void *closure) {
- return NewFileMessageTypesByName(_GetDescriptor(self));
-}
-
-static PyObject* GetEnumTypesByName(PyFileDescriptor* self, void *closure) {
- return NewFileEnumTypesByName(_GetDescriptor(self));
-}
-
-static PyObject* GetExtensionsByName(PyFileDescriptor* self, void *closure) {
- return NewFileExtensionsByName(_GetDescriptor(self));
-}
-
-static PyObject* GetDependencies(PyFileDescriptor* self, void *closure) {
- return NewFileDependencies(_GetDescriptor(self));
-}
-
-static PyObject* GetPublicDependencies(PyFileDescriptor* self, void *closure) {
- return NewFilePublicDependencies(_GetDescriptor(self));
-}
-
-static PyObject* GetHasOptions(PyFileDescriptor *self, void *closure) {
- const FileOptions& options(_GetDescriptor(self)->options());
- if (&options != &FileOptions::default_instance()) {
- Py_RETURN_TRUE;
- } else {
- Py_RETURN_FALSE;
- }
-}
-static int SetHasOptions(PyFileDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("has_options");
-}
-
-static PyObject* GetOptions(PyFileDescriptor *self) {
- return GetOrBuildOptions(_GetDescriptor(self));
-}
-
-static int SetOptions(PyFileDescriptor *self, PyObject *value,
- void *closure) {
- return CheckCalledFromGeneratedFile("_options");
-}
-
-static PyObject* GetSyntax(PyFileDescriptor *self, void *closure) {
- return PyString_InternFromString(
- FileDescriptor::SyntaxName(_GetDescriptor(self)->syntax()));
-}
-
-static PyObject* CopyToProto(PyFileDescriptor *self, PyObject *target) {
- return CopyToPythonProto<FileDescriptorProto>(_GetDescriptor(self), target);
-}
-
-static PyGetSetDef Getters[] = {
- { "pool", (getter)GetPool, NULL, "pool"},
- { "name", (getter)GetName, NULL, "name"},
- { "package", (getter)GetPackage, NULL, "package"},
- { "serialized_pb", (getter)GetSerializedPb},
- { "message_types_by_name", (getter)GetMessageTypesByName, NULL,
- "Messages by name"},
- { "enum_types_by_name", (getter)GetEnumTypesByName, NULL, "Enums by name"},
- { "extensions_by_name", (getter)GetExtensionsByName, NULL,
- "Extensions by name"},
- { "dependencies", (getter)GetDependencies, NULL, "Dependencies"},
- { "public_dependencies", (getter)GetPublicDependencies, NULL, "Dependencies"},
-
- { "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
- { "_options", (getter)NULL, (setter)SetOptions, "Options"},
- { "syntax", (getter)GetSyntax, (setter)NULL, "Syntax"},
- {NULL}
-};
-
-static PyMethodDef Methods[] = {
- { "GetOptions", (PyCFunction)GetOptions, METH_NOARGS, },
- { "CopyToProto", (PyCFunction)CopyToProto, METH_O, },
- {NULL}
-};
-
-} // namespace file_descriptor
-
-PyTypeObject PyFileDescriptor_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".FileDescriptor", // tp_name
- sizeof(PyFileDescriptor), // tp_basicsize
- 0, // tp_itemsize
- (destructor)file_descriptor::Dealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A File Descriptor", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- file_descriptor::Methods, // tp_methods
- 0, // tp_members
- file_descriptor::Getters, // tp_getset
- &descriptor::PyBaseDescriptor_Type, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
- 0, // tp_alloc
- 0, // tp_new
- PyObject_Del, // tp_free
-};
-
-PyObject* PyFileDescriptor_FromDescriptor(
- const FileDescriptor* file_descriptor) {
- return PyFileDescriptor_FromDescriptorWithSerializedPb(file_descriptor,
- NULL);
-}
-
-PyObject* PyFileDescriptor_FromDescriptorWithSerializedPb(
- const FileDescriptor* file_descriptor, PyObject *serialized_pb) {
- bool was_created;
- PyObject* py_descriptor = descriptor::NewInternedDescriptor(
- &PyFileDescriptor_Type, file_descriptor, &was_created);
- if (py_descriptor == NULL) {
- return NULL;
- }
- if (was_created) {
- PyFileDescriptor* cfile_descriptor =
- reinterpret_cast<PyFileDescriptor*>(py_descriptor);
- Py_XINCREF(serialized_pb);
- cfile_descriptor->serialized_pb = serialized_pb;
- }
- // TODO(amauryfa): In the case of a cached object, check that serialized_pb
- // is the same as before.
-
- return py_descriptor;
-}
-
-const FileDescriptor* PyFileDescriptor_AsDescriptor(PyObject* obj) {
- if (!PyObject_TypeCheck(obj, &PyFileDescriptor_Type)) {
- PyErr_SetString(PyExc_TypeError, "Not a FileDescriptor");
- return NULL;
- }
- return reinterpret_cast<const FileDescriptor*>(
- reinterpret_cast<PyBaseDescriptor*>(obj)->descriptor);
-}
-
-namespace oneof_descriptor {
-
-// Unchecked accessor to the C++ pointer.
-static const OneofDescriptor* _GetDescriptor(
- PyBaseDescriptor *self) {
- return reinterpret_cast<const OneofDescriptor*>(self->descriptor);
-}
-
-static PyObject* GetName(PyBaseDescriptor* self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->name());
-}
-
-static PyObject* GetFullName(PyBaseDescriptor* self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->full_name());
-}
-
-static PyObject* GetIndex(PyBaseDescriptor *self, void *closure) {
- return PyInt_FromLong(_GetDescriptor(self)->index());
-}
-
-static PyObject* GetFields(PyBaseDescriptor* self, void *closure) {
- return NewOneofFieldsSeq(_GetDescriptor(self));
-}
-
-static PyObject* GetContainingType(PyBaseDescriptor *self, void *closure) {
- const Descriptor* containing_type =
- _GetDescriptor(self)->containing_type();
- if (containing_type) {
- return PyMessageDescriptor_FromDescriptor(containing_type);
- } else {
- Py_RETURN_NONE;
- }
-}
-
-static PyGetSetDef Getters[] = {
- { "name", (getter)GetName, NULL, "Name"},
- { "full_name", (getter)GetFullName, NULL, "Full name"},
- { "index", (getter)GetIndex, NULL, "Index"},
-
- { "containing_type", (getter)GetContainingType, NULL, "Containing type"},
- { "fields", (getter)GetFields, NULL, "Fields"},
- {NULL}
-};
-
-} // namespace oneof_descriptor
-
-PyTypeObject PyOneofDescriptor_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".OneofDescriptor", // tp_name
- sizeof(PyBaseDescriptor), // tp_basicsize
- 0, // tp_itemsize
- 0, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A Oneof Descriptor", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- 0, // tp_methods
- 0, // tp_members
- oneof_descriptor::Getters, // tp_getset
- &descriptor::PyBaseDescriptor_Type, // tp_base
-};
-
-PyObject* PyOneofDescriptor_FromDescriptor(
- const OneofDescriptor* oneof_descriptor) {
- return descriptor::NewInternedDescriptor(
- &PyOneofDescriptor_Type, oneof_descriptor, NULL);
-}
-
-// Add enum values to a type dictionary.
-static bool AddEnumValues(PyTypeObject *type,
- const EnumDescriptor* enum_descriptor) {
- for (int i = 0; i < enum_descriptor->value_count(); ++i) {
- const EnumValueDescriptor* value = enum_descriptor->value(i);
- ScopedPyObjectPtr obj(PyInt_FromLong(value->number()));
- if (obj == NULL) {
- return false;
- }
- if (PyDict_SetItemString(type->tp_dict, value->name().c_str(), obj.get()) <
- 0) {
- return false;
- }
- }
- return true;
-}
-
-static bool AddIntConstant(PyTypeObject *type, const char* name, int value) {
- ScopedPyObjectPtr obj(PyInt_FromLong(value));
- if (PyDict_SetItemString(type->tp_dict, name, obj.get()) < 0) {
- return false;
- }
- return true;
-}
-
-
-bool InitDescriptor() {
- if (PyType_Ready(&PyMessageDescriptor_Type) < 0)
- return false;
-
- if (PyType_Ready(&PyFieldDescriptor_Type) < 0)
- return false;
-
- if (!AddEnumValues(&PyFieldDescriptor_Type,
- FieldDescriptorProto::Label_descriptor())) {
- return false;
- }
- if (!AddEnumValues(&PyFieldDescriptor_Type,
- FieldDescriptorProto::Type_descriptor())) {
- return false;
- }
-#define ADD_FIELDDESC_CONSTANT(NAME) AddIntConstant( \
- &PyFieldDescriptor_Type, #NAME, FieldDescriptor::NAME)
- if (!ADD_FIELDDESC_CONSTANT(CPPTYPE_INT32) ||
- !ADD_FIELDDESC_CONSTANT(CPPTYPE_INT64) ||
- !ADD_FIELDDESC_CONSTANT(CPPTYPE_UINT32) ||
- !ADD_FIELDDESC_CONSTANT(CPPTYPE_UINT64) ||
- !ADD_FIELDDESC_CONSTANT(CPPTYPE_DOUBLE) ||
- !ADD_FIELDDESC_CONSTANT(CPPTYPE_FLOAT) ||
- !ADD_FIELDDESC_CONSTANT(CPPTYPE_BOOL) ||
- !ADD_FIELDDESC_CONSTANT(CPPTYPE_ENUM) ||
- !ADD_FIELDDESC_CONSTANT(CPPTYPE_STRING) ||
- !ADD_FIELDDESC_CONSTANT(CPPTYPE_MESSAGE)) {
- return false;
- }
-#undef ADD_FIELDDESC_CONSTANT
-
- if (PyType_Ready(&PyEnumDescriptor_Type) < 0)
- return false;
-
- if (PyType_Ready(&PyEnumValueDescriptor_Type) < 0)
- return false;
-
- if (PyType_Ready(&PyFileDescriptor_Type) < 0)
- return false;
-
- if (PyType_Ready(&PyOneofDescriptor_Type) < 0)
- return false;
-
- if (!InitDescriptorMappingTypes())
- return false;
-
- return true;
-}
-
-} // namespace python
-} // namespace protobuf
-} // namespace google
diff --git a/third_party/protobuf/python/google/protobuf/pyext/descriptor.h b/third_party/protobuf/python/google/protobuf/pyext/descriptor.h
deleted file mode 100644
index eb99df182e..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/descriptor.h
+++ /dev/null
@@ -1,97 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: petar@google.com (Petar Petrov)
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_H__
-
-#include <Python.h>
-
-#include <google/protobuf/descriptor.h>
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-extern PyTypeObject PyMessageDescriptor_Type;
-extern PyTypeObject PyFieldDescriptor_Type;
-extern PyTypeObject PyEnumDescriptor_Type;
-extern PyTypeObject PyEnumValueDescriptor_Type;
-extern PyTypeObject PyFileDescriptor_Type;
-extern PyTypeObject PyOneofDescriptor_Type;
-
-// Wraps a Descriptor in a Python object.
-// The C++ pointer is usually borrowed from the global DescriptorPool.
-// In any case, it must stay alive as long as the Python object.
-// Returns a new reference.
-PyObject* PyMessageDescriptor_FromDescriptor(const Descriptor* descriptor);
-PyObject* PyFieldDescriptor_FromDescriptor(const FieldDescriptor* descriptor);
-PyObject* PyEnumDescriptor_FromDescriptor(const EnumDescriptor* descriptor);
-PyObject* PyEnumValueDescriptor_FromDescriptor(
- const EnumValueDescriptor* descriptor);
-PyObject* PyOneofDescriptor_FromDescriptor(const OneofDescriptor* descriptor);
-PyObject* PyFileDescriptor_FromDescriptor(
- const FileDescriptor* file_descriptor);
-
-// Alternate constructor of PyFileDescriptor, used when we already have a
-// serialized FileDescriptorProto that can be cached.
-// Returns a new reference.
-PyObject* PyFileDescriptor_FromDescriptorWithSerializedPb(
- const FileDescriptor* file_descriptor, PyObject* serialized_pb);
-
-// Return the C++ descriptor pointer.
-// This function checks the parameter type; on error, return NULL with a Python
-// exception set.
-const Descriptor* PyMessageDescriptor_AsDescriptor(PyObject* obj);
-const FieldDescriptor* PyFieldDescriptor_AsDescriptor(PyObject* obj);
-const EnumDescriptor* PyEnumDescriptor_AsDescriptor(PyObject* obj);
-const FileDescriptor* PyFileDescriptor_AsDescriptor(PyObject* obj);
-
-// Returns the raw C++ pointer.
-const void* PyDescriptor_AsVoidPtr(PyObject* obj);
-
-// Check that the calling Python code is the global scope of a _pb2.py module.
-// This function is used to support the current code generated by the proto
-// compiler, which insists on modifying descriptors after they have been
-// created.
-//
-// stacklevel indicates which Python frame should be the _pb2.py module.
-//
-// Don't use this function outside descriptor classes.
-bool _CalledFromGeneratedFile(int stacklevel);
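For context, the sketch below is an editorial illustration (not part of the deleted header); it mirrors how the setter slots in descriptor_containers.cc, such as AssSubscript and Append, use this guard. The helper name ExampleSetterGuard is hypothetical.

// Editorial sketch only: mirrors the guard pattern used by setter slots in
// descriptor_containers.cc (AssSubscript, Append); not an additional API.
static inline int ExampleSetterGuard() {
  if (_CalledFromGeneratedFile(0)) {
    return 0;   // Writes from the module scope of a _pb2.py file are ignored.
  }
  PyErr_SetString(PyExc_TypeError, "descriptors are read-only");
  return -1;    // Any other caller gets a TypeError.
}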
-
-bool InitDescriptor();
-
-} // namespace python
-} // namespace protobuf
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_H__
diff --git a/third_party/protobuf/python/google/protobuf/pyext/descriptor_containers.cc b/third_party/protobuf/python/google/protobuf/pyext/descriptor_containers.cc
deleted file mode 100644
index e505d8122c..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/descriptor_containers.cc
+++ /dev/null
@@ -1,1652 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Mappings and Sequences of descriptors.
-// Used by Descriptor.fields_by_name, EnumDescriptor.values...
-//
-// They avoid the allocation of a full dictionary or a full list: they simply
-// store a pointer to the parent descriptor, use the C++ Descriptor methods (see
-// google/protobuf/descriptor.h) to retrieve other descriptors, and create
-// Python objects on the fly.
-//
-// The containers fully conform to abc.Mapping and abc.Sequence, and behave just
-// like read-only dictionaries and lists.
-//
-// Because the interface of C++ Descriptors is quite regular, this file actually
-// defines only three types; the exact behavior of a container is controlled by
-// a DescriptorContainerDef structure, which contains functions that use the
-// public Descriptor API.  (A short illustrative sketch follows the
-// DescriptorContainerDef definition below.)
-//
-// Note: This DescriptorContainerDef is similar to the "virtual methods table"
-// that a C++ compiler generates for a class. We have to make it explicit
-// because the Python API is based on C, and does not play well with C++
-// inheritance.
-
-#include <Python.h>
-
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/pyext/descriptor_containers.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-
-#if PY_MAJOR_VERSION >= 3
- #define PyString_FromStringAndSize PyUnicode_FromStringAndSize
- #define PyString_FromFormat PyUnicode_FromFormat
- #define PyInt_FromLong PyLong_FromLong
- #if PY_VERSION_HEX < 0x03030000
- #error "Python 3.0 - 3.2 are not supported."
- #endif
- #define PyString_AsStringAndSize(ob, charpp, sizep) \
- (PyUnicode_Check(ob)? \
- ((*(charpp) = PyUnicode_AsUTF8AndSize(ob, (sizep))) == NULL? -1: 0): \
- PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
-#endif
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-struct PyContainer;
-
-typedef int (*CountMethod)(PyContainer* self);
-typedef const void* (*GetByIndexMethod)(PyContainer* self, int index);
-typedef const void* (*GetByNameMethod)(PyContainer* self, const string& name);
-typedef const void* (*GetByCamelcaseNameMethod)(PyContainer* self,
- const string& name);
-typedef const void* (*GetByNumberMethod)(PyContainer* self, int index);
-typedef PyObject* (*NewObjectFromItemMethod)(const void* descriptor);
-typedef const string& (*GetItemNameMethod)(const void* descriptor);
-typedef const string& (*GetItemCamelcaseNameMethod)(const void* descriptor);
-typedef int (*GetItemNumberMethod)(const void* descriptor);
-typedef int (*GetItemIndexMethod)(const void* descriptor);
-
-struct DescriptorContainerDef {
- const char* mapping_name;
- // Returns the number of items in the container.
- CountMethod count_fn;
- // Retrieve item by index (usually the order of declaration in the proto file)
- // Used by sequences, but also iterators. 0 <= index < Count().
- GetByIndexMethod get_by_index_fn;
- // Retrieve item by name (usually a call to some 'FindByName' method).
- // Used by "by_name" mappings.
- GetByNameMethod get_by_name_fn;
- // Retrieve item by camelcase name (usually a call to some
- // 'FindByCamelcaseName' method). Used by "by_camelcase_name" mappings.
- GetByCamelcaseNameMethod get_by_camelcase_name_fn;
- // Retrieve item by declared number (field tag, or enum value).
- // Used by "by_number" mappings.
- GetByNumberMethod get_by_number_fn;
- // Converts an item's C++ descriptor to a Python object. Returns a new reference.
- NewObjectFromItemMethod new_object_from_item_fn;
- // Retrieve the name of an item. Used by iterators on "by_name" mappings.
- GetItemNameMethod get_item_name_fn;
- // Retrieve the camelcase name of an item. Used by iterators on
- // "by_camelcase_name" mappings.
- GetItemCamelcaseNameMethod get_item_camelcase_name_fn;
- // Retrieve the number of an item. Used by iterators on "by_number" mappings.
- GetItemNumberMethod get_item_number_fn;
- // Retrieve the index of an item for the container type.
- // Used by "__contains__".
- // If not set, "x in sequence" will do a linear search.
- GetItemIndexMethod get_item_index_fn;
-};
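For orientation, here is a short editorial sketch (not part of the deleted file) of how such a table is consumed. The two Example_* helpers are hypothetical; they simply forward to NewMessageFieldsByName and NewMessageFieldsSeq, the real constructors declared in descriptor_containers.h and defined further down, and note what the resulting objects back on the Python side.

// Editorial sketch only; the Example_* names are hypothetical.
static PyObject* Example_FieldsByName(const Descriptor* message_type) {
  // Backs Descriptor.fields_by_name: a read-only, dict-like view keyed by
  // field name.  The container stores only the parent descriptor pointer and
  // a pointer to the static ContainerDef; items are wrapped lazily on access.
  return message_descriptor::NewMessageFieldsByName(message_type);
}

static PyObject* Example_FieldsSeq(const Descriptor* message_type) {
  // Backs Descriptor.fields: a read-only, list-like view in declaration
  // order, built from the same ContainerDef with KIND_SEQUENCE.
  return message_descriptor::NewMessageFieldsSeq(message_type);
}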
-
-struct PyContainer {
- PyObject_HEAD
-
- // The proto2 descriptor this container belongs to; it is owned by the
- // global DescriptorPool.
- const void* descriptor;
-
- // A pointer to a static structure with function pointers that control the
- // behavior of the container. Very similar to the table of virtual functions
- // of a C++ class.
- const DescriptorContainerDef* container_def;
-
- // The kind of container: list, or dict keyed by name, camelCase name, or number.
- enum ContainerKind {
- KIND_SEQUENCE,
- KIND_BYNAME,
- KIND_BYCAMELCASENAME,
- KIND_BYNUMBER,
- } kind;
-};
-
-struct PyContainerIterator {
- PyObject_HEAD
-
- // The container we are iterating over. Own a reference.
- PyContainer* container;
-
- // The current index in the iterator.
- int index;
-
- // The kind of iteration: over keys, values, (key, value) items, or reversed values.
- enum IterKind {
- KIND_ITERKEY,
- KIND_ITERVALUE,
- KIND_ITERITEM,
- KIND_ITERVALUE_REVERSED, // For sequences
- } kind;
-};
-
-namespace descriptor {
-
-// Returns the C++ item descriptor for a given Python key.
-// When the descriptor is found, return true and set *item.
-// When the descriptor is not found, return true, but set *item to NULL.
-// On error, returns false with an exception set.
-static bool _GetItemByKey(PyContainer* self, PyObject* key, const void** item) {
- switch (self->kind) {
- case PyContainer::KIND_BYNAME:
- {
- char* name;
- Py_ssize_t name_size;
- if (PyString_AsStringAndSize(key, &name, &name_size) < 0) {
- if (PyErr_ExceptionMatches(PyExc_TypeError)) {
- // Not a string, cannot be in the container.
- PyErr_Clear();
- *item = NULL;
- return true;
- }
- return false;
- }
- *item = self->container_def->get_by_name_fn(
- self, string(name, name_size));
- return true;
- }
- case PyContainer::KIND_BYCAMELCASENAME:
- {
- char* camelcase_name;
- Py_ssize_t name_size;
- if (PyString_AsStringAndSize(key, &camelcase_name, &name_size) < 0) {
- if (PyErr_ExceptionMatches(PyExc_TypeError)) {
- // Not a string, cannot be in the container.
- PyErr_Clear();
- *item = NULL;
- return true;
- }
- return false;
- }
- *item = self->container_def->get_by_camelcase_name_fn(
- self, string(camelcase_name, name_size));
- return true;
- }
- case PyContainer::KIND_BYNUMBER:
- {
- Py_ssize_t number = PyNumber_AsSsize_t(key, NULL);
- if (number == -1 && PyErr_Occurred()) {
- if (PyErr_ExceptionMatches(PyExc_TypeError)) {
- // Not a number, cannot be in the container.
- PyErr_Clear();
- *item = NULL;
- return true;
- }
- return false;
- }
- *item = self->container_def->get_by_number_fn(self, number);
- return true;
- }
- default:
- PyErr_SetNone(PyExc_NotImplementedError);
- return false;
- }
-}
-
-// Returns the key of the object at the given index.
-// Used when iterating over mappings.
-static PyObject* _NewKey_ByIndex(PyContainer* self, Py_ssize_t index) {
- const void* item = self->container_def->get_by_index_fn(self, index);
- switch (self->kind) {
- case PyContainer::KIND_BYNAME:
- {
- const string& name(self->container_def->get_item_name_fn(item));
- return PyString_FromStringAndSize(name.c_str(), name.size());
- }
- case PyContainer::KIND_BYCAMELCASENAME:
- {
- const string& name(
- self->container_def->get_item_camelcase_name_fn(item));
- return PyString_FromStringAndSize(name.c_str(), name.size());
- }
- case PyContainer::KIND_BYNUMBER:
- {
- int value = self->container_def->get_item_number_fn(item);
- return PyInt_FromLong(value);
- }
- default:
- PyErr_SetNone(PyExc_NotImplementedError);
- return NULL;
- }
-}
-
-// Returns the object at the given index.
-// Also used when iterating over mappings.
-static PyObject* _NewObj_ByIndex(PyContainer* self, Py_ssize_t index) {
- return self->container_def->new_object_from_item_fn(
- self->container_def->get_by_index_fn(self, index));
-}
-
-static Py_ssize_t Length(PyContainer* self) {
- return self->container_def->count_fn(self);
-}
-
-// The DescriptorMapping type.
-
-static PyObject* Subscript(PyContainer* self, PyObject* key) {
- const void* item = NULL;
- if (!_GetItemByKey(self, key, &item)) {
- return NULL;
- }
- if (!item) {
- PyErr_SetObject(PyExc_KeyError, key);
- return NULL;
- }
- return self->container_def->new_object_from_item_fn(item);
-}
-
-static int AssSubscript(PyContainer* self, PyObject* key, PyObject* value) {
- if (_CalledFromGeneratedFile(0)) {
- return 0;
- }
- PyErr_Format(PyExc_TypeError,
- "'%.200s' object does not support item assignment",
- Py_TYPE(self)->tp_name);
- return -1;
-}
-
-static PyMappingMethods MappingMappingMethods = {
- (lenfunc)Length, // mp_length
- (binaryfunc)Subscript, // mp_subscript
- (objobjargproc)AssSubscript, // mp_ass_subscript
-};
-
-static int Contains(PyContainer* self, PyObject* key) {
- const void* item = NULL;
- if (!_GetItemByKey(self, key, &item)) {
- return -1;
- }
- if (item) {
- return 1;
- } else {
- return 0;
- }
-}
-
-static PyObject* ContainerRepr(PyContainer* self) {
- const char* kind = "";
- switch (self->kind) {
- case PyContainer::KIND_SEQUENCE:
- kind = "sequence";
- break;
- case PyContainer::KIND_BYNAME:
- kind = "mapping by name";
- break;
- case PyContainer::KIND_BYCAMELCASENAME:
- kind = "mapping by camelCase name";
- break;
- case PyContainer::KIND_BYNUMBER:
- kind = "mapping by number";
- break;
- }
- return PyString_FromFormat(
- "<%s %s>", self->container_def->mapping_name, kind);
-}
-
-extern PyTypeObject DescriptorMapping_Type;
-extern PyTypeObject DescriptorSequence_Type;
-
-// A sequence container can only be equal to another sequence container, or (for
-// backward compatibility) to a list containing the same items.
-// Returns 1 if equal, 0 if unequal, -1 on error.
-static int DescriptorSequence_Equal(PyContainer* self, PyObject* other) {
- // Check the identity of C++ pointers.
- if (PyObject_TypeCheck(other, &DescriptorSequence_Type)) {
- PyContainer* other_container = reinterpret_cast<PyContainer*>(other);
- if (self->descriptor == other_container->descriptor &&
- self->container_def == other_container->container_def &&
- self->kind == other_container->kind) {
- return 1;
- } else {
- return 0;
- }
- }
-
- // If other is a list
- if (PyList_Check(other)) {
- // return list(self) == other
- int size = Length(self);
- if (size != PyList_Size(other)) {
- return false;
- }
- for (int index = 0; index < size; index++) {
- ScopedPyObjectPtr value1(_NewObj_ByIndex(self, index));
- if (value1 == NULL) {
- return -1;
- }
- PyObject* value2 = PyList_GetItem(other, index);
- if (value2 == NULL) {
- return -1;
- }
- int cmp = PyObject_RichCompareBool(value1.get(), value2, Py_EQ);
- if (cmp != 1) // error or not equal
- return cmp;
- }
- // All items were found and equal
- return 1;
- }
-
- // Any other object is different.
- return 0;
-}
-
-// A mapping container can only be equal to another mapping container, or (for
-// backward compatibility) to a dict containing the same items.
-// Returns 1 if equal, 0 if unequal, -1 on error.
-static int DescriptorMapping_Equal(PyContainer* self, PyObject* other) {
- // Check the identity of C++ pointers.
- if (PyObject_TypeCheck(other, &DescriptorMapping_Type)) {
- PyContainer* other_container = reinterpret_cast<PyContainer*>(other);
- if (self->descriptor == other_container->descriptor &&
- self->container_def == other_container->container_def &&
- self->kind == other_container->kind) {
- return 1;
- } else {
- return 0;
- }
- }
-
- // If other is a dict
- if (PyDict_Check(other)) {
- // equivalent to dict(self.items()) == other
- int size = Length(self);
- if (size != PyDict_Size(other)) {
- return false;
- }
- for (int index = 0; index < size; index++) {
- ScopedPyObjectPtr key(_NewKey_ByIndex(self, index));
- if (key == NULL) {
- return -1;
- }
- ScopedPyObjectPtr value1(_NewObj_ByIndex(self, index));
- if (value1 == NULL) {
- return -1;
- }
- PyObject* value2 = PyDict_GetItem(other, key.get());
- if (value2 == NULL) {
- // Not found in the other dictionary
- return 0;
- }
- int cmp = PyObject_RichCompareBool(value1.get(), value2, Py_EQ);
- if (cmp != 1) // error or not equal
- return cmp;
- }
- // All items were found and equal
- return 1;
- }
-
- // Any other object is different.
- return 0;
-}
-
-static PyObject* RichCompare(PyContainer* self, PyObject* other, int opid) {
- if (opid != Py_EQ && opid != Py_NE) {
- Py_INCREF(Py_NotImplemented);
- return Py_NotImplemented;
- }
-
- int result;
-
- if (self->kind == PyContainer::KIND_SEQUENCE) {
- result = DescriptorSequence_Equal(self, other);
- } else {
- result = DescriptorMapping_Equal(self, other);
- }
- if (result < 0) {
- return NULL;
- }
- if (result ^ (opid == Py_NE)) {
- Py_RETURN_TRUE;
- } else {
- Py_RETURN_FALSE;
- }
-}
-
-static PySequenceMethods MappingSequenceMethods = {
- 0, // sq_length
- 0, // sq_concat
- 0, // sq_repeat
- 0, // sq_item
- 0, // sq_slice
- 0, // sq_ass_item
- 0, // sq_ass_slice
- (objobjproc)Contains, // sq_contains
-};
-
-static PyObject* Get(PyContainer* self, PyObject* args) {
- PyObject* key;
- PyObject* default_value = Py_None;
- if (!PyArg_UnpackTuple(args, "get", 1, 2, &key, &default_value)) {
- return NULL;
- }
-
- const void* item;
- if (!_GetItemByKey(self, key, &item)) {
- return NULL;
- }
- if (item == NULL) {
- Py_INCREF(default_value);
- return default_value;
- }
- return self->container_def->new_object_from_item_fn(item);
-}
-
-static PyObject* Keys(PyContainer* self, PyObject* args) {
- Py_ssize_t count = Length(self);
- ScopedPyObjectPtr list(PyList_New(count));
- if (list == NULL) {
- return NULL;
- }
- for (Py_ssize_t index = 0; index < count; ++index) {
- PyObject* key = _NewKey_ByIndex(self, index);
- if (key == NULL) {
- return NULL;
- }
- PyList_SET_ITEM(list.get(), index, key);
- }
- return list.release();
-}
-
-static PyObject* Values(PyContainer* self, PyObject* args) {
- Py_ssize_t count = Length(self);
- ScopedPyObjectPtr list(PyList_New(count));
- if (list == NULL) {
- return NULL;
- }
- for (Py_ssize_t index = 0; index < count; ++index) {
- PyObject* value = _NewObj_ByIndex(self, index);
- if (value == NULL) {
- return NULL;
- }
- PyList_SET_ITEM(list.get(), index, value);
- }
- return list.release();
-}
-
-static PyObject* Items(PyContainer* self, PyObject* args) {
- Py_ssize_t count = Length(self);
- ScopedPyObjectPtr list(PyList_New(count));
- if (list == NULL) {
- return NULL;
- }
- for (Py_ssize_t index = 0; index < count; ++index) {
- ScopedPyObjectPtr obj(PyTuple_New(2));
- if (obj == NULL) {
- return NULL;
- }
- PyObject* key = _NewKey_ByIndex(self, index);
- if (key == NULL) {
- return NULL;
- }
- PyTuple_SET_ITEM(obj.get(), 0, key);
- PyObject* value = _NewObj_ByIndex(self, index);
- if (value == NULL) {
- return NULL;
- }
- PyTuple_SET_ITEM(obj.get(), 1, value);
- PyList_SET_ITEM(list.get(), index, obj.release());
- }
- return list.release();
-}
-
-static PyObject* NewContainerIterator(PyContainer* mapping,
- PyContainerIterator::IterKind kind);
-
-static PyObject* Iter(PyContainer* self) {
- return NewContainerIterator(self, PyContainerIterator::KIND_ITERKEY);
-}
-static PyObject* IterKeys(PyContainer* self, PyObject* args) {
- return NewContainerIterator(self, PyContainerIterator::KIND_ITERKEY);
-}
-static PyObject* IterValues(PyContainer* self, PyObject* args) {
- return NewContainerIterator(self, PyContainerIterator::KIND_ITERVALUE);
-}
-static PyObject* IterItems(PyContainer* self, PyObject* args) {
- return NewContainerIterator(self, PyContainerIterator::KIND_ITERITEM);
-}
-
-static PyMethodDef MappingMethods[] = {
- { "get", (PyCFunction)Get, METH_VARARGS, },
- { "keys", (PyCFunction)Keys, METH_NOARGS, },
- { "values", (PyCFunction)Values, METH_NOARGS, },
- { "items", (PyCFunction)Items, METH_NOARGS, },
- { "iterkeys", (PyCFunction)IterKeys, METH_NOARGS, },
- { "itervalues", (PyCFunction)IterValues, METH_NOARGS, },
- { "iteritems", (PyCFunction)IterItems, METH_NOARGS, },
- {NULL}
-};
-
-PyTypeObject DescriptorMapping_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- "DescriptorMapping", // tp_name
- sizeof(PyContainer), // tp_basicsize
- 0, // tp_itemsize
- 0, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- (reprfunc)ContainerRepr, // tp_repr
- 0, // tp_as_number
- &MappingSequenceMethods, // tp_as_sequence
- &MappingMappingMethods, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- 0, // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- (richcmpfunc)RichCompare, // tp_richcompare
- 0, // tp_weaklistoffset
- (getiterfunc)Iter, // tp_iter
- 0, // tp_iternext
- MappingMethods, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
- 0, // tp_alloc
- 0, // tp_new
- 0, // tp_free
-};
-
-// The DescriptorSequence type.
-
-static PyObject* GetItem(PyContainer* self, Py_ssize_t index) {
- if (index < 0) {
- index += Length(self);
- }
- if (index < 0 || index >= Length(self)) {
- PyErr_SetString(PyExc_IndexError, "index out of range");
- return NULL;
- }
- return _NewObj_ByIndex(self, index);
-}
-
-// Returns the position of the item in the sequence, or -1 if not found.
-// This function never fails.
-int Find(PyContainer* self, PyObject* item) {
- // The item can only be in one position: item.index.
- // Check that self[item.index] == item, it's faster than a linear search.
- //
- // This assumes that sequences are only defined by syntax of the .proto file:
- // a specific item belongs to only one sequence, depending on its position in
- // the .proto file definition.
- const void* descriptor_ptr = PyDescriptor_AsVoidPtr(item);
- if (descriptor_ptr == NULL) {
- // Not a descriptor, it cannot be in the list.
- return -1;
- }
- if (self->container_def->get_item_index_fn) {
- int index = self->container_def->get_item_index_fn(descriptor_ptr);
- if (index < 0 || index >= Length(self)) {
- // This index is not from this collection.
- return -1;
- }
- if (self->container_def->get_by_index_fn(self, index) != descriptor_ptr) {
- // The descriptor at this index is not the same.
- return -1;
- }
- // self[item.index] == item, so return the index.
- return index;
- } else {
- // Fall back to linear search.
- int length = Length(self);
- for (int index=0; index < length; index++) {
- if (self->container_def->get_by_index_fn(self, index) == descriptor_ptr) {
- return index;
- }
- }
- // Not found
- return -1;
- }
-}
-
-// Implements list.index(): returns the position of the item in the sequence.
-static PyObject* Index(PyContainer* self, PyObject* item) {
- int position = Find(self, item);
- if (position < 0) {
- // Not found
- PyErr_SetNone(PyExc_ValueError);
- return NULL;
- } else {
- return PyInt_FromLong(position);
- }
-}
-// Implements "list.__contains__()": is the object in the sequence.
-static int SeqContains(PyContainer* self, PyObject* item) {
- int position = Find(self, item);
- if (position < 0) {
- return 0;
- } else {
- return 1;
- }
-}
-
-// Implements list.count(): number of occurrences of the item in the sequence.
-// An item can only appear once in a sequence. If it exists, return 1.
-static PyObject* Count(PyContainer* self, PyObject* item) {
- int position = Find(self, item);
- if (position < 0) {
- return PyInt_FromLong(0);
- } else {
- return PyInt_FromLong(1);
- }
-}
-
-static PyObject* Append(PyContainer* self, PyObject* args) {
- if (_CalledFromGeneratedFile(0)) {
- Py_RETURN_NONE;
- }
- PyErr_Format(PyExc_TypeError,
- "'%.200s' object is not a mutable sequence",
- Py_TYPE(self)->tp_name);
- return NULL;
-}
-
-static PyObject* Reversed(PyContainer* self, PyObject* args) {
- return NewContainerIterator(self,
- PyContainerIterator::KIND_ITERVALUE_REVERSED);
-}
-
-static PyMethodDef SeqMethods[] = {
- { "index", (PyCFunction)Index, METH_O, },
- { "count", (PyCFunction)Count, METH_O, },
- { "append", (PyCFunction)Append, METH_O, },
- { "__reversed__", (PyCFunction)Reversed, METH_NOARGS, },
- {NULL}
-};
-
-static PySequenceMethods SeqSequenceMethods = {
- (lenfunc)Length, // sq_length
- 0, // sq_concat
- 0, // sq_repeat
- (ssizeargfunc)GetItem, // sq_item
- 0, // sq_slice
- 0, // sq_ass_item
- 0, // sq_ass_slice
- (objobjproc)SeqContains, // sq_contains
-};
-
-PyTypeObject DescriptorSequence_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- "DescriptorSequence", // tp_name
- sizeof(PyContainer), // tp_basicsize
- 0, // tp_itemsize
- 0, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- (reprfunc)ContainerRepr, // tp_repr
- 0, // tp_as_number
- &SeqSequenceMethods, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- 0, // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- (richcmpfunc)RichCompare, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- SeqMethods, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
- 0, // tp_alloc
- 0, // tp_new
- 0, // tp_free
-};
-
-static PyObject* NewMappingByName(
- DescriptorContainerDef* container_def, const void* descriptor) {
- PyContainer* self = PyObject_New(PyContainer, &DescriptorMapping_Type);
- if (self == NULL) {
- return NULL;
- }
- self->descriptor = descriptor;
- self->container_def = container_def;
- self->kind = PyContainer::KIND_BYNAME;
- return reinterpret_cast<PyObject*>(self);
-}
-
-static PyObject* NewMappingByCamelcaseName(
- DescriptorContainerDef* container_def, const void* descriptor) {
- PyContainer* self = PyObject_New(PyContainer, &DescriptorMapping_Type);
- if (self == NULL) {
- return NULL;
- }
- self->descriptor = descriptor;
- self->container_def = container_def;
- self->kind = PyContainer::KIND_BYCAMELCASENAME;
- return reinterpret_cast<PyObject*>(self);
-}
-
-static PyObject* NewMappingByNumber(
- DescriptorContainerDef* container_def, const void* descriptor) {
- if (container_def->get_by_number_fn == NULL ||
- container_def->get_item_number_fn == NULL) {
- PyErr_SetNone(PyExc_NotImplementedError);
- return NULL;
- }
- PyContainer* self = PyObject_New(PyContainer, &DescriptorMapping_Type);
- if (self == NULL) {
- return NULL;
- }
- self->descriptor = descriptor;
- self->container_def = container_def;
- self->kind = PyContainer::KIND_BYNUMBER;
- return reinterpret_cast<PyObject*>(self);
-}
-
-static PyObject* NewSequence(
- DescriptorContainerDef* container_def, const void* descriptor) {
- PyContainer* self = PyObject_New(PyContainer, &DescriptorSequence_Type);
- if (self == NULL) {
- return NULL;
- }
- self->descriptor = descriptor;
- self->container_def = container_def;
- self->kind = PyContainer::KIND_SEQUENCE;
- return reinterpret_cast<PyObject*>(self);
-}
-
-// Implement iterators over PyContainers.
-
-static void Iterator_Dealloc(PyContainerIterator* self) {
- Py_CLEAR(self->container);
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
-}
-
-static PyObject* Iterator_Next(PyContainerIterator* self) {
- int count = self->container->container_def->count_fn(self->container);
- if (self->index >= count) {
- // Return NULL with no exception to indicate the end.
- return NULL;
- }
- int index = self->index;
- self->index += 1;
- switch (self->kind) {
- case PyContainerIterator::KIND_ITERKEY:
- return _NewKey_ByIndex(self->container, index);
- case PyContainerIterator::KIND_ITERVALUE:
- return _NewObj_ByIndex(self->container, index);
- case PyContainerIterator::KIND_ITERVALUE_REVERSED:
- return _NewObj_ByIndex(self->container, count - index - 1);
- case PyContainerIterator::KIND_ITERITEM:
- {
- PyObject* obj = PyTuple_New(2);
- if (obj == NULL) {
- return NULL;
- }
- PyObject* key = _NewKey_ByIndex(self->container, index);
- if (key == NULL) {
- Py_DECREF(obj);
- return NULL;
- }
- PyTuple_SET_ITEM(obj, 0, key);
- PyObject* value = _NewObj_ByIndex(self->container, index);
- if (value == NULL) {
- Py_DECREF(obj);
- return NULL;
- }
- PyTuple_SET_ITEM(obj, 1, value);
- return obj;
- }
- default:
- PyErr_SetNone(PyExc_NotImplementedError);
- return NULL;
- }
-}
-
-static PyTypeObject ContainerIterator_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- "DescriptorContainerIterator", // tp_name
- sizeof(PyContainerIterator), // tp_basicsize
- 0, // tp_itemsize
- (destructor)Iterator_Dealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- 0, // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- PyObject_SelfIter, // tp_iter
- (iternextfunc)Iterator_Next, // tp_iternext
- 0, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
- 0, // tp_alloc
- 0, // tp_new
- 0, // tp_free
-};
-
-static PyObject* NewContainerIterator(PyContainer* container,
- PyContainerIterator::IterKind kind) {
- PyContainerIterator* self = PyObject_New(PyContainerIterator,
- &ContainerIterator_Type);
- if (self == NULL) {
- return NULL;
- }
- Py_INCREF(container);
- self->container = container;
- self->kind = kind;
- self->index = 0;
-
- return reinterpret_cast<PyObject*>(self);
-}
-
-} // namespace descriptor
-
-// Now define the real collections!
-
-namespace message_descriptor {
-
-typedef const Descriptor* ParentDescriptor;
-
-static ParentDescriptor GetDescriptor(PyContainer* self) {
- return reinterpret_cast<ParentDescriptor>(self->descriptor);
-}
-
-namespace fields {
-
-typedef const FieldDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->field_count();
-}
-
-static ItemDescriptor GetByName(PyContainer* self, const string& name) {
- return GetDescriptor(self)->FindFieldByName(name);
-}
-
-static ItemDescriptor GetByCamelcaseName(PyContainer* self,
- const string& name) {
- return GetDescriptor(self)->FindFieldByCamelcaseName(name);
-}
-
-static ItemDescriptor GetByNumber(PyContainer* self, int number) {
- return GetDescriptor(self)->FindFieldByNumber(number);
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->field(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyFieldDescriptor_FromDescriptor(item);
-}
-
-static const string& GetItemName(ItemDescriptor item) {
- return item->name();
-}
-
-static const string& GetItemCamelcaseName(ItemDescriptor item) {
- return item->camelcase_name();
-}
-
-static int GetItemNumber(ItemDescriptor item) {
- return item->number();
-}
-
-static int GetItemIndex(ItemDescriptor item) {
- return item->index();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "MessageFields",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)GetByName,
- (GetByCamelcaseNameMethod)GetByCamelcaseName,
- (GetByNumberMethod)GetByNumber,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)GetItemName,
- (GetItemCamelcaseNameMethod)GetItemCamelcaseName,
- (GetItemNumberMethod)GetItemNumber,
- (GetItemIndexMethod)GetItemIndex,
-};
-
-} // namespace fields
-
-PyObject* NewMessageFieldsByName(ParentDescriptor descriptor) {
- return descriptor::NewMappingByName(&fields::ContainerDef, descriptor);
-}
-
-PyObject* NewMessageFieldsByCamelcaseName(ParentDescriptor descriptor) {
- return descriptor::NewMappingByCamelcaseName(&fields::ContainerDef,
- descriptor);
-}
-
-PyObject* NewMessageFieldsByNumber(ParentDescriptor descriptor) {
- return descriptor::NewMappingByNumber(&fields::ContainerDef, descriptor);
-}
-
-PyObject* NewMessageFieldsSeq(ParentDescriptor descriptor) {
- return descriptor::NewSequence(&fields::ContainerDef, descriptor);
-}
-
-namespace nested_types {
-
-typedef const Descriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->nested_type_count();
-}
-
-static ItemDescriptor GetByName(PyContainer* self, const string& name) {
- return GetDescriptor(self)->FindNestedTypeByName(name);
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->nested_type(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyMessageDescriptor_FromDescriptor(item);
-}
-
-static const string& GetItemName(ItemDescriptor item) {
- return item->name();
-}
-
-static int GetItemIndex(ItemDescriptor item) {
- return item->index();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "MessageNestedTypes",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)GetByName,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)GetItemName,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)GetItemIndex,
-};
-
-} // namespace nested_types
-
-PyObject* NewMessageNestedTypesSeq(ParentDescriptor descriptor) {
- return descriptor::NewSequence(&nested_types::ContainerDef, descriptor);
-}
-
-PyObject* NewMessageNestedTypesByName(ParentDescriptor descriptor) {
- return descriptor::NewMappingByName(&nested_types::ContainerDef, descriptor);
-}
-
-namespace enums {
-
-typedef const EnumDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->enum_type_count();
-}
-
-static ItemDescriptor GetByName(PyContainer* self, const string& name) {
- return GetDescriptor(self)->FindEnumTypeByName(name);
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->enum_type(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyEnumDescriptor_FromDescriptor(item);
-}
-
-static const string& GetItemName(ItemDescriptor item) {
- return item->name();
-}
-
-static int GetItemIndex(ItemDescriptor item) {
- return item->index();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "MessageNestedEnums",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)GetByName,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)GetItemName,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)GetItemIndex,
-};
-
-} // namespace enums
-
-PyObject* NewMessageEnumsByName(ParentDescriptor descriptor) {
- return descriptor::NewMappingByName(&enums::ContainerDef, descriptor);
-}
-
-PyObject* NewMessageEnumsSeq(ParentDescriptor descriptor) {
- return descriptor::NewSequence(&enums::ContainerDef, descriptor);
-}
-
-namespace enumvalues {
-
-// This is the "enum_values_by_name" mapping, which collects values from all
-// enum types in a message.
-//
-// Note that the behavior of the C++ descriptor is different: it will search and
-// return the first value that matches the name, whereas the Python
-// implementation retrieves the last one.
-
-typedef const EnumValueDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- int count = 0;
- for (int i = 0; i < GetDescriptor(self)->enum_type_count(); ++i) {
- count += GetDescriptor(self)->enum_type(i)->value_count();
- }
- return count;
-}
-
-static ItemDescriptor GetByName(PyContainer* self, const string& name) {
- return GetDescriptor(self)->FindEnumValueByName(name);
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- // This is not optimal, but the number of enum *types* in a given message
- // is small. This function is only used when iterating over the mapping.
- const EnumDescriptor* enum_type = NULL;
- int enum_type_count = GetDescriptor(self)->enum_type_count();
- for (int i = 0; i < enum_type_count; ++i) {
- enum_type = GetDescriptor(self)->enum_type(i);
- int enum_value_count = enum_type->value_count();
- if (index < enum_value_count) {
- // Found it!
- break;
- }
- index -= enum_value_count;
- }
- // The next statement cannot overflow, because this function is only called by
- // internal iterators which ensure that 0 <= index < Count().
- return enum_type->value(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyEnumValueDescriptor_FromDescriptor(item);
-}
-
-static const string& GetItemName(ItemDescriptor item) {
- return item->name();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "MessageEnumValues",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)GetByName,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)GetItemName,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)NULL,
-};
-
-} // namespace enumvalues
-
-PyObject* NewMessageEnumValuesByName(ParentDescriptor descriptor) {
- return descriptor::NewMappingByName(&enumvalues::ContainerDef, descriptor);
-}
-
-namespace extensions {
-
-typedef const FieldDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->extension_count();
-}
-
-static ItemDescriptor GetByName(PyContainer* self, const string& name) {
- return GetDescriptor(self)->FindExtensionByName(name);
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->extension(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyFieldDescriptor_FromDescriptor(item);
-}
-
-static const string& GetItemName(ItemDescriptor item) {
- return item->name();
-}
-
-static int GetItemIndex(ItemDescriptor item) {
- return item->index();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "MessageExtensions",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)GetByName,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)GetItemName,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)GetItemIndex,
-};
-
-} // namespace extensions
-
-PyObject* NewMessageExtensionsByName(ParentDescriptor descriptor) {
- return descriptor::NewMappingByName(&extensions::ContainerDef, descriptor);
-}
-
-PyObject* NewMessageExtensionsSeq(ParentDescriptor descriptor) {
- return descriptor::NewSequence(&extensions::ContainerDef, descriptor);
-}
-
-namespace oneofs {
-
-typedef const OneofDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->oneof_decl_count();
-}
-
-static ItemDescriptor GetByName(PyContainer* self, const string& name) {
- return GetDescriptor(self)->FindOneofByName(name);
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->oneof_decl(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyOneofDescriptor_FromDescriptor(item);
-}
-
-static const string& GetItemName(ItemDescriptor item) {
- return item->name();
-}
-
-static int GetItemIndex(ItemDescriptor item) {
- return item->index();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "MessageOneofs",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)GetByName,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)GetItemName,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)GetItemIndex,
-};
-
-} // namespace oneofs
-
-PyObject* NewMessageOneofsByName(ParentDescriptor descriptor) {
- return descriptor::NewMappingByName(&oneofs::ContainerDef, descriptor);
-}
-
-PyObject* NewMessageOneofsSeq(ParentDescriptor descriptor) {
- return descriptor::NewSequence(&oneofs::ContainerDef, descriptor);
-}
-
-} // namespace message_descriptor
-
-namespace enum_descriptor {
-
-typedef const EnumDescriptor* ParentDescriptor;
-
-static ParentDescriptor GetDescriptor(PyContainer* self) {
- return reinterpret_cast<ParentDescriptor>(self->descriptor);
-}
-
-namespace enumvalues {
-
-typedef const EnumValueDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->value_count();
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->value(index);
-}
-
-static ItemDescriptor GetByName(PyContainer* self, const string& name) {
- return GetDescriptor(self)->FindValueByName(name);
-}
-
-static ItemDescriptor GetByNumber(PyContainer* self, int number) {
- return GetDescriptor(self)->FindValueByNumber(number);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyEnumValueDescriptor_FromDescriptor(item);
-}
-
-static const string& GetItemName(ItemDescriptor item) {
- return item->name();
-}
-
-static int GetItemNumber(ItemDescriptor item) {
- return item->number();
-}
-
-static int GetItemIndex(ItemDescriptor item) {
- return item->index();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "EnumValues",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)GetByName,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)GetByNumber,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)GetItemName,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)GetItemNumber,
- (GetItemIndexMethod)GetItemIndex,
-};
-
-} // namespace enumvalues
-
-PyObject* NewEnumValuesByName(ParentDescriptor descriptor) {
- return descriptor::NewMappingByName(&enumvalues::ContainerDef, descriptor);
-}
-
-PyObject* NewEnumValuesByNumber(ParentDescriptor descriptor) {
- return descriptor::NewMappingByNumber(&enumvalues::ContainerDef, descriptor);
-}
-
-PyObject* NewEnumValuesSeq(ParentDescriptor descriptor) {
- return descriptor::NewSequence(&enumvalues::ContainerDef, descriptor);
-}
-
-} // namespace enum_descriptor
-
-namespace oneof_descriptor {
-
-typedef const OneofDescriptor* ParentDescriptor;
-
-static ParentDescriptor GetDescriptor(PyContainer* self) {
- return reinterpret_cast<ParentDescriptor>(self->descriptor);
-}
-
-namespace fields {
-
-typedef const FieldDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->field_count();
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->field(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyFieldDescriptor_FromDescriptor(item);
-}
-
-static int GetItemIndex(ItemDescriptor item) {
- return item->index_in_oneof();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "OneofFields",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)NULL,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)NULL,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)GetItemIndex,
-};
-
-} // namespace fields
-
-PyObject* NewOneofFieldsSeq(ParentDescriptor descriptor) {
- return descriptor::NewSequence(&fields::ContainerDef, descriptor);
-}
-
-} // namespace oneof_descriptor
-
-namespace file_descriptor {
-
-typedef const FileDescriptor* ParentDescriptor;
-
-static ParentDescriptor GetDescriptor(PyContainer* self) {
- return reinterpret_cast<ParentDescriptor>(self->descriptor);
-}
-
-namespace messages {
-
-typedef const Descriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->message_type_count();
-}
-
-static ItemDescriptor GetByName(PyContainer* self, const string& name) {
- return GetDescriptor(self)->FindMessageTypeByName(name);
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->message_type(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyMessageDescriptor_FromDescriptor(item);
-}
-
-static const string& GetItemName(ItemDescriptor item) {
- return item->name();
-}
-
-static int GetItemIndex(ItemDescriptor item) {
- return item->index();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "FileMessages",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)GetByName,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)GetItemName,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)GetItemIndex,
-};
-
-} // namespace messages
-
-PyObject* NewFileMessageTypesByName(const FileDescriptor* descriptor) {
- return descriptor::NewMappingByName(&messages::ContainerDef, descriptor);
-}
-
-namespace enums {
-
-typedef const EnumDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->enum_type_count();
-}
-
-static ItemDescriptor GetByName(PyContainer* self, const string& name) {
- return GetDescriptor(self)->FindEnumTypeByName(name);
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->enum_type(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyEnumDescriptor_FromDescriptor(item);
-}
-
-static const string& GetItemName(ItemDescriptor item) {
- return item->name();
-}
-
-static int GetItemIndex(ItemDescriptor item) {
- return item->index();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "FileEnums",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)GetByName,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)GetItemName,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)GetItemIndex,
-};
-
-} // namespace enums
-
-PyObject* NewFileEnumTypesByName(const FileDescriptor* descriptor) {
- return descriptor::NewMappingByName(&enums::ContainerDef, descriptor);
-}
-
-namespace extensions {
-
-typedef const FieldDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->extension_count();
-}
-
-static ItemDescriptor GetByName(PyContainer* self, const string& name) {
- return GetDescriptor(self)->FindExtensionByName(name);
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->extension(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyFieldDescriptor_FromDescriptor(item);
-}
-
-static const string& GetItemName(ItemDescriptor item) {
- return item->name();
-}
-
-static int GetItemIndex(ItemDescriptor item) {
- return item->index();
-}
-
-static DescriptorContainerDef ContainerDef = {
- "FileExtensions",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)GetByName,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)GetItemName,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)GetItemIndex,
-};
-
-} // namespace extensions
-
-PyObject* NewFileExtensionsByName(const FileDescriptor* descriptor) {
- return descriptor::NewMappingByName(&extensions::ContainerDef, descriptor);
-}
-
-namespace dependencies {
-
-typedef const FileDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->dependency_count();
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->dependency(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyFileDescriptor_FromDescriptor(item);
-}
-
-static DescriptorContainerDef ContainerDef = {
- "FileDependencies",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)NULL,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)NULL,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)NULL,
-};
-
-} // namespace dependencies
-
-PyObject* NewFileDependencies(const FileDescriptor* descriptor) {
- return descriptor::NewSequence(&dependencies::ContainerDef, descriptor);
-}
-
-namespace public_dependencies {
-
-typedef const FileDescriptor* ItemDescriptor;
-
-static int Count(PyContainer* self) {
- return GetDescriptor(self)->public_dependency_count();
-}
-
-static ItemDescriptor GetByIndex(PyContainer* self, int index) {
- return GetDescriptor(self)->public_dependency(index);
-}
-
-static PyObject* NewObjectFromItem(ItemDescriptor item) {
- return PyFileDescriptor_FromDescriptor(item);
-}
-
-static DescriptorContainerDef ContainerDef = {
- "FilePublicDependencies",
- (CountMethod)Count,
- (GetByIndexMethod)GetByIndex,
- (GetByNameMethod)NULL,
- (GetByCamelcaseNameMethod)NULL,
- (GetByNumberMethod)NULL,
- (NewObjectFromItemMethod)NewObjectFromItem,
- (GetItemNameMethod)NULL,
- (GetItemCamelcaseNameMethod)NULL,
- (GetItemNumberMethod)NULL,
- (GetItemIndexMethod)NULL,
-};
-
-} // namespace public_dependencies
-
-PyObject* NewFilePublicDependencies(const FileDescriptor* descriptor) {
- return descriptor::NewSequence(&public_dependencies::ContainerDef,
- descriptor);
-}
-
-} // namespace file_descriptor
-
-
-// Register all implementations
-
-bool InitDescriptorMappingTypes() {
- if (PyType_Ready(&descriptor::DescriptorMapping_Type) < 0)
- return false;
- if (PyType_Ready(&descriptor::DescriptorSequence_Type) < 0)
- return false;
- if (PyType_Ready(&descriptor::ContainerIterator_Type) < 0)
- return false;
- return true;
-}
-
-} // namespace python
-} // namespace protobuf
-} // namespace google
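
For orientation, a rough Python-level sketch of the mappings these containers implement. The attribute names follow the ContainerDef entries above (fields_by_name, fields_by_camelcase_name, fields_by_number, enum_types_by_name, values_by_name); FieldDescriptorProto is only a convenient example message, and the exact mappings available depend on the protobuf version.

  # Illustrative only; assumes a protobuf build exposing these mappings.
  from google.protobuf import descriptor_pb2

  md = descriptor_pb2.FieldDescriptorProto.DESCRIPTOR
  f = md.fields_by_name['type_name']                    # fields::GetByName
  print(md.fields_by_number[f.number].name)             # fields::GetByNumber
  print(md.fields_by_camelcase_name['typeName'].name)   # fields::GetByCamelcaseName
  print(md.enum_types_by_name['Type']                   # enums::GetByName
          .values_by_name['TYPE_DOUBLE'].number)        # enumvalues lookup, prints 1
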
diff --git a/third_party/protobuf/python/google/protobuf/pyext/descriptor_containers.h b/third_party/protobuf/python/google/protobuf/pyext/descriptor_containers.h
deleted file mode 100644
index ce40747d57..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/descriptor_containers.h
+++ /dev/null
@@ -1,101 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_CONTAINERS_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_CONTAINERS_H__
-
-// Mappings and Sequences of descriptors.
-// They implement containers like fields_by_name, EnumDescriptor.values...
-// See descriptor_containers.cc for more description.
-#include <Python.h>
-
-namespace google {
-namespace protobuf {
-
-class Descriptor;
-class FileDescriptor;
-class EnumDescriptor;
-class OneofDescriptor;
-
-namespace python {
-
-// Initialize the various types and objects.
-bool InitDescriptorMappingTypes();
-
-// Each function below returns a Mapping, or a Sequence of descriptors.
-// They all return a new reference.
-
-namespace message_descriptor {
-PyObject* NewMessageFieldsByName(const Descriptor* descriptor);
-PyObject* NewMessageFieldsByCamelcaseName(const Descriptor* descriptor);
-PyObject* NewMessageFieldsByNumber(const Descriptor* descriptor);
-PyObject* NewMessageFieldsSeq(const Descriptor* descriptor);
-
-PyObject* NewMessageNestedTypesSeq(const Descriptor* descriptor);
-PyObject* NewMessageNestedTypesByName(const Descriptor* descriptor);
-
-PyObject* NewMessageEnumsByName(const Descriptor* descriptor);
-PyObject* NewMessageEnumsSeq(const Descriptor* descriptor);
-PyObject* NewMessageEnumValuesByName(const Descriptor* descriptor);
-
-PyObject* NewMessageExtensionsByName(const Descriptor* descriptor);
-PyObject* NewMessageExtensionsSeq(const Descriptor* descriptor);
-
-PyObject* NewMessageOneofsByName(const Descriptor* descriptor);
-PyObject* NewMessageOneofsSeq(const Descriptor* descriptor);
-} // namespace message_descriptor
-
-namespace enum_descriptor {
-PyObject* NewEnumValuesByName(const EnumDescriptor* descriptor);
-PyObject* NewEnumValuesByNumber(const EnumDescriptor* descriptor);
-PyObject* NewEnumValuesSeq(const EnumDescriptor* descriptor);
-} // namespace enum_descriptor
-
-namespace oneof_descriptor {
-PyObject* NewOneofFieldsSeq(const OneofDescriptor* descriptor);
-} // namespace oneof_descriptor
-
-namespace file_descriptor {
-PyObject* NewFileMessageTypesByName(const FileDescriptor* descriptor);
-
-PyObject* NewFileEnumTypesByName(const FileDescriptor* descriptor);
-
-PyObject* NewFileExtensionsByName(const FileDescriptor* descriptor);
-
-PyObject* NewFileDependencies(const FileDescriptor* descriptor);
-PyObject* NewFilePublicDependencies(const FileDescriptor* descriptor);
-} // namespace file_descriptor
-
-
-} // namespace python
-} // namespace protobuf
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_CONTAINERS_H__
diff --git a/third_party/protobuf/python/google/protobuf/pyext/descriptor_database.cc b/third_party/protobuf/python/google/protobuf/pyext/descriptor_database.cc
deleted file mode 100644
index 514722b4c3..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/descriptor_database.cc
+++ /dev/null
@@ -1,145 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// This file defines a C++ DescriptorDatabase, which wraps a Python Database
-// and delegates all its operations to Python methods.
-
-#include <google/protobuf/pyext/descriptor_database.h>
-
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/descriptor.pb.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-PyDescriptorDatabase::PyDescriptorDatabase(PyObject* py_database)
- : py_database_(py_database) {
- Py_INCREF(py_database_);
-}
-
-PyDescriptorDatabase::~PyDescriptorDatabase() { Py_DECREF(py_database_); }
-
-// Converts a Python object to a FileDescriptorProto pointer.
-// Handles all kinds of Python errors, which are simply logged.
-static bool GetFileDescriptorProto(PyObject* py_descriptor,
- FileDescriptorProto* output) {
- if (py_descriptor == NULL) {
- if (PyErr_ExceptionMatches(PyExc_KeyError)) {
- // Expected error: item was simply not found.
- PyErr_Clear();
- } else {
- GOOGLE_LOG(ERROR) << "DescriptorDatabase method raised an error";
- PyErr_Print();
- }
- return false;
- }
- const Descriptor* filedescriptor_descriptor =
- FileDescriptorProto::default_instance().GetDescriptor();
- CMessage* message = reinterpret_cast<CMessage*>(py_descriptor);
- if (PyObject_TypeCheck(py_descriptor, &CMessage_Type) &&
- message->message->GetDescriptor() == filedescriptor_descriptor) {
- // Fast path: Just use the pointer.
- FileDescriptorProto* file_proto =
- static_cast<FileDescriptorProto*>(message->message);
- *output = *file_proto;
- return true;
- } else {
- // Slow path: serialize the message. This allows using databases which
- // use a different implementation of FileDescriptorProto.
- ScopedPyObjectPtr serialized_pb(
- PyObject_CallMethod(py_descriptor, "SerializeToString", NULL));
- if (serialized_pb == NULL) {
- GOOGLE_LOG(ERROR)
- << "DescriptorDatabase method did not return a FileDescriptorProto";
- PyErr_Print();
- return false;
- }
- char* str;
- Py_ssize_t len;
- if (PyBytes_AsStringAndSize(serialized_pb.get(), &str, &len) < 0) {
- GOOGLE_LOG(ERROR)
- << "DescriptorDatabase method did not return a FileDescriptorProto";
- PyErr_Print();
- return false;
- }
- FileDescriptorProto file_proto;
- if (!file_proto.ParseFromArray(str, len)) {
- GOOGLE_LOG(ERROR)
- << "DescriptorDatabase method did not return a FileDescriptorProto";
- return false;
- }
- *output = file_proto;
- return true;
- }
-}
-
-// Find a file by file name.
-bool PyDescriptorDatabase::FindFileByName(const string& filename,
- FileDescriptorProto* output) {
- ScopedPyObjectPtr py_descriptor(PyObject_CallMethod(
- py_database_, "FindFileByName", "s#", filename.c_str(), filename.size()));
- return GetFileDescriptorProto(py_descriptor.get(), output);
-}
-
-// Find the file that declares the given fully-qualified symbol name.
-bool PyDescriptorDatabase::FindFileContainingSymbol(
- const string& symbol_name, FileDescriptorProto* output) {
- ScopedPyObjectPtr py_descriptor(
- PyObject_CallMethod(py_database_, "FindFileContainingSymbol", "s#",
- symbol_name.c_str(), symbol_name.size()));
- return GetFileDescriptorProto(py_descriptor.get(), output);
-}
-
-// Find the file which defines an extension extending the given message type
-// with the given field number.
-// Python DescriptorDatabases are not required to implement this method.
-bool PyDescriptorDatabase::FindFileContainingExtension(
- const string& containing_type, int field_number,
- FileDescriptorProto* output) {
- ScopedPyObjectPtr py_method(
- PyObject_GetAttrString(py_database_, "FindFileContainingExtension"));
- if (py_method == NULL) {
- // This method is not implemented; return without error.
- PyErr_Clear();
- return false;
- }
- ScopedPyObjectPtr py_descriptor(
- PyObject_CallFunction(py_method.get(), "s#i", containing_type.c_str(),
- containing_type.size(), field_number));
- return GetFileDescriptorProto(py_descriptor.get(), output);
-}
-
-} // namespace python
-} // namespace protobuf
-} // namespace google
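
The wrapper above calls only three methods on the wrapped Python object: FindFileByName, FindFileContainingSymbol, and optionally FindFileContainingExtension, each expected to return a FileDescriptorProto or raise KeyError for "not found". A minimal hand-rolled database with that shape might look like the sketch below; it is purely illustrative, and google.protobuf.descriptor_database.DescriptorDatabase is the supported implementation.

  class InMemoryDescriptorDatabase(object):
      """Toy object exposing the interface PyDescriptorDatabase delegates to."""

      def __init__(self):
          self._by_name = {}
          self._by_symbol = {}

      def Add(self, file_proto):
          # file_proto is a google.protobuf.descriptor_pb2.FileDescriptorProto.
          self._by_name[file_proto.name] = file_proto
          prefix = file_proto.package + '.' if file_proto.package else ''
          for message in file_proto.message_type:
              self._by_symbol[prefix + message.name] = file_proto

      def FindFileByName(self, name):
          return self._by_name[name]          # KeyError means "not found"

      def FindFileContainingSymbol(self, symbol):
          return self._by_symbol[symbol]
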
diff --git a/third_party/protobuf/python/google/protobuf/pyext/descriptor_database.h b/third_party/protobuf/python/google/protobuf/pyext/descriptor_database.h
deleted file mode 100644
index fc71c4bcb0..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/descriptor_database.h
+++ /dev/null
@@ -1,75 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_DATABASE_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_DATABASE_H__
-
-#include <Python.h>
-
-#include <google/protobuf/descriptor_database.h>
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-class PyDescriptorDatabase : public DescriptorDatabase {
- public:
- explicit PyDescriptorDatabase(PyObject* py_database);
- ~PyDescriptorDatabase();
-
- // Implement the abstract interface. All these functions fill the output
- // with a copy of FileDescriptorProto.
-
- // Find a file by file name.
- bool FindFileByName(const string& filename,
- FileDescriptorProto* output);
-
- // Find the file that declares the given fully-qualified symbol name.
- bool FindFileContainingSymbol(const string& symbol_name,
- FileDescriptorProto* output);
-
- // Find the file which defines an extension extending the given message type
- // with the given field number.
- // Containing_type must be a fully-qualified type name.
- // Python objects are not required to implement this method.
- bool FindFileContainingExtension(const string& containing_type,
- int field_number,
- FileDescriptorProto* output);
-
- private:
- // The python object that implements the database. The reference is owned.
- PyObject* py_database_;
-};
-
-} // namespace python
-} // namespace protobuf
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_DATABASE_H__
diff --git a/third_party/protobuf/python/google/protobuf/pyext/descriptor_pool.cc b/third_party/protobuf/python/google/protobuf/pyext/descriptor_pool.cc
deleted file mode 100644
index 0bc76bc90a..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/descriptor_pool.cc
+++ /dev/null
@@ -1,593 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Implements the DescriptorPool, which collects all descriptors.
-
-#include <Python.h>
-
-#include <google/protobuf/descriptor.pb.h>
-#include <google/protobuf/dynamic_message.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_database.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-
-#if PY_MAJOR_VERSION >= 3
- #define PyString_FromStringAndSize PyUnicode_FromStringAndSize
- #if PY_VERSION_HEX < 0x03030000
- #error "Python 3.0 - 3.2 are not supported."
- #endif
- #define PyString_AsStringAndSize(ob, charpp, sizep) \
- (PyUnicode_Check(ob)? \
- ((*(charpp) = PyUnicode_AsUTF8AndSize(ob, (sizep))) == NULL? -1: 0): \
- PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
-#endif
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-// A map to cache Python Pools per C++ pointer.
-// Pointers are not owned here, and belong to the PyDescriptorPool.
-static hash_map<const DescriptorPool*, PyDescriptorPool*> descriptor_pool_map;
-
-namespace cdescriptor_pool {
-
-// Creates a Python DescriptorPool object, but does not fill the "pool"
-// attribute.
-static PyDescriptorPool* _CreateDescriptorPool() {
- PyDescriptorPool* cpool = PyObject_New(
- PyDescriptorPool, &PyDescriptorPool_Type);
- if (cpool == NULL) {
- return NULL;
- }
-
- cpool->underlay = NULL;
- cpool->database = NULL;
-
- DynamicMessageFactory* message_factory = new DynamicMessageFactory();
- // This option might be the default some day.
- message_factory->SetDelegateToGeneratedFactory(true);
- cpool->message_factory = message_factory;
-
- // TODO(amauryfa): Rewrite the SymbolDatabase in C so that it uses the same
- // storage.
- cpool->classes_by_descriptor =
- new PyDescriptorPool::ClassesByMessageMap();
- cpool->descriptor_options =
- new hash_map<const void*, PyObject *>();
-
- return cpool;
-}
-
-// Creates a Python DescriptorPool, using the given pool as an underlay:
-// new messages will be added to a custom pool, not to the underlay.
-//
-// Ownership of the underlay is not transferred; its pointer must stay
-// alive for the lifetime of the new pool.
-static PyDescriptorPool* PyDescriptorPool_NewWithUnderlay(
- const DescriptorPool* underlay) {
- PyDescriptorPool* cpool = _CreateDescriptorPool();
- if (cpool == NULL) {
- return NULL;
- }
- cpool->pool = new DescriptorPool(underlay);
- cpool->underlay = underlay;
-
- if (!descriptor_pool_map.insert(
- std::make_pair(cpool->pool, cpool)).second) {
- // Should never happen -- would indicate an internal error / bug.
- PyErr_SetString(PyExc_ValueError, "DescriptorPool already registered");
- return NULL;
- }
-
- return cpool;
-}
-
-static PyDescriptorPool* PyDescriptorPool_NewWithDatabase(
- DescriptorDatabase* database) {
- PyDescriptorPool* cpool = _CreateDescriptorPool();
- if (cpool == NULL) {
- return NULL;
- }
- if (database != NULL) {
- cpool->pool = new DescriptorPool(database);
- cpool->database = database;
- } else {
- cpool->pool = new DescriptorPool();
- }
-
- if (!descriptor_pool_map.insert(std::make_pair(cpool->pool, cpool)).second) {
- // Should never happen -- would indicate an internal error / bug.
- PyErr_SetString(PyExc_ValueError, "DescriptorPool already registered");
- return NULL;
- }
-
- return cpool;
-}
-
-// The public DescriptorPool constructor.
-static PyObject* New(PyTypeObject* type,
- PyObject* args, PyObject* kwargs) {
- static char* kwlist[] = {"descriptor_db", 0};
- PyObject* py_database = NULL;
- if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|O", kwlist, &py_database)) {
- return NULL;
- }
- DescriptorDatabase* database = NULL;
- if (py_database && py_database != Py_None) {
- database = new PyDescriptorDatabase(py_database);
- }
- return reinterpret_cast<PyObject*>(
- PyDescriptorPool_NewWithDatabase(database));
-}
-
-static void Dealloc(PyDescriptorPool* self) {
- typedef PyDescriptorPool::ClassesByMessageMap::iterator iterator;
- descriptor_pool_map.erase(self->pool);
- for (iterator it = self->classes_by_descriptor->begin();
- it != self->classes_by_descriptor->end(); ++it) {
- Py_DECREF(it->second);
- }
- delete self->classes_by_descriptor;
- for (hash_map<const void*, PyObject*>::iterator it =
- self->descriptor_options->begin();
- it != self->descriptor_options->end(); ++it) {
- Py_DECREF(it->second);
- }
- delete self->descriptor_options;
- delete self->message_factory;
- delete self->database;
- delete self->pool;
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
-}
-
-PyObject* FindMessageByName(PyDescriptorPool* self, PyObject* arg) {
- Py_ssize_t name_size;
- char* name;
- if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
- return NULL;
- }
-
- const Descriptor* message_descriptor =
- self->pool->FindMessageTypeByName(string(name, name_size));
-
- if (message_descriptor == NULL) {
- PyErr_Format(PyExc_KeyError, "Couldn't find message %.200s", name);
- return NULL;
- }
-
- return PyMessageDescriptor_FromDescriptor(message_descriptor);
-}
-
-// Add a message class to our database.
-int RegisterMessageClass(PyDescriptorPool* self,
- const Descriptor *message_descriptor,
- PyObject *message_class) {
- Py_INCREF(message_class);
- typedef PyDescriptorPool::ClassesByMessageMap::iterator iterator;
- std::pair<iterator, bool> ret = self->classes_by_descriptor->insert(
- std::make_pair(message_descriptor, message_class));
- if (!ret.second) {
- // Update case: DECREF the previous value.
- Py_DECREF(ret.first->second);
- ret.first->second = message_class;
- }
- return 0;
-}
-
-// Retrieve the message class added to our database.
-PyObject *GetMessageClass(PyDescriptorPool* self,
- const Descriptor *message_descriptor) {
- typedef PyDescriptorPool::ClassesByMessageMap::iterator iterator;
- iterator ret = self->classes_by_descriptor->find(message_descriptor);
- if (ret == self->classes_by_descriptor->end()) {
- PyErr_Format(PyExc_TypeError, "No message class registered for '%s'",
- message_descriptor->full_name().c_str());
- return NULL;
- } else {
- return ret->second;
- }
-}
-
-PyObject* FindFileByName(PyDescriptorPool* self, PyObject* arg) {
- Py_ssize_t name_size;
- char* name;
- if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
- return NULL;
- }
-
- const FileDescriptor* file_descriptor =
- self->pool->FindFileByName(string(name, name_size));
- if (file_descriptor == NULL) {
- PyErr_Format(PyExc_KeyError, "Couldn't find file %.200s",
- name);
- return NULL;
- }
-
- return PyFileDescriptor_FromDescriptor(file_descriptor);
-}
-
-PyObject* FindFieldByName(PyDescriptorPool* self, PyObject* arg) {
- Py_ssize_t name_size;
- char* name;
- if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
- return NULL;
- }
-
- const FieldDescriptor* field_descriptor =
- self->pool->FindFieldByName(string(name, name_size));
- if (field_descriptor == NULL) {
- PyErr_Format(PyExc_KeyError, "Couldn't find field %.200s",
- name);
- return NULL;
- }
-
- return PyFieldDescriptor_FromDescriptor(field_descriptor);
-}
-
-PyObject* FindExtensionByName(PyDescriptorPool* self, PyObject* arg) {
- Py_ssize_t name_size;
- char* name;
- if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
- return NULL;
- }
-
- const FieldDescriptor* field_descriptor =
- self->pool->FindExtensionByName(string(name, name_size));
- if (field_descriptor == NULL) {
- PyErr_Format(PyExc_KeyError, "Couldn't find extension field %.200s", name);
- return NULL;
- }
-
- return PyFieldDescriptor_FromDescriptor(field_descriptor);
-}
-
-PyObject* FindEnumTypeByName(PyDescriptorPool* self, PyObject* arg) {
- Py_ssize_t name_size;
- char* name;
- if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
- return NULL;
- }
-
- const EnumDescriptor* enum_descriptor =
- self->pool->FindEnumTypeByName(string(name, name_size));
- if (enum_descriptor == NULL) {
- PyErr_Format(PyExc_KeyError, "Couldn't find enum %.200s", name);
- return NULL;
- }
-
- return PyEnumDescriptor_FromDescriptor(enum_descriptor);
-}
-
-PyObject* FindOneofByName(PyDescriptorPool* self, PyObject* arg) {
- Py_ssize_t name_size;
- char* name;
- if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
- return NULL;
- }
-
- const OneofDescriptor* oneof_descriptor =
- self->pool->FindOneofByName(string(name, name_size));
- if (oneof_descriptor == NULL) {
- PyErr_Format(PyExc_KeyError, "Couldn't find oneof %.200s", name);
- return NULL;
- }
-
- return PyOneofDescriptor_FromDescriptor(oneof_descriptor);
-}
-
-PyObject* FindFileContainingSymbol(PyDescriptorPool* self, PyObject* arg) {
- Py_ssize_t name_size;
- char* name;
- if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
- return NULL;
- }
-
- const FileDescriptor* file_descriptor =
- self->pool->FindFileContainingSymbol(string(name, name_size));
- if (file_descriptor == NULL) {
- PyErr_Format(PyExc_KeyError, "Couldn't find symbol %.200s", name);
- return NULL;
- }
-
- return PyFileDescriptor_FromDescriptor(file_descriptor);
-}
-
-// These functions should not exist -- the only valid way to create
-// descriptors is to call Add() or AddSerializedFile().
-// But these AddDescriptor() functions were created in Python and some people
-// call them, so we support them for now for compatibility.
-// However, we do check that the descriptor already exists in the pool,
-// which appears to always be true for existing calls -- but then why do people
-// call a function that will just be a no-op?
-// TODO(amauryfa): Need to investigate further.
-
-PyObject* AddFileDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
- const FileDescriptor* file_descriptor =
- PyFileDescriptor_AsDescriptor(descriptor);
- if (!file_descriptor) {
- return NULL;
- }
- if (file_descriptor !=
- self->pool->FindFileByName(file_descriptor->name())) {
- PyErr_Format(PyExc_ValueError,
- "The file descriptor %s does not belong to this pool",
- file_descriptor->name().c_str());
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-PyObject* AddDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
- const Descriptor* message_descriptor =
- PyMessageDescriptor_AsDescriptor(descriptor);
- if (!message_descriptor) {
- return NULL;
- }
- if (message_descriptor !=
- self->pool->FindMessageTypeByName(message_descriptor->full_name())) {
- PyErr_Format(PyExc_ValueError,
- "The message descriptor %s does not belong to this pool",
- message_descriptor->full_name().c_str());
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-PyObject* AddEnumDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
- const EnumDescriptor* enum_descriptor =
- PyEnumDescriptor_AsDescriptor(descriptor);
- if (!enum_descriptor) {
- return NULL;
- }
- if (enum_descriptor !=
- self->pool->FindEnumTypeByName(enum_descriptor->full_name())) {
- PyErr_Format(PyExc_ValueError,
- "The enum descriptor %s does not belong to this pool",
- enum_descriptor->full_name().c_str());
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-// The code below loads new Descriptors from a serialized FileDescriptorProto.
-
-
-// Collects errors that occur during proto file building to allow them to be
-// propagated in the Python exception instead of only living in ERROR logs.
-class BuildFileErrorCollector : public DescriptorPool::ErrorCollector {
- public:
- BuildFileErrorCollector() : error_message(""), had_errors(false) {}
-
- void AddError(const string& filename, const string& element_name,
- const Message* descriptor, ErrorLocation location,
- const string& message) {
- // Replicates the logging behavior that happens in the C++ implementation
- // when an error collector is not passed in.
- if (!had_errors) {
- error_message +=
- ("Invalid proto descriptor for file \"" + filename + "\":\n");
- had_errors = true;
- }
- // As this only happens on failure and will result in the program not
- // running at all, no effort is made to optimize this string manipulation.
- error_message += (" " + element_name + ": " + message + "\n");
- }
-
- string error_message;
- bool had_errors;
-};
-
-PyObject* AddSerializedFile(PyDescriptorPool* self, PyObject* serialized_pb) {
- char* message_type;
- Py_ssize_t message_len;
-
- if (self->database != NULL) {
- PyErr_SetString(
- PyExc_ValueError,
- "Cannot call Add on a DescriptorPool that uses a DescriptorDatabase. "
- "Add your file to the underlying database.");
- return NULL;
- }
-
- if (PyBytes_AsStringAndSize(serialized_pb, &message_type, &message_len) < 0) {
- return NULL;
- }
-
- FileDescriptorProto file_proto;
- if (!file_proto.ParseFromArray(message_type, message_len)) {
- PyErr_SetString(PyExc_TypeError, "Couldn't parse file content!");
- return NULL;
- }
-
- // If the file was already part of a C++ library, all its descriptors are in
- // the underlying pool. No need to do anything else.
- const FileDescriptor* generated_file = NULL;
- if (self->underlay) {
- generated_file = self->underlay->FindFileByName(file_proto.name());
- }
- if (generated_file != NULL) {
- return PyFileDescriptor_FromDescriptorWithSerializedPb(
- generated_file, serialized_pb);
- }
-
- BuildFileErrorCollector error_collector;
- const FileDescriptor* descriptor =
- self->pool->BuildFileCollectingErrors(file_proto,
- &error_collector);
- if (descriptor == NULL) {
- PyErr_Format(PyExc_TypeError,
- "Couldn't build proto file into descriptor pool!\n%s",
- error_collector.error_message.c_str());
- return NULL;
- }
-
- return PyFileDescriptor_FromDescriptorWithSerializedPb(
- descriptor, serialized_pb);
-}
-
-PyObject* Add(PyDescriptorPool* self, PyObject* file_descriptor_proto) {
- ScopedPyObjectPtr serialized_pb(
- PyObject_CallMethod(file_descriptor_proto, "SerializeToString", NULL));
- if (serialized_pb == NULL) {
- return NULL;
- }
- return AddSerializedFile(self, serialized_pb.get());
-}
-
-static PyMethodDef Methods[] = {
- { "Add", (PyCFunction)Add, METH_O,
- "Adds the FileDescriptorProto and its types to this pool." },
- { "AddSerializedFile", (PyCFunction)AddSerializedFile, METH_O,
- "Adds a serialized FileDescriptorProto to this pool." },
-
- // TODO(amauryfa): Understand why the Python implementation differs from
- // this one, ask users to use another API and deprecate these functions.
- { "AddFileDescriptor", (PyCFunction)AddFileDescriptor, METH_O,
- "No-op. Add() must have been called before." },
- { "AddDescriptor", (PyCFunction)AddDescriptor, METH_O,
- "No-op. Add() must have been called before." },
- { "AddEnumDescriptor", (PyCFunction)AddEnumDescriptor, METH_O,
- "No-op. Add() must have been called before." },
-
- { "FindFileByName", (PyCFunction)FindFileByName, METH_O,
- "Searches for a file descriptor by its .proto name." },
- { "FindMessageTypeByName", (PyCFunction)FindMessageByName, METH_O,
- "Searches for a message descriptor by full name." },
- { "FindFieldByName", (PyCFunction)FindFieldByName, METH_O,
- "Searches for a field descriptor by full name." },
- { "FindExtensionByName", (PyCFunction)FindExtensionByName, METH_O,
- "Searches for extension descriptor by full name." },
- { "FindEnumTypeByName", (PyCFunction)FindEnumTypeByName, METH_O,
- "Searches for enum type descriptor by full name." },
- { "FindOneofByName", (PyCFunction)FindOneofByName, METH_O,
- "Searches for oneof descriptor by full name." },
-
- { "FindFileContainingSymbol", (PyCFunction)FindFileContainingSymbol, METH_O,
- "Gets the FileDescriptor containing the specified symbol." },
- {NULL}
-};
-
-} // namespace cdescriptor_pool
-
-PyTypeObject PyDescriptorPool_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".DescriptorPool", // tp_name
- sizeof(PyDescriptorPool), // tp_basicsize
- 0, // tp_itemsize
- (destructor)cdescriptor_pool::Dealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A Descriptor Pool", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- cdescriptor_pool::Methods, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
- 0, // tp_alloc
- cdescriptor_pool::New, // tp_new
- PyObject_Del, // tp_free
-};
-
-// This is the DescriptorPool which contains all the definitions from the
-// generated _pb2.py modules.
-static PyDescriptorPool* python_generated_pool = NULL;
-
-bool InitDescriptorPool() {
- if (PyType_Ready(&PyDescriptorPool_Type) < 0)
- return false;
-
- // The Pool of messages declared in Python libraries.
- // generated_pool() contains all messages already linked in C++ libraries, and
- // is used as the underlay.
- python_generated_pool = cdescriptor_pool::PyDescriptorPool_NewWithUnderlay(
- DescriptorPool::generated_pool());
- if (python_generated_pool == NULL) {
- return false;
- }
- // Register this pool to be found for C++-generated descriptors.
- descriptor_pool_map.insert(
- std::make_pair(DescriptorPool::generated_pool(),
- python_generated_pool));
-
- return true;
-}
-
-// The default DescriptorPool used everywhere in this module.
-// Today it's the python_generated_pool.
-// TODO(amauryfa): Remove all usages of this function: the pool should be
-// derived from the context.
-PyDescriptorPool* GetDefaultDescriptorPool() {
- return python_generated_pool;
-}
-
-PyDescriptorPool* GetDescriptorPool_FromPool(const DescriptorPool* pool) {
- // Fast path for standard descriptors.
- if (pool == python_generated_pool->pool ||
- pool == DescriptorPool::generated_pool()) {
- return python_generated_pool;
- }
- hash_map<const DescriptorPool*, PyDescriptorPool*>::iterator it =
- descriptor_pool_map.find(pool);
- if (it == descriptor_pool_map.end()) {
- PyErr_SetString(PyExc_KeyError, "Unknown descriptor pool");
- return NULL;
- }
- return it->second;
-}
-
-} // namespace python
-} // namespace protobuf
-} // namespace google
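
To make the registered methods above concrete, here is a rough usage sketch of the pool API from Python. The 'example.proto' file and Thing message are made-up names; descriptor_pool.DescriptorPool is the usual Python entry point, whether backed by this C++ type or by the pure-Python implementation.

  from google.protobuf import descriptor_pb2, descriptor_pool

  pool = descriptor_pool.DescriptorPool()

  file_proto = descriptor_pb2.FileDescriptorProto()
  file_proto.name = 'example.proto'
  file_proto.package = 'example'
  msg = file_proto.message_type.add()
  msg.name = 'Thing'
  field = msg.field.add()
  field.name = 'id'
  field.number = 1
  field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
  field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT64

  pool.Add(file_proto)   # Add() serializes and forwards to AddSerializedFile()
  thing = pool.FindMessageTypeByName('example.Thing')
  print(thing.fields_by_name['id'].number)   # -> 1
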
diff --git a/third_party/protobuf/python/google/protobuf/pyext/descriptor_pool.h b/third_party/protobuf/python/google/protobuf/pyext/descriptor_pool.h
deleted file mode 100644
index 16bc910c45..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/descriptor_pool.h
+++ /dev/null
@@ -1,164 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_POOL_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_POOL_H__
-
-#include <Python.h>
-
-#include <google/protobuf/stubs/hash.h>
-#include <google/protobuf/descriptor.h>
-
-namespace google {
-namespace protobuf {
-class MessageFactory;
-
-namespace python {
-
-// Wraps operations on the global DescriptorPool, which contains information
-// about all messages and fields.
-//
-// There is normally one pool per process. We make it a Python object only
-// because it contains many Python references.
-// TODO(amauryfa): See whether such objects can appear in reference cycles, and
-// consider adding support for the cyclic GC.
-//
-// "Methods" that interacts with this DescriptorPool are in the cdescriptor_pool
-// namespace.
-typedef struct PyDescriptorPool {
- PyObject_HEAD
-
- // The C++ pool containing Descriptors.
- DescriptorPool* pool;
-
- // The C++ pool acting as an underlay. Can be NULL.
- // This pointer is not owned and must stay alive.
- const DescriptorPool* underlay;
-
- // The C++ descriptor database used to fetch unknown protos. Can be NULL.
- // This pointer is owned.
- const DescriptorDatabase* database;
-
- // DynamicMessageFactory used to create C++ instances of messages.
- // This object caches the descriptors that were used, so the DescriptorPool
- // needs to get rid of it before it can delete itself.
- //
- // Note: A C++ MessageFactory is different from the Python MessageFactory.
- // The C++ one creates messages, while the Python one creates classes.
- MessageFactory* message_factory;
-
- // Make our own mapping to retrieve Python classes from C++ descriptors.
- //
- // Descriptor pointers stored here are owned by the DescriptorPool above.
- // Python references to classes are owned by this PyDescriptorPool.
- typedef hash_map<const Descriptor*, PyObject*> ClassesByMessageMap;
- ClassesByMessageMap* classes_by_descriptor;
-
- // Cache the options for any kind of descriptor.
- // Descriptor pointers are owned by the DescriptorPool above.
- // Python objects are owned by the map.
- hash_map<const void*, PyObject*>* descriptor_options;
-} PyDescriptorPool;
-
-
-extern PyTypeObject PyDescriptorPool_Type;
-
-namespace cdescriptor_pool {
-
-// Looks up a message by name.
-// Returns a message Descriptor, or NULL if not found.
-const Descriptor* FindMessageTypeByName(PyDescriptorPool* self,
- const string& name);
-
-// Registers a new Python class for the given message descriptor.
-// On error, returns -1 with a Python exception set.
-int RegisterMessageClass(PyDescriptorPool* self,
- const Descriptor* message_descriptor,
- PyObject* message_class);
-
-// Retrieves the Python class registered with the given message descriptor.
-//
-// Returns a *borrowed* reference if found, otherwise returns NULL with an
-// exception set.
-PyObject* GetMessageClass(PyDescriptorPool* self,
- const Descriptor* message_descriptor);
-
-// The functions below are also exposed as methods of the DescriptorPool type.
-
-// Looks up a message by name. Returns a PyMessageDescriptor corresponding to
-// the message on success, or NULL on failure.
-//
-// Returns a new reference.
-PyObject* FindMessageByName(PyDescriptorPool* self, PyObject* name);
-
-// Looks up a field by name. Returns a PyFieldDescriptor corresponding to
-// the field on success, or NULL on failure.
-//
-// Returns a new reference.
-PyObject* FindFieldByName(PyDescriptorPool* self, PyObject* name);
-
-// Looks up an extension by name. Returns a PyFieldDescriptor corresponding
-// to the field on success, or NULL on failure.
-//
-// Returns a new reference.
-PyObject* FindExtensionByName(PyDescriptorPool* self, PyObject* arg);
-
-// Looks up an enum type by name. Returns a PyEnumDescriptor corresponding
-// to the enum type on success, or NULL on failure.
-//
-// Returns a new reference.
-PyObject* FindEnumTypeByName(PyDescriptorPool* self, PyObject* arg);
-
-// Looks up a oneof by name. Returns a COneofDescriptor corresponding
-// to the oneof on success, or NULL on failure.
-//
-// Returns a new reference.
-PyObject* FindOneofByName(PyDescriptorPool* self, PyObject* arg);
-
-} // namespace cdescriptor_pool
-
-// Retrieves the global descriptor pool owned by the _message module.
-// This is the "default" pool used to register messages from _pb2.py
-// generated modules.
-// Returns a *borrowed* reference.
-PyDescriptorPool* GetDefaultDescriptorPool();
-
-// Retrieves the Python descriptor pool owning a C++ descriptor pool.
-// Returns a *borrowed* reference.
-PyDescriptorPool* GetDescriptorPool_FromPool(const DescriptorPool* pool);
-
-// Initialize objects used by this module.
-bool InitDescriptorPool();
-
-} // namespace python
-} // namespace protobuf
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_POOL_H__
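
The PyDescriptorPool removed above wraps the process-wide pool that generated _pb2 modules register their messages into. As a hedged sketch of how that default pool is reached from the public Python API (the fully-qualified names below are hypothetical placeholders, not part of this repository):

    # Minimal sketch: descriptor lookups against the default pool that
    # _pb2 modules register into. "my_pkg.MyMessage" and its field
    # "my_field" are hypothetical names used only for illustration.
    from google.protobuf import descriptor_pool

    pool = descriptor_pool.Default()
    md = pool.FindMessageTypeByName("my_pkg.MyMessage")
    fd = pool.FindFieldByName("my_pkg.MyMessage.my_field")
    print(md.full_name, fd.number)
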
diff --git a/third_party/protobuf/python/google/protobuf/pyext/extension_dict.cc b/third_party/protobuf/python/google/protobuf/pyext/extension_dict.cc
deleted file mode 100644
index 555bd2937b..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/extension_dict.cc
+++ /dev/null
@@ -1,320 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: anuraag@google.com (Anuraag Agrawal)
-// Author: tibell@google.com (Johan Tibell)
-
-#include <google/protobuf/pyext/extension_dict.h>
-
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/dynamic_message.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/repeated_composite_container.h>
-#include <google/protobuf/pyext/repeated_scalar_container.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-#include <google/protobuf/stubs/shared_ptr.h>
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-namespace extension_dict {
-
-PyObject* len(ExtensionDict* self) {
-#if PY_MAJOR_VERSION >= 3
- return PyLong_FromLong(PyDict_Size(self->values));
-#else
- return PyInt_FromLong(PyDict_Size(self->values));
-#endif
-}
-
-// TODO(tibell): Use VisitCompositeField.
-int ReleaseExtension(ExtensionDict* self,
- PyObject* extension,
- const FieldDescriptor* descriptor) {
- if (descriptor->label() == FieldDescriptor::LABEL_REPEATED) {
- if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- if (repeated_composite_container::Release(
- reinterpret_cast<RepeatedCompositeContainer*>(
- extension)) < 0) {
- return -1;
- }
- } else {
- if (repeated_scalar_container::Release(
- reinterpret_cast<RepeatedScalarContainer*>(
- extension)) < 0) {
- return -1;
- }
- }
- } else if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- if (cmessage::ReleaseSubMessage(
- self->parent, descriptor,
- reinterpret_cast<CMessage*>(extension)) < 0) {
- return -1;
- }
- }
-
- return 0;
-}
-
-PyObject* subscript(ExtensionDict* self, PyObject* key) {
- const FieldDescriptor* descriptor = cmessage::GetExtensionDescriptor(key);
- if (descriptor == NULL) {
- return NULL;
- }
- if (!CheckFieldBelongsToMessage(descriptor, self->message)) {
- return NULL;
- }
-
- if (descriptor->label() != FieldDescriptor::LABEL_REPEATED &&
- descriptor->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) {
- return cmessage::InternalGetScalar(self->message, descriptor);
- }
-
- PyObject* value = PyDict_GetItem(self->values, key);
- if (value != NULL) {
- Py_INCREF(value);
- return value;
- }
-
- if (self->parent == NULL) {
- // We are in "detached" state. Don't allow further modifications.
- // TODO(amauryfa): Support adding non-scalars to a detached extension dict.
-    // This probably requires storing the type of the main message.
- PyErr_SetObject(PyExc_KeyError, key);
- return NULL;
- }
-
- if (descriptor->label() != FieldDescriptor::LABEL_REPEATED &&
- descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- PyObject* sub_message = cmessage::InternalGetSubMessage(
- self->parent, descriptor);
- if (sub_message == NULL) {
- return NULL;
- }
- PyDict_SetItem(self->values, key, sub_message);
- return sub_message;
- }
-
- if (descriptor->label() == FieldDescriptor::LABEL_REPEATED) {
- if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- PyObject *message_class = cdescriptor_pool::GetMessageClass(
- cmessage::GetDescriptorPoolForMessage(self->parent),
- descriptor->message_type());
- if (message_class == NULL) {
- return NULL;
- }
- PyObject* py_container = repeated_composite_container::NewContainer(
- self->parent, descriptor, message_class);
- if (py_container == NULL) {
- return NULL;
- }
- PyDict_SetItem(self->values, key, py_container);
- return py_container;
- } else {
- PyObject* py_container = repeated_scalar_container::NewContainer(
- self->parent, descriptor);
- if (py_container == NULL) {
- return NULL;
- }
- PyDict_SetItem(self->values, key, py_container);
- return py_container;
- }
- }
- PyErr_SetString(PyExc_ValueError, "control reached unexpected line");
- return NULL;
-}
-
-int ass_subscript(ExtensionDict* self, PyObject* key, PyObject* value) {
- const FieldDescriptor* descriptor = cmessage::GetExtensionDescriptor(key);
- if (descriptor == NULL) {
- return -1;
- }
- if (!CheckFieldBelongsToMessage(descriptor, self->message)) {
- return -1;
- }
-
- if (descriptor->label() != FieldDescriptor::LABEL_OPTIONAL ||
- descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- PyErr_SetString(PyExc_TypeError, "Extension is repeated and/or composite "
- "type");
- return -1;
- }
- if (self->parent) {
- cmessage::AssureWritable(self->parent);
- if (cmessage::InternalSetScalar(self->parent, descriptor, value) < 0) {
- return -1;
- }
- }
- // TODO(tibell): We shouldn't write scalars to the cache.
- PyDict_SetItem(self->values, key, value);
- return 0;
-}
-
-PyObject* ClearExtension(ExtensionDict* self, PyObject* extension) {
- const FieldDescriptor* descriptor =
- cmessage::GetExtensionDescriptor(extension);
- if (descriptor == NULL) {
- return NULL;
- }
- PyObject* value = PyDict_GetItem(self->values, extension);
- if (self->parent) {
- if (value != NULL) {
- if (ReleaseExtension(self, value, descriptor) < 0) {
- return NULL;
- }
- }
- if (ScopedPyObjectPtr(cmessage::ClearFieldByDescriptor(
- self->parent, descriptor)) == NULL) {
- return NULL;
- }
- }
- if (PyDict_DelItem(self->values, extension) < 0) {
- PyErr_Clear();
- }
- Py_RETURN_NONE;
-}
-
-PyObject* HasExtension(ExtensionDict* self, PyObject* extension) {
- const FieldDescriptor* descriptor =
- cmessage::GetExtensionDescriptor(extension);
- if (descriptor == NULL) {
- return NULL;
- }
- if (self->parent) {
- return cmessage::HasFieldByDescriptor(self->parent, descriptor);
- } else {
- int exists = PyDict_Contains(self->values, extension);
- if (exists < 0) {
- return NULL;
- }
- return PyBool_FromLong(exists);
- }
-}
-
-PyObject* _FindExtensionByName(ExtensionDict* self, PyObject* name) {
- ScopedPyObjectPtr extensions_by_name(PyObject_GetAttrString(
- reinterpret_cast<PyObject*>(self->parent), "_extensions_by_name"));
- if (extensions_by_name == NULL) {
- return NULL;
- }
- PyObject* result = PyDict_GetItem(extensions_by_name.get(), name);
- if (result == NULL) {
- Py_RETURN_NONE;
- } else {
- Py_INCREF(result);
- return result;
- }
-}
-
-ExtensionDict* NewExtensionDict(CMessage *parent) {
- ExtensionDict* self = reinterpret_cast<ExtensionDict*>(
- PyType_GenericAlloc(&ExtensionDict_Type, 0));
- if (self == NULL) {
- return NULL;
- }
-
- self->parent = parent; // Store a borrowed reference.
- self->message = parent->message;
- self->owner = parent->owner;
- self->values = PyDict_New();
- return self;
-}
-
-void dealloc(ExtensionDict* self) {
- Py_CLEAR(self->values);
- self->owner.reset();
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
-}
-
-static PyMappingMethods MpMethods = {
- (lenfunc)len, /* mp_length */
- (binaryfunc)subscript, /* mp_subscript */
- (objobjargproc)ass_subscript,/* mp_ass_subscript */
-};
-
-#define EDMETHOD(name, args, doc) { #name, (PyCFunction)name, args, doc }
-static PyMethodDef Methods[] = {
- EDMETHOD(ClearExtension, METH_O, "Clears an extension from the object."),
- EDMETHOD(HasExtension, METH_O, "Checks if the object has an extension."),
- EDMETHOD(_FindExtensionByName, METH_O,
- "Finds an extension by name."),
- { NULL, NULL }
-};
-
-} // namespace extension_dict
-
-PyTypeObject ExtensionDict_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".ExtensionDict", // tp_name
- sizeof(ExtensionDict), // tp_basicsize
- 0, // tp_itemsize
- (destructor)extension_dict::dealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- &extension_dict::MpMethods, // tp_as_mapping
- PyObject_HashNotImplemented, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "An extension dict", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- extension_dict::Methods, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
-};
-
-} // namespace python
-} // namespace protobuf
-} // namespace google
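
The ExtensionDict type deleted above backs the Extensions mapping on generated messages, so its subscript, ass_subscript, HasExtension and ClearExtension entry points correspond to ordinary extension access from Python. A hedged usage sketch follows; my_pb2, Base and my_ext are hypothetical stand-ins for any extendable message and registered extension:

    # Sketch only: my_pb2 is a hypothetical generated module with an
    # extendable message Base and a singular scalar extension my_ext.
    from my_project import my_pb2

    msg = my_pb2.Base()
    msg.Extensions[my_pb2.my_ext] = 42       # ass_subscript (singular scalars only)
    print(msg.HasExtension(my_pb2.my_ext))   # HasExtension -> True
    msg.ClearExtension(my_pb2.my_ext)        # ClearExtension
    print(msg.Extensions[my_pb2.my_ext])     # subscript; default value after clearing
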
diff --git a/third_party/protobuf/python/google/protobuf/pyext/extension_dict.h b/third_party/protobuf/python/google/protobuf/pyext/extension_dict.h
deleted file mode 100644
index d92cf956bd..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/extension_dict.h
+++ /dev/null
@@ -1,131 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: anuraag@google.com (Anuraag Agrawal)
-// Author: tibell@google.com (Johan Tibell)
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_EXTENSION_DICT_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_EXTENSION_DICT_H__
-
-#include <Python.h>
-
-#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
-
-namespace google {
-namespace protobuf {
-
-class Message;
-class FieldDescriptor;
-
-#ifdef _SHARED_PTR_H
-using std::shared_ptr;
-#else
-using internal::shared_ptr;
-#endif
-
-namespace python {
-
-struct CMessage;
-
-typedef struct ExtensionDict {
- PyObject_HEAD;
-
- // This is the top-level C++ Message object that owns the whole
- // proto tree. Every Python container class holds a
- // reference to it in order to keep it alive as long as there's a
- // Python object that references any part of the tree.
- shared_ptr<Message> owner;
-
- // Weak reference to parent message. Used to make sure
- // the parent is writable when an extension field is modified.
- CMessage* parent;
-
- // Pointer to the C++ Message that this ExtensionDict extends.
- // Not owned by us.
- Message* message;
-
- // A dict of child messages, indexed by Extension descriptors.
- // Similar to CMessage::composite_fields.
- PyObject* values;
-} ExtensionDict;
-
-extern PyTypeObject ExtensionDict_Type;
-
-namespace extension_dict {
-
-// Builds an Extensions dict for a specific message.
-ExtensionDict* NewExtensionDict(CMessage *parent);
-
-// Gets the number of extension values in this ExtensionDict as a python object.
-//
-// Returns a new reference.
-PyObject* len(ExtensionDict* self);
-
-// Releases extensions referenced outside this dictionary to keep outside
-// references alive.
-//
-// Returns 0 on success, -1 on failure.
-int ReleaseExtension(ExtensionDict* self,
- PyObject* extension,
- const FieldDescriptor* descriptor);
-
-// Gets an extension from the dict for the given extension descriptor.
-//
-// Returns a new reference.
-PyObject* subscript(ExtensionDict* self, PyObject* key);
-
-// Assigns a value to an extension in the dict. Can only be used for singular
-// simple types.
-//
-// Returns 0 on success, -1 on failure.
-int ass_subscript(ExtensionDict* self, PyObject* key, PyObject* value);
-
-// Clears an extension from the dict. Will release the extension if there
-// is still an external reference left to it.
-//
-// Returns None on success.
-PyObject* ClearExtension(ExtensionDict* self,
- PyObject* extension);
-
-// Gets an extension from the dict given the extension name as opposed to
-// descriptor.
-//
-// Returns a new reference.
-PyObject* _FindExtensionByName(ExtensionDict* self, PyObject* name);
-
-} // namespace extension_dict
-} // namespace python
-} // namespace protobuf
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_EXTENSION_DICT_H__
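
The ReleaseExtension/ClearExtension contract documented in this header means a container obtained from the dict is detached, not destroyed, when the extension is cleared while an outside reference still exists. A sketch of that intended behavior as the comments above describe it (not a verified test), reusing the hypothetical my_pb2 module from the previous example plus a hypothetical repeated extension my_repeated_ext:

    msg = my_pb2.Base()
    msg.Extensions[my_pb2.my_repeated_ext].append(1)
    held = msg.Extensions[my_pb2.my_repeated_ext]   # external reference
    msg.ClearExtension(my_pb2.my_repeated_ext)      # triggers ReleaseExtension first
    assert list(held) == [1]                        # detached container keeps its values
    assert len(msg.Extensions[my_pb2.my_repeated_ext]) == 0
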
diff --git a/third_party/protobuf/python/google/protobuf/pyext/map_container.cc b/third_party/protobuf/python/google/protobuf/pyext/map_container.cc
deleted file mode 100644
index df9138a4d8..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/map_container.cc
+++ /dev/null
@@ -1,965 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: haberman@google.com (Josh Haberman)
-
-#include <google/protobuf/pyext/map_container.h>
-
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/stubs/scoped_ptr.h>
-#include <google/protobuf/map_field.h>
-#include <google/protobuf/map.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-
-#if PY_MAJOR_VERSION >= 3
- #define PyInt_FromLong PyLong_FromLong
- #define PyInt_FromSize_t PyLong_FromSize_t
-#endif
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-// Functions that need access to map reflection functionality.
-// They need to be contained in this class because it is friended.
-class MapReflectionFriend {
- public:
- // Methods that are in common between the map types.
- static PyObject* Contains(PyObject* _self, PyObject* key);
- static Py_ssize_t Length(PyObject* _self);
- static PyObject* GetIterator(PyObject *_self);
- static PyObject* IterNext(PyObject* _self);
-
- // Methods that differ between the map types.
- static PyObject* ScalarMapGetItem(PyObject* _self, PyObject* key);
- static PyObject* MessageMapGetItem(PyObject* _self, PyObject* key);
- static int ScalarMapSetItem(PyObject* _self, PyObject* key, PyObject* v);
- static int MessageMapSetItem(PyObject* _self, PyObject* key, PyObject* v);
-};
-
-struct MapIterator {
- PyObject_HEAD;
-
- scoped_ptr< ::google::protobuf::MapIterator> iter;
-
- // A pointer back to the container, so we can notice changes to the version.
- // We own a ref on this.
- MapContainer* container;
-
- // We need to keep a ref on the Message* too, because
- // MapIterator::~MapIterator() accesses it. Normally this would be ok because
- // the ref on container (above) would guarantee outlive semantics. However in
- // the case of ClearField(), InitializeAndCopyToParentContainer() resets the
- // message pointer (and the owner) to a different message, a copy of the
- // original. But our iterator still points to the original, which could now
- // get deleted before us.
- //
- // To prevent this, we ensure that the Message will always stay alive as long
- // as this iterator does. This is solely for the benefit of the MapIterator
- // destructor -- we should never actually access the iterator in this state
- // except to delete it.
- shared_ptr<Message> owner;
-
- // The version of the map when we took the iterator to it.
- //
- // We store this so that if the map is modified during iteration we can throw
- // an error.
- uint64 version;
-
- // True if the container is empty. We signal this separately to avoid calling
- // any of the iteration methods, which are non-const.
- bool empty;
-};
-
-Message* MapContainer::GetMutableMessage() {
- cmessage::AssureWritable(parent);
- return const_cast<Message*>(message);
-}
-
-// Consumes a reference on the Python string object.
-static bool PyStringToSTL(PyObject* py_string, string* stl_string) {
- char *value;
- Py_ssize_t value_len;
-
- if (!py_string) {
- return false;
- }
- if (PyBytes_AsStringAndSize(py_string, &value, &value_len) < 0) {
- Py_DECREF(py_string);
- return false;
- } else {
- stl_string->assign(value, value_len);
- Py_DECREF(py_string);
- return true;
- }
-}
-
-static bool PythonToMapKey(PyObject* obj,
- const FieldDescriptor* field_descriptor,
- MapKey* key) {
- switch (field_descriptor->cpp_type()) {
- case FieldDescriptor::CPPTYPE_INT32: {
- GOOGLE_CHECK_GET_INT32(obj, value, false);
- key->SetInt32Value(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_INT64: {
- GOOGLE_CHECK_GET_INT64(obj, value, false);
- key->SetInt64Value(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT32: {
- GOOGLE_CHECK_GET_UINT32(obj, value, false);
- key->SetUInt32Value(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT64: {
- GOOGLE_CHECK_GET_UINT64(obj, value, false);
- key->SetUInt64Value(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_BOOL: {
- GOOGLE_CHECK_GET_BOOL(obj, value, false);
- key->SetBoolValue(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_STRING: {
- string str;
- if (!PyStringToSTL(CheckString(obj, field_descriptor), &str)) {
- return false;
- }
- key->SetStringValue(str);
- break;
- }
- default:
- PyErr_Format(
- PyExc_SystemError, "Type %d cannot be a map key",
- field_descriptor->cpp_type());
- return false;
- }
- return true;
-}
-
-static PyObject* MapKeyToPython(const FieldDescriptor* field_descriptor,
- const MapKey& key) {
- switch (field_descriptor->cpp_type()) {
- case FieldDescriptor::CPPTYPE_INT32:
- return PyInt_FromLong(key.GetInt32Value());
- case FieldDescriptor::CPPTYPE_INT64:
- return PyLong_FromLongLong(key.GetInt64Value());
- case FieldDescriptor::CPPTYPE_UINT32:
- return PyInt_FromSize_t(key.GetUInt32Value());
- case FieldDescriptor::CPPTYPE_UINT64:
- return PyLong_FromUnsignedLongLong(key.GetUInt64Value());
- case FieldDescriptor::CPPTYPE_BOOL:
- return PyBool_FromLong(key.GetBoolValue());
- case FieldDescriptor::CPPTYPE_STRING:
- return ToStringObject(field_descriptor, key.GetStringValue());
- default:
- PyErr_Format(
- PyExc_SystemError, "Couldn't convert type %d to value",
- field_descriptor->cpp_type());
- return NULL;
- }
-}
-
-// This is only used for ScalarMap, so we don't need to handle the
-// CPPTYPE_MESSAGE case.
-PyObject* MapValueRefToPython(const FieldDescriptor* field_descriptor,
- MapValueRef* value) {
- switch (field_descriptor->cpp_type()) {
- case FieldDescriptor::CPPTYPE_INT32:
- return PyInt_FromLong(value->GetInt32Value());
- case FieldDescriptor::CPPTYPE_INT64:
- return PyLong_FromLongLong(value->GetInt64Value());
- case FieldDescriptor::CPPTYPE_UINT32:
- return PyInt_FromSize_t(value->GetUInt32Value());
- case FieldDescriptor::CPPTYPE_UINT64:
- return PyLong_FromUnsignedLongLong(value->GetUInt64Value());
- case FieldDescriptor::CPPTYPE_FLOAT:
- return PyFloat_FromDouble(value->GetFloatValue());
- case FieldDescriptor::CPPTYPE_DOUBLE:
- return PyFloat_FromDouble(value->GetDoubleValue());
- case FieldDescriptor::CPPTYPE_BOOL:
- return PyBool_FromLong(value->GetBoolValue());
- case FieldDescriptor::CPPTYPE_STRING:
- return ToStringObject(field_descriptor, value->GetStringValue());
- case FieldDescriptor::CPPTYPE_ENUM:
- return PyInt_FromLong(value->GetEnumValue());
- default:
- PyErr_Format(
- PyExc_SystemError, "Couldn't convert type %d to value",
- field_descriptor->cpp_type());
- return NULL;
- }
-}
-
-// This is only used for ScalarMap, so we don't need to handle the
-// CPPTYPE_MESSAGE case.
-static bool PythonToMapValueRef(PyObject* obj,
- const FieldDescriptor* field_descriptor,
- bool allow_unknown_enum_values,
- MapValueRef* value_ref) {
- switch (field_descriptor->cpp_type()) {
- case FieldDescriptor::CPPTYPE_INT32: {
- GOOGLE_CHECK_GET_INT32(obj, value, false);
- value_ref->SetInt32Value(value);
- return true;
- }
- case FieldDescriptor::CPPTYPE_INT64: {
- GOOGLE_CHECK_GET_INT64(obj, value, false);
- value_ref->SetInt64Value(value);
- return true;
- }
- case FieldDescriptor::CPPTYPE_UINT32: {
- GOOGLE_CHECK_GET_UINT32(obj, value, false);
- value_ref->SetUInt32Value(value);
- return true;
- }
- case FieldDescriptor::CPPTYPE_UINT64: {
- GOOGLE_CHECK_GET_UINT64(obj, value, false);
- value_ref->SetUInt64Value(value);
- return true;
- }
- case FieldDescriptor::CPPTYPE_FLOAT: {
- GOOGLE_CHECK_GET_FLOAT(obj, value, false);
- value_ref->SetFloatValue(value);
- return true;
- }
- case FieldDescriptor::CPPTYPE_DOUBLE: {
- GOOGLE_CHECK_GET_DOUBLE(obj, value, false);
- value_ref->SetDoubleValue(value);
- return true;
- }
- case FieldDescriptor::CPPTYPE_BOOL: {
- GOOGLE_CHECK_GET_BOOL(obj, value, false);
- value_ref->SetBoolValue(value);
-      return true;
- }
- case FieldDescriptor::CPPTYPE_STRING: {
- string str;
- if (!PyStringToSTL(CheckString(obj, field_descriptor), &str)) {
- return false;
- }
- value_ref->SetStringValue(str);
- return true;
- }
- case FieldDescriptor::CPPTYPE_ENUM: {
- GOOGLE_CHECK_GET_INT32(obj, value, false);
- if (allow_unknown_enum_values) {
- value_ref->SetEnumValue(value);
- return true;
- } else {
- const EnumDescriptor* enum_descriptor = field_descriptor->enum_type();
- const EnumValueDescriptor* enum_value =
- enum_descriptor->FindValueByNumber(value);
- if (enum_value != NULL) {
- value_ref->SetEnumValue(value);
- return true;
- } else {
- PyErr_Format(PyExc_ValueError, "Unknown enum value: %d", value);
- return false;
- }
- }
- break;
- }
- default:
- PyErr_Format(
- PyExc_SystemError, "Setting value to a field of unknown type %d",
- field_descriptor->cpp_type());
- return false;
- }
-}
-
-// Map methods common to ScalarMap and MessageMap //////////////////////////////
-
-static MapContainer* GetMap(PyObject* obj) {
- return reinterpret_cast<MapContainer*>(obj);
-}
-
-Py_ssize_t MapReflectionFriend::Length(PyObject* _self) {
- MapContainer* self = GetMap(_self);
- const google::protobuf::Message* message = self->message;
- return message->GetReflection()->MapSize(*message,
- self->parent_field_descriptor);
-}
-
-PyObject* Clear(PyObject* _self) {
- MapContainer* self = GetMap(_self);
- Message* message = self->GetMutableMessage();
- const Reflection* reflection = message->GetReflection();
-
- reflection->ClearField(message, self->parent_field_descriptor);
-
- Py_RETURN_NONE;
-}
-
-PyObject* MapReflectionFriend::Contains(PyObject* _self, PyObject* key) {
- MapContainer* self = GetMap(_self);
-
- const Message* message = self->message;
- const Reflection* reflection = message->GetReflection();
- MapKey map_key;
-
- if (!PythonToMapKey(key, self->key_field_descriptor, &map_key)) {
- return NULL;
- }
-
- if (reflection->ContainsMapKey(*message, self->parent_field_descriptor,
- map_key)) {
- Py_RETURN_TRUE;
- } else {
- Py_RETURN_FALSE;
- }
-}
-
-// Initializes the underlying Message object of "to" so it becomes a new parent
-// map container, and copies all the values from "from" to it. A child map
-// container can be released by passing it as both from and to (e.g. making it
-// the recipient of the new parent message and copying the values from itself).
-static int InitializeAndCopyToParentContainer(MapContainer* from,
- MapContainer* to) {
- // For now we require from == to, re-evaluate if we want to support deep copy
- // as in repeated_scalar_container.cc.
- GOOGLE_DCHECK(from == to);
- Message* new_message = from->message->New();
-
- if (MapReflectionFriend::Length(reinterpret_cast<PyObject*>(from)) > 0) {
- // A somewhat roundabout way of copying just one field from old_message to
- // new_message. This is the best we can do with what Reflection gives us.
- Message* mutable_old = from->GetMutableMessage();
- vector<const FieldDescriptor*> fields;
- fields.push_back(from->parent_field_descriptor);
-
- // Move the map field into the new message.
- mutable_old->GetReflection()->SwapFields(mutable_old, new_message, fields);
-
- // If/when we support from != to, this will be required also to copy the
- // map field back into the existing message:
- // mutable_old->MergeFrom(*new_message);
- }
-
- // If from == to this could delete old_message.
- to->owner.reset(new_message);
-
- to->parent = NULL;
- to->parent_field_descriptor = from->parent_field_descriptor;
- to->message = new_message;
-
- // Invalidate iterators, since they point to the old copy of the field.
- to->version++;
-
- return 0;
-}
-
-int MapContainer::Release() {
- return InitializeAndCopyToParentContainer(this, this);
-}
-
-
-// ScalarMap ///////////////////////////////////////////////////////////////////
-
-PyObject *NewScalarMapContainer(
- CMessage* parent, const google::protobuf::FieldDescriptor* parent_field_descriptor) {
- if (!CheckFieldBelongsToMessage(parent_field_descriptor, parent->message)) {
- return NULL;
- }
-
-#if PY_MAJOR_VERSION >= 3
- ScopedPyObjectPtr obj(PyType_GenericAlloc(
- reinterpret_cast<PyTypeObject *>(ScalarMapContainer_Type), 0));
-#else
- ScopedPyObjectPtr obj(PyType_GenericAlloc(&ScalarMapContainer_Type, 0));
-#endif
- if (obj.get() == NULL) {
- return PyErr_Format(PyExc_RuntimeError,
- "Could not allocate new container.");
- }
-
- MapContainer* self = GetMap(obj.get());
-
- self->message = parent->message;
- self->parent = parent;
- self->parent_field_descriptor = parent_field_descriptor;
- self->owner = parent->owner;
- self->version = 0;
-
- self->key_field_descriptor =
- parent_field_descriptor->message_type()->FindFieldByName("key");
- self->value_field_descriptor =
- parent_field_descriptor->message_type()->FindFieldByName("value");
-
- if (self->key_field_descriptor == NULL ||
- self->value_field_descriptor == NULL) {
- return PyErr_Format(PyExc_KeyError,
- "Map entry descriptor did not have key/value fields");
- }
-
- return obj.release();
-}
-
-PyObject* MapReflectionFriend::ScalarMapGetItem(PyObject* _self,
- PyObject* key) {
- MapContainer* self = GetMap(_self);
-
- Message* message = self->GetMutableMessage();
- const Reflection* reflection = message->GetReflection();
- MapKey map_key;
- MapValueRef value;
-
- if (!PythonToMapKey(key, self->key_field_descriptor, &map_key)) {
- return NULL;
- }
-
- if (reflection->InsertOrLookupMapValue(message, self->parent_field_descriptor,
- map_key, &value)) {
- self->version++;
- }
-
- return MapValueRefToPython(self->value_field_descriptor, &value);
-}
-
-int MapReflectionFriend::ScalarMapSetItem(PyObject* _self, PyObject* key,
- PyObject* v) {
- MapContainer* self = GetMap(_self);
-
- Message* message = self->GetMutableMessage();
- const Reflection* reflection = message->GetReflection();
- MapKey map_key;
- MapValueRef value;
-
- if (!PythonToMapKey(key, self->key_field_descriptor, &map_key)) {
- return -1;
- }
-
- self->version++;
-
- if (v) {
- // Set item to v.
- reflection->InsertOrLookupMapValue(message, self->parent_field_descriptor,
- map_key, &value);
-
- return PythonToMapValueRef(v, self->value_field_descriptor,
- reflection->SupportsUnknownEnumValues(), &value)
- ? 0
- : -1;
- } else {
- // Delete key from map.
- if (reflection->DeleteMapValue(message, self->parent_field_descriptor,
- map_key)) {
- return 0;
- } else {
- PyErr_Format(PyExc_KeyError, "Key not present in map");
- return -1;
- }
- }
-}
-
-static PyObject* ScalarMapGet(PyObject* self, PyObject* args) {
- PyObject* key;
- PyObject* default_value = NULL;
-  if (!PyArg_ParseTuple(args, "O|O", &key, &default_value)) {
- return NULL;
- }
-
- ScopedPyObjectPtr is_present(MapReflectionFriend::Contains(self, key));
- if (is_present.get() == NULL) {
- return NULL;
- }
-
- if (PyObject_IsTrue(is_present.get())) {
- return MapReflectionFriend::ScalarMapGetItem(self, key);
- } else {
- if (default_value != NULL) {
- Py_INCREF(default_value);
- return default_value;
- } else {
- Py_RETURN_NONE;
- }
- }
-}
-
-static void ScalarMapDealloc(PyObject* _self) {
- MapContainer* self = GetMap(_self);
- self->owner.reset();
- Py_TYPE(_self)->tp_free(_self);
-}
-
-static PyMethodDef ScalarMapMethods[] = {
- { "__contains__", MapReflectionFriend::Contains, METH_O,
- "Tests whether a key is a member of the map." },
- { "clear", (PyCFunction)Clear, METH_NOARGS,
- "Removes all elements from the map." },
- { "get", ScalarMapGet, METH_VARARGS,
- "Gets the value for the given key if present, or otherwise a default" },
- /*
- { "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
- "Makes a deep copy of the class." },
- { "__reduce__", (PyCFunction)Reduce, METH_NOARGS,
- "Outputs picklable representation of the repeated field." },
- */
- {NULL, NULL},
-};
-
-#if PY_MAJOR_VERSION >= 3
- static PyType_Slot ScalarMapContainer_Type_slots[] = {
- {Py_tp_dealloc, (void *)ScalarMapDealloc},
- {Py_mp_length, (void *)MapReflectionFriend::Length},
- {Py_mp_subscript, (void *)MapReflectionFriend::ScalarMapGetItem},
- {Py_mp_ass_subscript, (void *)MapReflectionFriend::ScalarMapSetItem},
- {Py_tp_methods, (void *)ScalarMapMethods},
- {Py_tp_iter, (void *)MapReflectionFriend::GetIterator},
- {0, 0},
- };
-
- PyType_Spec ScalarMapContainer_Type_spec = {
- FULL_MODULE_NAME ".ScalarMapContainer",
- sizeof(MapContainer),
- 0,
- Py_TPFLAGS_DEFAULT,
- ScalarMapContainer_Type_slots
- };
- PyObject *ScalarMapContainer_Type;
-#else
- static PyMappingMethods ScalarMapMappingMethods = {
- MapReflectionFriend::Length, // mp_length
- MapReflectionFriend::ScalarMapGetItem, // mp_subscript
- MapReflectionFriend::ScalarMapSetItem, // mp_ass_subscript
- };
-
- PyTypeObject ScalarMapContainer_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".ScalarMapContainer", // tp_name
- sizeof(MapContainer), // tp_basicsize
- 0, // tp_itemsize
- ScalarMapDealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- &ScalarMapMappingMethods, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A scalar map container", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- MapReflectionFriend::GetIterator, // tp_iter
- 0, // tp_iternext
- ScalarMapMethods, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
- };
-#endif
-
-
-// MessageMap //////////////////////////////////////////////////////////////////
-
-static MessageMapContainer* GetMessageMap(PyObject* obj) {
- return reinterpret_cast<MessageMapContainer*>(obj);
-}
-
-static PyObject* GetCMessage(MessageMapContainer* self, Message* message) {
- // Get or create the CMessage object corresponding to this message.
- ScopedPyObjectPtr key(PyLong_FromVoidPtr(message));
- PyObject* ret = PyDict_GetItem(self->message_dict, key.get());
-
- if (ret == NULL) {
- CMessage* cmsg = cmessage::NewEmptyMessage(self->subclass_init,
- message->GetDescriptor());
- ret = reinterpret_cast<PyObject*>(cmsg);
-
- if (cmsg == NULL) {
- return NULL;
- }
- cmsg->owner = self->owner;
- cmsg->message = message;
- cmsg->parent = self->parent;
-
- if (PyDict_SetItem(self->message_dict, key.get(), ret) < 0) {
- Py_DECREF(ret);
- return NULL;
- }
- } else {
- Py_INCREF(ret);
- }
-
- return ret;
-}
-
-PyObject* NewMessageMapContainer(
- CMessage* parent, const google::protobuf::FieldDescriptor* parent_field_descriptor,
- PyObject* concrete_class) {
- if (!CheckFieldBelongsToMessage(parent_field_descriptor, parent->message)) {
- return NULL;
- }
-
-#if PY_MAJOR_VERSION >= 3
- PyObject* obj = PyType_GenericAlloc(
- reinterpret_cast<PyTypeObject *>(MessageMapContainer_Type), 0);
-#else
- PyObject* obj = PyType_GenericAlloc(&MessageMapContainer_Type, 0);
-#endif
- if (obj == NULL) {
- return PyErr_Format(PyExc_RuntimeError,
- "Could not allocate new container.");
- }
-
- MessageMapContainer* self = GetMessageMap(obj);
-
- self->message = parent->message;
- self->parent = parent;
- self->parent_field_descriptor = parent_field_descriptor;
- self->owner = parent->owner;
- self->version = 0;
-
- self->key_field_descriptor =
- parent_field_descriptor->message_type()->FindFieldByName("key");
- self->value_field_descriptor =
- parent_field_descriptor->message_type()->FindFieldByName("value");
-
- self->message_dict = PyDict_New();
- if (self->message_dict == NULL) {
- return PyErr_Format(PyExc_RuntimeError,
- "Could not allocate message dict.");
- }
-
- Py_INCREF(concrete_class);
- self->subclass_init = concrete_class;
-
- if (self->key_field_descriptor == NULL ||
- self->value_field_descriptor == NULL) {
- Py_DECREF(obj);
- return PyErr_Format(PyExc_KeyError,
- "Map entry descriptor did not have key/value fields");
- }
-
- return obj;
-}
-
-int MapReflectionFriend::MessageMapSetItem(PyObject* _self, PyObject* key,
- PyObject* v) {
- if (v) {
- PyErr_Format(PyExc_ValueError,
- "Direct assignment of submessage not allowed");
- return -1;
- }
-
- // Now we know that this is a delete, not a set.
-
- MessageMapContainer* self = GetMessageMap(_self);
- Message* message = self->GetMutableMessage();
- const Reflection* reflection = message->GetReflection();
- MapKey map_key;
- MapValueRef value;
-
- self->version++;
-
- if (!PythonToMapKey(key, self->key_field_descriptor, &map_key)) {
- return -1;
- }
-
- // Delete key from map.
- if (reflection->DeleteMapValue(message, self->parent_field_descriptor,
- map_key)) {
- return 0;
- } else {
- PyErr_Format(PyExc_KeyError, "Key not present in map");
- return -1;
- }
-}
-
-PyObject* MapReflectionFriend::MessageMapGetItem(PyObject* _self,
- PyObject* key) {
- MessageMapContainer* self = GetMessageMap(_self);
-
- Message* message = self->GetMutableMessage();
- const Reflection* reflection = message->GetReflection();
- MapKey map_key;
- MapValueRef value;
-
- if (!PythonToMapKey(key, self->key_field_descriptor, &map_key)) {
- return NULL;
- }
-
- if (reflection->InsertOrLookupMapValue(message, self->parent_field_descriptor,
- map_key, &value)) {
- self->version++;
- }
-
- return GetCMessage(self, value.MutableMessageValue());
-}
-
-PyObject* MessageMapGet(PyObject* self, PyObject* args) {
- PyObject* key;
- PyObject* default_value = NULL;
-  if (!PyArg_ParseTuple(args, "O|O", &key, &default_value)) {
- return NULL;
- }
-
- ScopedPyObjectPtr is_present(MapReflectionFriend::Contains(self, key));
- if (is_present.get() == NULL) {
- return NULL;
- }
-
- if (PyObject_IsTrue(is_present.get())) {
- return MapReflectionFriend::MessageMapGetItem(self, key);
- } else {
- if (default_value != NULL) {
- Py_INCREF(default_value);
- return default_value;
- } else {
- Py_RETURN_NONE;
- }
- }
-}
-
-static void MessageMapDealloc(PyObject* _self) {
- MessageMapContainer* self = GetMessageMap(_self);
- self->owner.reset();
- Py_DECREF(self->message_dict);
- Py_TYPE(_self)->tp_free(_self);
-}
-
-static PyMethodDef MessageMapMethods[] = {
- { "__contains__", (PyCFunction)MapReflectionFriend::Contains, METH_O,
- "Tests whether the map contains this element."},
- { "clear", (PyCFunction)Clear, METH_NOARGS,
- "Removes all elements from the map."},
- { "get", MessageMapGet, METH_VARARGS,
- "Gets the value for the given key if present, or otherwise a default" },
- { "get_or_create", MapReflectionFriend::MessageMapGetItem, METH_O,
- "Alias for getitem, useful to make explicit that the map is mutated." },
- /*
- { "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
- "Makes a deep copy of the class." },
- { "__reduce__", (PyCFunction)Reduce, METH_NOARGS,
- "Outputs picklable representation of the repeated field." },
- */
- {NULL, NULL},
-};
-
-#if PY_MAJOR_VERSION >= 3
- static PyType_Slot MessageMapContainer_Type_slots[] = {
- {Py_tp_dealloc, (void *)MessageMapDealloc},
- {Py_mp_length, (void *)MapReflectionFriend::Length},
- {Py_mp_subscript, (void *)MapReflectionFriend::MessageMapGetItem},
- {Py_mp_ass_subscript, (void *)MapReflectionFriend::MessageMapSetItem},
- {Py_tp_methods, (void *)MessageMapMethods},
- {Py_tp_iter, (void *)MapReflectionFriend::GetIterator},
- {0, 0}
- };
-
- PyType_Spec MessageMapContainer_Type_spec = {
- FULL_MODULE_NAME ".MessageMapContainer",
- sizeof(MessageMapContainer),
- 0,
- Py_TPFLAGS_DEFAULT,
- MessageMapContainer_Type_slots
- };
-
- PyObject *MessageMapContainer_Type;
-#else
- static PyMappingMethods MessageMapMappingMethods = {
- MapReflectionFriend::Length, // mp_length
- MapReflectionFriend::MessageMapGetItem, // mp_subscript
- MapReflectionFriend::MessageMapSetItem, // mp_ass_subscript
- };
-
- PyTypeObject MessageMapContainer_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".MessageMapContainer", // tp_name
- sizeof(MessageMapContainer), // tp_basicsize
- 0, // tp_itemsize
- MessageMapDealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- &MessageMapMappingMethods, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A map container for message", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- MapReflectionFriend::GetIterator, // tp_iter
- 0, // tp_iternext
- MessageMapMethods, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
- };
-#endif
-
-// MapIterator /////////////////////////////////////////////////////////////////
-
-static MapIterator* GetIter(PyObject* obj) {
- return reinterpret_cast<MapIterator*>(obj);
-}
-
-PyObject* MapReflectionFriend::GetIterator(PyObject *_self) {
- MapContainer* self = GetMap(_self);
-
- ScopedPyObjectPtr obj(PyType_GenericAlloc(&MapIterator_Type, 0));
- if (obj == NULL) {
- return PyErr_Format(PyExc_KeyError, "Could not allocate iterator");
- }
-
- MapIterator* iter = GetIter(obj.get());
-
- Py_INCREF(self);
- iter->container = self;
- iter->version = self->version;
- iter->owner = self->owner;
-
- if (MapReflectionFriend::Length(_self) > 0) {
- Message* message = self->GetMutableMessage();
- const Reflection* reflection = message->GetReflection();
-
- iter->iter.reset(new ::google::protobuf::MapIterator(
- reflection->MapBegin(message, self->parent_field_descriptor)));
- }
-
- return obj.release();
-}
-
-PyObject* MapReflectionFriend::IterNext(PyObject* _self) {
- MapIterator* self = GetIter(_self);
-
- // This won't catch mutations to the map performed by MergeFrom(); no easy way
- // to address that.
- if (self->version != self->container->version) {
- return PyErr_Format(PyExc_RuntimeError,
- "Map modified during iteration.");
- }
-
- if (self->iter.get() == NULL) {
- return NULL;
- }
-
- Message* message = self->container->GetMutableMessage();
- const Reflection* reflection = message->GetReflection();
-
- if (*self->iter ==
- reflection->MapEnd(message, self->container->parent_field_descriptor)) {
- return NULL;
- }
-
- PyObject* ret = MapKeyToPython(self->container->key_field_descriptor,
- self->iter->GetKey());
-
- ++(*self->iter);
-
- return ret;
-}
-
-static void DeallocMapIterator(PyObject* _self) {
- MapIterator* self = GetIter(_self);
- self->iter.reset();
- self->owner.reset();
- Py_XDECREF(self->container);
- Py_TYPE(_self)->tp_free(_self);
-}
-
-PyTypeObject MapIterator_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".MapIterator", // tp_name
- sizeof(MapIterator), // tp_basicsize
- 0, // tp_itemsize
- DeallocMapIterator, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A scalar map iterator", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- PyObject_SelfIter, // tp_iter
- MapReflectionFriend::IterNext, // tp_iternext
- 0, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
-};
-
-} // namespace python
-} // namespace protobuf
-} // namespace google
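
The map containers removed above are what make proto3 map fields behave like Python dicts. A hedged sketch of the entry points they implement; map_pb2, TestMap, SubMessage and the field names are hypothetical placeholders:

    # Sketch only: map_pb2.TestMap is a hypothetical message declaring
    #   map<int32, string>      scalar_map  = 1;
    #   map<string, SubMessage> message_map = 2;
    from my_project import map_pb2

    m = map_pb2.TestMap()

    m.scalar_map[5] = "five"                  # ScalarMapSetItem
    print(m.scalar_map[5])                    # ScalarMapGetItem
    print(m.scalar_map.get(6, "missing"))     # ScalarMapGet with a default
    del m.scalar_map[5]                       # ScalarMapSetItem with v == NULL

    entry = m.message_map["a"]                # MessageMapGetItem inserts the entry
    entry = m.message_map.get_or_create("b")  # explicit alias for the same lookup
    del m.message_map["a"]                    # MessageMapSetItem only handles deletion

Direct assignment of a submessage (m.message_map["a"] = other) is rejected with ValueError, as MessageMapSetItem above shows; callers mutate the entry returned by the lookup instead.
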
diff --git a/third_party/protobuf/python/google/protobuf/pyext/map_container.h b/third_party/protobuf/python/google/protobuf/pyext/map_container.h
deleted file mode 100644
index ddf94be71b..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/map_container.h
+++ /dev/null
@@ -1,141 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_MAP_CONTAINER_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_MAP_CONTAINER_H__
-
-#include <Python.h>
-
-#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
-
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/message.h>
-
-namespace google {
-namespace protobuf {
-
-class Message;
-
-#ifdef _SHARED_PTR_H
-using std::shared_ptr;
-#else
-using internal::shared_ptr;
-#endif
-
-namespace python {
-
-struct CMessage;
-
-// This struct is used directly for ScalarMap, and is the base class of
-// MessageMapContainer, which is used for MessageMap.
-struct MapContainer {
- PyObject_HEAD;
-
- // This is the top-level C++ Message object that owns the whole
- // proto tree. Every Python MapContainer holds a
- // reference to it in order to keep it alive as long as there's a
- // Python object that references any part of the tree.
- shared_ptr<Message> owner;
-
- // Pointer to the C++ Message that contains this container. The
- // MapContainer does not own this pointer.
- const Message* message;
-
-  // Used to get a mutable message when necessary.
- Message* GetMutableMessage();
-
-  // Weak reference to a parent CMessage object (i.e. may be NULL).
- //
- // Used to make sure all ancestors are also mutable when first
- // modifying the container.
- CMessage* parent;
-
- // Pointer to the parent's descriptor that describes this
- // field. Used together with the parent's message when making a
- // default message instance mutable.
- // The pointer is owned by the global DescriptorPool.
- const FieldDescriptor* parent_field_descriptor;
- const FieldDescriptor* key_field_descriptor;
- const FieldDescriptor* value_field_descriptor;
-
- // We bump this whenever we perform a mutation, to invalidate existing
- // iterators.
- uint64 version;
-
- // Releases the messages in the container to a new message.
- //
- // Returns 0 on success, -1 on failure.
- int Release();
-
- // Set the owner field of self and any children of self.
- void SetOwner(const shared_ptr<Message>& new_owner) {
- owner = new_owner;
- }
-};
-
-struct MessageMapContainer : public MapContainer {
- // A callable that is used to create new child messages.
- PyObject* subclass_init;
-
- // A dict mapping Message* -> CMessage.
- PyObject* message_dict;
-};
-
-#if PY_MAJOR_VERSION >= 3
- extern PyObject *MessageMapContainer_Type;
- extern PyType_Spec MessageMapContainer_Type_spec;
- extern PyObject *ScalarMapContainer_Type;
- extern PyType_Spec ScalarMapContainer_Type_spec;
-#else
- extern PyTypeObject MessageMapContainer_Type;
- extern PyTypeObject ScalarMapContainer_Type;
-#endif
-
-extern PyTypeObject MapIterator_Type; // Both map types use the same iterator.
-
-// Builds a MapContainer object, from a parent message and a
-// field descriptor.
-extern PyObject* NewScalarMapContainer(
- CMessage* parent, const FieldDescriptor* parent_field_descriptor);
-
-// Builds a MessageMap object, from a parent message and a
-// field descriptor.
-extern PyObject* NewMessageMapContainer(
- CMessage* parent, const FieldDescriptor* parent_field_descriptor,
- PyObject* concrete_class);
-
-} // namespace python
-} // namespace protobuf
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_MAP_CONTAINER_H__
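
The version counter declared in MapContainer above is what lets MapReflectionFriend::IterNext (in map_container.cc) detect mutation during iteration. Under the C++ implementation removed here, the observable effect from Python is a RuntimeError; a sketch, reusing the hypothetical map_pb2 module from the previous example:

    m = map_pb2.TestMap()
    m.scalar_map[1] = "one"
    m.scalar_map[2] = "two"

    try:
        for key in m.scalar_map:
            m.scalar_map[3] = "three"   # bumps MapContainer::version
    except RuntimeError as err:
        print(err)                      # "Map modified during iteration."
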
diff --git a/third_party/protobuf/python/google/protobuf/pyext/message.cc b/third_party/protobuf/python/google/protobuf/pyext/message.cc
deleted file mode 100644
index 60ec9c1b96..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/message.cc
+++ /dev/null
@@ -1,3091 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: anuraag@google.com (Anuraag Agrawal)
-// Author: tibell@google.com (Johan Tibell)
-
-#include <google/protobuf/pyext/message.h>
-
-#include <map>
-#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
-#include <string>
-#include <vector>
-#include <structmember.h> // A Python header file.
-
-#ifndef PyVarObject_HEAD_INIT
-#define PyVarObject_HEAD_INIT(type, size) PyObject_HEAD_INIT(type) size,
-#endif
-#ifndef Py_TYPE
-#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
-#endif
-#include <google/protobuf/descriptor.pb.h>
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/io/coded_stream.h>
-#include <google/protobuf/util/message_differencer.h>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/text_format.h>
-#include <google/protobuf/unknown_field_set.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/extension_dict.h>
-#include <google/protobuf/pyext/repeated_composite_container.h>
-#include <google/protobuf/pyext/repeated_scalar_container.h>
-#include <google/protobuf/pyext/map_container.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-#include <google/protobuf/stubs/strutil.h>
-
-#if PY_MAJOR_VERSION >= 3
- #define PyInt_Check PyLong_Check
- #define PyInt_AsLong PyLong_AsLong
- #define PyInt_FromLong PyLong_FromLong
- #define PyInt_FromSize_t PyLong_FromSize_t
- #define PyString_Check PyUnicode_Check
- #define PyString_FromString PyUnicode_FromString
- #define PyString_FromStringAndSize PyUnicode_FromStringAndSize
- #if PY_VERSION_HEX < 0x03030000
- #error "Python 3.0 - 3.2 are not supported."
- #else
- #define PyString_AsString(ob) \
- (PyUnicode_Check(ob)? PyUnicode_AsUTF8(ob): PyBytes_AsString(ob))
- #define PyString_AsStringAndSize(ob, charpp, sizep) \
- (PyUnicode_Check(ob)? \
- ((*(charpp) = PyUnicode_AsUTF8AndSize(ob, (sizep))) == NULL? -1: 0): \
- PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
- #endif
-#endif
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-static PyObject* kDESCRIPTOR;
-static PyObject* k_extensions_by_name;
-static PyObject* k_extensions_by_number;
-PyObject* EnumTypeWrapper_class;
-static PyObject* PythonMessage_class;
-static PyObject* kEmptyWeakref;
-static PyObject* WKT_classes = NULL;
-
-// Defines the Metaclass of all Message classes.
-// It allows us to cache some C++ pointers in the class object itself; they are
-// faster to extract there than from the type's dictionary.
-
-struct PyMessageMeta {
- // This is how CPython subclasses C structures: the base structure must be
- // the first member of the object.
- PyHeapTypeObject super;
-
- // C++ descriptor of this message.
- const Descriptor* message_descriptor;
-
- // Owned reference, used to keep the pointer above alive.
- PyObject* py_message_descriptor;
-
- // The Python DescriptorPool used to create the class. It is needed to resolve
-  // field descriptors, including extension fields; its C++ MessageFactory is
- // used to instantiate submessages.
- // This can be different from DESCRIPTOR.file.pool, in the case of a custom
- // DescriptorPool which defines new extensions.
- // We own the reference, because it's important to keep the descriptors and
- // factory alive.
- PyDescriptorPool* py_descriptor_pool;
-};
-
-namespace message_meta {
-
-static int InsertEmptyWeakref(PyTypeObject* base);
-
-// Add the number of a field descriptor to the containing message class.
-// Equivalent to:
-// _cls.<field>_FIELD_NUMBER = <number>
-static bool AddFieldNumberToClass(
- PyObject* cls, const FieldDescriptor* field_descriptor) {
- string constant_name = field_descriptor->name() + "_FIELD_NUMBER";
- UpperString(&constant_name);
- ScopedPyObjectPtr attr_name(PyString_FromStringAndSize(
- constant_name.c_str(), constant_name.size()));
- if (attr_name == NULL) {
- return false;
- }
- ScopedPyObjectPtr number(PyInt_FromLong(field_descriptor->number()));
- if (number == NULL) {
- return false;
- }
- if (PyObject_SetAttr(cls, attr_name.get(), number.get()) == -1) {
- return false;
- }
- return true;
-}
-
-
-// Finalize the creation of the Message class.
-static int AddDescriptors(PyObject* cls, const Descriptor* descriptor) {
- // If there are extension_ranges, the message is "extendable", and extension
- // classes will register themselves in this class.
- if (descriptor->extension_range_count() > 0) {
- ScopedPyObjectPtr by_name(PyDict_New());
- if (PyObject_SetAttr(cls, k_extensions_by_name, by_name.get()) < 0) {
- return -1;
- }
- ScopedPyObjectPtr by_number(PyDict_New());
- if (PyObject_SetAttr(cls, k_extensions_by_number, by_number.get()) < 0) {
- return -1;
- }
- }
-
- // For each field set: cls.<field>_FIELD_NUMBER = <number>
- for (int i = 0; i < descriptor->field_count(); ++i) {
- if (!AddFieldNumberToClass(cls, descriptor->field(i))) {
- return -1;
- }
- }
-
- // For each enum set cls.<enum name> = EnumTypeWrapper(<enum descriptor>).
- //
- // The enum descriptor we get from
- // <messagedescriptor>.enum_types_by_name[name]
- // which was built previously.
- for (int i = 0; i < descriptor->enum_type_count(); ++i) {
- const EnumDescriptor* enum_descriptor = descriptor->enum_type(i);
- ScopedPyObjectPtr enum_type(
- PyEnumDescriptor_FromDescriptor(enum_descriptor));
- if (enum_type == NULL) {
- return -1;
- }
- // Add wrapped enum type to message class.
- ScopedPyObjectPtr wrapped(PyObject_CallFunctionObjArgs(
- EnumTypeWrapper_class, enum_type.get(), NULL));
- if (wrapped == NULL) {
- return -1;
- }
- if (PyObject_SetAttrString(
- cls, enum_descriptor->name().c_str(), wrapped.get()) == -1) {
- return -1;
- }
-
- // For each enum value add cls.<name> = <number>
- for (int j = 0; j < enum_descriptor->value_count(); ++j) {
- const EnumValueDescriptor* enum_value_descriptor =
- enum_descriptor->value(j);
- ScopedPyObjectPtr value_number(PyInt_FromLong(
- enum_value_descriptor->number()));
- if (value_number == NULL) {
- return -1;
- }
- if (PyObject_SetAttrString(cls, enum_value_descriptor->name().c_str(),
- value_number.get()) == -1) {
- return -1;
- }
- }
- }
-
- // For each extension set cls.<extension name> = <extension descriptor>.
- //
- // Extension descriptors come from
- // <message descriptor>.extensions_by_name[name]
- // which was defined previously.
- for (int i = 0; i < descriptor->extension_count(); ++i) {
- const google::protobuf::FieldDescriptor* field = descriptor->extension(i);
- ScopedPyObjectPtr extension_field(PyFieldDescriptor_FromDescriptor(field));
- if (extension_field == NULL) {
- return -1;
- }
-
- // Add the extension field to the message class.
- if (PyObject_SetAttrString(
- cls, field->name().c_str(), extension_field.get()) == -1) {
- return -1;
- }
-
- // For each extension set cls.<extension name>_FIELD_NUMBER = <number>.
- if (!AddFieldNumberToClass(cls, field)) {
- return -1;
- }
- }
-
- return 0;
-}
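As a Python-level illustration of what AddDescriptors() and AddFieldNumberToClass() attach to a generated class, here is a minimal sketch; `example_pb2` and its `Foo` message are hypothetical stand-ins for any compiled .proto.

    # Assumed (hypothetical) schema compiled by protoc:
    #   message Foo {
    #     optional int32 id = 1;
    #     enum Color { RED = 0; BLUE = 1; }
    #     optional Color color = 2;
    #   }
    from example_pb2 import Foo

    print(Foo.ID_FIELD_NUMBER)     # 1, set by AddFieldNumberToClass()
    print(Foo.COLOR_FIELD_NUMBER)  # 2
    print(Foo.Color)               # EnumTypeWrapper around the Color descriptor
    print(Foo.RED, Foo.BLUE)       # 0 1, enum values copied onto the class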
-
-static PyObject* New(PyTypeObject* type,
- PyObject* args, PyObject* kwargs) {
- static char *kwlist[] = {"name", "bases", "dict", 0};
- PyObject *bases, *dict;
- const char* name;
-
- // Check arguments: (name, bases, dict)
- if (!PyArg_ParseTupleAndKeywords(args, kwargs, "sO!O!:type", kwlist,
- &name,
- &PyTuple_Type, &bases,
- &PyDict_Type, &dict)) {
- return NULL;
- }
-
- // Check bases: only (), or (message.Message,) are allowed
- if (!(PyTuple_GET_SIZE(bases) == 0 ||
- (PyTuple_GET_SIZE(bases) == 1 &&
- PyTuple_GET_ITEM(bases, 0) == PythonMessage_class))) {
- PyErr_SetString(PyExc_TypeError,
- "A Message class can only inherit from Message");
- return NULL;
- }
-
- // Check dict['DESCRIPTOR']
- PyObject* py_descriptor = PyDict_GetItem(dict, kDESCRIPTOR);
- if (py_descriptor == NULL) {
- PyErr_SetString(PyExc_TypeError, "Message class has no DESCRIPTOR");
- return NULL;
- }
- if (!PyObject_TypeCheck(py_descriptor, &PyMessageDescriptor_Type)) {
- PyErr_Format(PyExc_TypeError, "Expected a message Descriptor, got %s",
- py_descriptor->ob_type->tp_name);
- return NULL;
- }
-
- // Build the arguments to the base metaclass.
- // We change the __bases__ classes.
- ScopedPyObjectPtr new_args;
- const Descriptor* message_descriptor =
- PyMessageDescriptor_AsDescriptor(py_descriptor);
- if (message_descriptor == NULL) {
- return NULL;
- }
-
- if (WKT_classes == NULL) {
- ScopedPyObjectPtr well_known_types(PyImport_ImportModule(
- "google.protobuf.internal.well_known_types"));
- GOOGLE_DCHECK(well_known_types != NULL);
-
- WKT_classes = PyObject_GetAttrString(well_known_types.get(), "WKTBASES");
- GOOGLE_DCHECK(WKT_classes != NULL);
- }
-
- PyObject* well_known_class = PyDict_GetItemString(
- WKT_classes, message_descriptor->full_name().c_str());
- if (well_known_class == NULL) {
- new_args.reset(Py_BuildValue("s(OO)O", name, &CMessage_Type,
- PythonMessage_class, dict));
- } else {
- new_args.reset(Py_BuildValue("s(OOO)O", name, &CMessage_Type,
- PythonMessage_class, well_known_class, dict));
- }
-
- if (new_args == NULL) {
- return NULL;
- }
- // Call the base metaclass.
- ScopedPyObjectPtr result(PyType_Type.tp_new(type, new_args.get(), NULL));
- if (result == NULL) {
- return NULL;
- }
- PyMessageMeta* newtype = reinterpret_cast<PyMessageMeta*>(result.get());
-
- // Insert the empty weakref into the base classes.
- if (InsertEmptyWeakref(
- reinterpret_cast<PyTypeObject*>(PythonMessage_class)) < 0 ||
- InsertEmptyWeakref(&CMessage_Type) < 0) {
- return NULL;
- }
-
- // Cache the descriptor, both as Python object and as C++ pointer.
- const Descriptor* descriptor =
- PyMessageDescriptor_AsDescriptor(py_descriptor);
- if (descriptor == NULL) {
- return NULL;
- }
- Py_INCREF(py_descriptor);
- newtype->py_message_descriptor = py_descriptor;
- newtype->message_descriptor = descriptor;
- // TODO(amauryfa): Don't always use the canonical pool of the descriptor,
- // use the MessageFactory optionally passed in the class dict.
- newtype->py_descriptor_pool = GetDescriptorPool_FromPool(
- descriptor->file()->pool());
- if (newtype->py_descriptor_pool == NULL) {
- return NULL;
- }
- Py_INCREF(newtype->py_descriptor_pool);
-
- // Add the message to the DescriptorPool.
- if (cdescriptor_pool::RegisterMessageClass(newtype->py_descriptor_pool,
- descriptor, result.get()) < 0) {
- return NULL;
- }
-
- // Continue with type initialization: add other descriptors, enum values...
- if (AddDescriptors(result.get(), descriptor) < 0) {
- return NULL;
- }
- return result.release();
-}
-
-static void Dealloc(PyMessageMeta *self) {
- Py_DECREF(self->py_message_descriptor);
- Py_DECREF(self->py_descriptor_pool);
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
-}
-
-
-// This function inserts an empty weakref at the end of the list of
-// subclasses for the main protocol buffer Message class.
-//
-// This eliminates an O(n^2) behaviour in the internal add_subclass
-// routine.
-static int InsertEmptyWeakref(PyTypeObject *base_type) {
-#if PY_MAJOR_VERSION >= 3
- // Python 3.4 has already included the fix for the issue that this
- // hack addresses. For further background and the fix please see
- // https://bugs.python.org/issue17936.
- return 0;
-#else
- PyObject *subclasses = base_type->tp_subclasses;
- if (subclasses && PyList_CheckExact(subclasses)) {
- return PyList_Append(subclasses, kEmptyWeakref);
- }
- return 0;
-#endif // PY_MAJOR_VERSION >= 3
-}
-
-} // namespace message_meta
-
-PyTypeObject PyMessageMeta_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".MessageMeta", // tp_name
- sizeof(PyMessageMeta), // tp_basicsize
- 0, // tp_itemsize
- (destructor)message_meta::Dealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- 0, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, // tp_flags
- "The metaclass of ProtocolMessages", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- 0, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- 0, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
- 0, // tp_alloc
- message_meta::New, // tp_new
-};
-
-static PyMessageMeta* CheckMessageClass(PyTypeObject* cls) {
- if (!PyObject_TypeCheck(cls, &PyMessageMeta_Type)) {
- PyErr_Format(PyExc_TypeError, "Class %s is not a Message", cls->tp_name);
- return NULL;
- }
- return reinterpret_cast<PyMessageMeta*>(cls);
-}
-
-static const Descriptor* GetMessageDescriptor(PyTypeObject* cls) {
- PyMessageMeta* type = CheckMessageClass(cls);
- if (type == NULL) {
- return NULL;
- }
- return type->message_descriptor;
-}
-
-// Forward declarations
-namespace cmessage {
-int InternalReleaseFieldByDescriptor(
- CMessage* self,
- const FieldDescriptor* field_descriptor,
- PyObject* composite_field);
-} // namespace cmessage
-
-// ---------------------------------------------------------------------
-// Visiting the composite children of a CMessage
-
-struct ChildVisitor {
- // Returns 0 on success, -1 on failure.
- int VisitRepeatedCompositeContainer(RepeatedCompositeContainer* container) {
- return 0;
- }
-
- // Returns 0 on success, -1 on failure.
- int VisitRepeatedScalarContainer(RepeatedScalarContainer* container) {
- return 0;
- }
-
- // Returns 0 on success, -1 on failure.
- int VisitCMessage(CMessage* cmessage,
- const FieldDescriptor* field_descriptor) {
- return 0;
- }
-};
-
-// Apply a function to a composite field. Does nothing if child is of
-// non-composite type.
-template<class Visitor>
-static int VisitCompositeField(const FieldDescriptor* descriptor,
- PyObject* child,
- Visitor visitor) {
- if (descriptor->label() == FieldDescriptor::LABEL_REPEATED) {
- if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- if (descriptor->is_map()) {
- MapContainer* container = reinterpret_cast<MapContainer*>(child);
- if (visitor.VisitMapContainer(container) == -1) {
- return -1;
- }
- } else {
- RepeatedCompositeContainer* container =
- reinterpret_cast<RepeatedCompositeContainer*>(child);
- if (visitor.VisitRepeatedCompositeContainer(container) == -1)
- return -1;
- }
- } else {
- RepeatedScalarContainer* container =
- reinterpret_cast<RepeatedScalarContainer*>(child);
- if (visitor.VisitRepeatedScalarContainer(container) == -1)
- return -1;
- }
- } else if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- CMessage* cmsg = reinterpret_cast<CMessage*>(child);
- if (visitor.VisitCMessage(cmsg, descriptor) == -1)
- return -1;
- }
- // The ExtensionDict might contain non-composite fields, which we
- // skip here.
- return 0;
-}
-
-// Visit each composite field and extension field of this CMessage.
-// Returns -1 on error and 0 on success.
-template<class Visitor>
-int ForEachCompositeField(CMessage* self, Visitor visitor) {
- Py_ssize_t pos = 0;
- PyObject* key;
- PyObject* field;
-
- // Visit normal fields.
- if (self->composite_fields) {
-      // Never use self->message in this function; it may already be freed.
- const Descriptor* message_descriptor =
- GetMessageDescriptor(Py_TYPE(self));
- while (PyDict_Next(self->composite_fields, &pos, &key, &field)) {
- Py_ssize_t key_str_size;
- char *key_str_data;
- if (PyString_AsStringAndSize(key, &key_str_data, &key_str_size) != 0)
- return -1;
- const string key_str(key_str_data, key_str_size);
- const FieldDescriptor* descriptor =
- message_descriptor->FindFieldByName(key_str);
- if (descriptor != NULL) {
- if (VisitCompositeField(descriptor, field, visitor) == -1)
- return -1;
- }
- }
- }
-
- // Visit extension fields.
- if (self->extensions != NULL) {
- pos = 0;
- while (PyDict_Next(self->extensions->values, &pos, &key, &field)) {
- const FieldDescriptor* descriptor = cmessage::GetExtensionDescriptor(key);
- if (descriptor == NULL)
- return -1;
- if (VisitCompositeField(descriptor, field, visitor) == -1)
- return -1;
- }
- }
-
- return 0;
-}
-
-// ---------------------------------------------------------------------
-
-// Constants used for integer type range checking.
-PyObject* kPythonZero;
-PyObject* kint32min_py;
-PyObject* kint32max_py;
-PyObject* kuint32max_py;
-PyObject* kint64min_py;
-PyObject* kint64max_py;
-PyObject* kuint64max_py;
-
-PyObject* EncodeError_class;
-PyObject* DecodeError_class;
-PyObject* PickleError_class;
-
-// Constant PyString values used for GetAttr/GetItem.
-static PyObject* k_cdescriptor;
-static PyObject* kfull_name;
-
-/* Is 64bit */
-void FormatTypeError(PyObject* arg, char* expected_types) {
- PyObject* repr = PyObject_Repr(arg);
- if (repr) {
- PyErr_Format(PyExc_TypeError,
- "%.100s has type %.100s, but expected one of: %s",
- PyString_AsString(repr),
- Py_TYPE(arg)->tp_name,
- expected_types);
- Py_DECREF(repr);
- }
-}
-
-template<class T>
-bool CheckAndGetInteger(
- PyObject* arg, T* value, PyObject* min, PyObject* max) {
- bool is_long = PyLong_Check(arg);
-#if PY_MAJOR_VERSION < 3
- if (!PyInt_Check(arg) && !is_long) {
- FormatTypeError(arg, "int, long");
- return false;
- }
- if (PyObject_Compare(min, arg) > 0 || PyObject_Compare(max, arg) < 0) {
-#else
- if (!is_long) {
- FormatTypeError(arg, "int");
- return false;
- }
- if (PyObject_RichCompareBool(min, arg, Py_LE) != 1 ||
- PyObject_RichCompareBool(max, arg, Py_GE) != 1) {
-#endif
- if (!PyErr_Occurred()) {
- PyObject *s = PyObject_Str(arg);
- if (s) {
- PyErr_Format(PyExc_ValueError,
- "Value out of range: %s",
- PyString_AsString(s));
- Py_DECREF(s);
- }
- }
- return false;
- }
-#if PY_MAJOR_VERSION < 3
- if (!is_long) {
- *value = static_cast<T>(PyInt_AsLong(arg));
- } else // NOLINT
-#endif
- {
- if (min == kPythonZero) {
- *value = static_cast<T>(PyLong_AsUnsignedLongLong(arg));
- } else {
- *value = static_cast<T>(PyLong_AsLongLong(arg));
- }
- }
- return true;
-}
-
-// These are referenced by repeated_scalar_container, and must
-// be explicitly instantiated.
-template bool CheckAndGetInteger<int32>(
- PyObject*, int32*, PyObject*, PyObject*);
-template bool CheckAndGetInteger<int64>(
- PyObject*, int64*, PyObject*, PyObject*);
-template bool CheckAndGetInteger<uint32>(
- PyObject*, uint32*, PyObject*, PyObject*);
-template bool CheckAndGetInteger<uint64>(
- PyObject*, uint64*, PyObject*, PyObject*);
-
-bool CheckAndGetDouble(PyObject* arg, double* value) {
- if (!PyInt_Check(arg) && !PyLong_Check(arg) &&
- !PyFloat_Check(arg)) {
- FormatTypeError(arg, "int, long, float");
- return false;
- }
- *value = PyFloat_AsDouble(arg);
- return true;
-}
-
-bool CheckAndGetFloat(PyObject* arg, float* value) {
- double double_value;
- if (!CheckAndGetDouble(arg, &double_value)) {
- return false;
- }
- *value = static_cast<float>(double_value);
- return true;
-}
-
-bool CheckAndGetBool(PyObject* arg, bool* value) {
- if (!PyInt_Check(arg) && !PyBool_Check(arg) && !PyLong_Check(arg)) {
- FormatTypeError(arg, "int, long, bool");
- return false;
- }
- *value = static_cast<bool>(PyInt_AsLong(arg));
- return true;
-}
-
-// Checks whether the given object (which must be "bytes" or "unicode") contains
-// valid UTF-8.
-bool IsValidUTF8(PyObject* obj) {
- if (PyBytes_Check(obj)) {
- PyObject* unicode = PyUnicode_FromEncodedObject(obj, "utf-8", NULL);
-
- // Clear the error indicator; we report our own error when desired.
- PyErr_Clear();
-
- if (unicode) {
- Py_DECREF(unicode);
- return true;
- } else {
- return false;
- }
- } else {
- // Unicode object, known to be valid UTF-8.
- return true;
- }
-}
-
-bool AllowInvalidUTF8(const FieldDescriptor* field) { return false; }
-
-PyObject* CheckString(PyObject* arg, const FieldDescriptor* descriptor) {
- GOOGLE_DCHECK(descriptor->type() == FieldDescriptor::TYPE_STRING ||
- descriptor->type() == FieldDescriptor::TYPE_BYTES);
- if (descriptor->type() == FieldDescriptor::TYPE_STRING) {
- if (!PyBytes_Check(arg) && !PyUnicode_Check(arg)) {
- FormatTypeError(arg, "bytes, unicode");
- return NULL;
- }
-
- if (!IsValidUTF8(arg) && !AllowInvalidUTF8(descriptor)) {
- PyObject* repr = PyObject_Repr(arg);
- PyErr_Format(PyExc_ValueError,
- "%s has type str, but isn't valid UTF-8 "
- "encoding. Non-UTF-8 strings must be converted to "
- "unicode objects before being added.",
- PyString_AsString(repr));
- Py_DECREF(repr);
- return NULL;
- }
- } else if (!PyBytes_Check(arg)) {
- FormatTypeError(arg, "bytes");
- return NULL;
- }
-
- PyObject* encoded_string = NULL;
- if (descriptor->type() == FieldDescriptor::TYPE_STRING) {
- if (PyBytes_Check(arg)) {
- // The bytes were already validated as correctly encoded UTF-8 above.
- encoded_string = arg; // Already encoded.
- Py_INCREF(encoded_string);
- } else {
- encoded_string = PyUnicode_AsEncodedObject(arg, "utf-8", NULL);
- }
- } else {
- // In this case field type is "bytes".
- encoded_string = arg;
- Py_INCREF(encoded_string);
- }
-
- return encoded_string;
-}
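A minimal Python-level sketch of the string checks above, assuming a hypothetical message `Foo` with a field `optional string text = 1`:

    # Hypothetical schema: message Foo { optional string text = 1; }
    from example_pb2 import Foo

    msg = Foo()
    msg.text = u'hello'            # unicode is always accepted
    msg.text = b'h\xc3\xa9llo'     # bytes are accepted only when they are valid UTF-8
    try:
        msg.text = b'\xff\xfe'     # invalid UTF-8: rejected with ValueError
    except ValueError:
        pass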
-
-bool CheckAndSetString(
- PyObject* arg, Message* message,
- const FieldDescriptor* descriptor,
- const Reflection* reflection,
- bool append,
- int index) {
- ScopedPyObjectPtr encoded_string(CheckString(arg, descriptor));
-
- if (encoded_string.get() == NULL) {
- return false;
- }
-
- char* value;
- Py_ssize_t value_len;
- if (PyBytes_AsStringAndSize(encoded_string.get(), &value, &value_len) < 0) {
- return false;
- }
-
- string value_string(value, value_len);
- if (append) {
- reflection->AddString(message, descriptor, value_string);
- } else if (index < 0) {
- reflection->SetString(message, descriptor, value_string);
- } else {
- reflection->SetRepeatedString(message, descriptor, index, value_string);
- }
- return true;
-}
-
-PyObject* ToStringObject(const FieldDescriptor* descriptor, string value) {
- if (descriptor->type() != FieldDescriptor::TYPE_STRING) {
- return PyBytes_FromStringAndSize(value.c_str(), value.length());
- }
-
- PyObject* result = PyUnicode_DecodeUTF8(value.c_str(), value.length(), NULL);
- // If the string can't be decoded in UTF-8, just return a string object that
- // contains the raw bytes. This can't happen if the value was assigned using
- // the members of the Python message object, but can happen if the values were
- // parsed from the wire (binary).
- if (result == NULL) {
- PyErr_Clear();
- result = PyBytes_FromStringAndSize(value.c_str(), value.length());
- }
- return result;
-}
-
-bool CheckFieldBelongsToMessage(const FieldDescriptor* field_descriptor,
- const Message* message) {
- if (message->GetDescriptor() == field_descriptor->containing_type()) {
- return true;
- }
- PyErr_Format(PyExc_KeyError, "Field '%s' does not belong to message '%s'",
- field_descriptor->full_name().c_str(),
- message->GetDescriptor()->full_name().c_str());
- return false;
-}
-
-namespace cmessage {
-
-PyDescriptorPool* GetDescriptorPoolForMessage(CMessage* message) {
- // No need to check the type: the type of instances of CMessage is always
- // an instance of PyMessageMeta. Let's prove it with a debug-only check.
- GOOGLE_DCHECK(PyObject_TypeCheck(message, &CMessage_Type));
- return reinterpret_cast<PyMessageMeta*>(Py_TYPE(message))->py_descriptor_pool;
-}
-
-MessageFactory* GetFactoryForMessage(CMessage* message) {
- return GetDescriptorPoolForMessage(message)->message_factory;
-}
-
-static int MaybeReleaseOverlappingOneofField(
- CMessage* cmessage,
- const FieldDescriptor* field) {
-#ifdef GOOGLE_PROTOBUF_HAS_ONEOF
- Message* message = cmessage->message;
- const Reflection* reflection = message->GetReflection();
- if (!field->containing_oneof() ||
- !reflection->HasOneof(*message, field->containing_oneof()) ||
- reflection->HasField(*message, field)) {
- // No other field in this oneof, no need to release.
- return 0;
- }
-
- const OneofDescriptor* oneof = field->containing_oneof();
- const FieldDescriptor* existing_field =
- reflection->GetOneofFieldDescriptor(*message, oneof);
- if (existing_field->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) {
- // Non-message fields don't need to be released.
- return 0;
- }
- const char* field_name = existing_field->name().c_str();
- PyObject* child_message = cmessage->composite_fields ?
- PyDict_GetItemString(cmessage->composite_fields, field_name) : NULL;
- if (child_message == NULL) {
- // No python reference to this field so no need to release.
- return 0;
- }
-
- if (InternalReleaseFieldByDescriptor(
- cmessage, existing_field, child_message) < 0) {
- return -1;
- }
- return PyDict_DelItemString(cmessage->composite_fields, field_name);
-#else
- return 0;
-#endif
-}
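A short sketch of the oneof behavior this function supports, assuming a hypothetical `Foo` with `oneof kind { string name = 1; Bar child = 2; }`:

    # Hypothetical schema:
    #   message Bar { optional int32 value = 1; }
    #   message Foo { oneof kind { string name = 1; Bar child = 2; } }
    from example_pb2 import Foo

    msg = Foo()
    msg.child.value = 1
    print(msg.WhichOneof('kind'))  # 'child'
    msg.name = 'x'                 # switches the oneof; the old child is released first
    print(msg.WhichOneof('kind'))  # 'name'
    print(msg.HasField('child'))   # False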
-
-// ---------------------------------------------------------------------
-// Making a message writable
-
-static Message* GetMutableMessage(
- CMessage* parent,
- const FieldDescriptor* parent_field) {
- Message* parent_message = parent->message;
- const Reflection* reflection = parent_message->GetReflection();
- if (MaybeReleaseOverlappingOneofField(parent, parent_field) < 0) {
- return NULL;
- }
- return reflection->MutableMessage(
- parent_message, parent_field, GetFactoryForMessage(parent));
-}
-
-struct FixupMessageReference : public ChildVisitor {
- // message must outlive this object.
- explicit FixupMessageReference(Message* message) :
- message_(message) {}
-
- int VisitRepeatedCompositeContainer(RepeatedCompositeContainer* container) {
- container->message = message_;
- return 0;
- }
-
- int VisitRepeatedScalarContainer(RepeatedScalarContainer* container) {
- container->message = message_;
- return 0;
- }
-
- int VisitMapContainer(MapContainer* container) {
- container->message = message_;
- return 0;
- }
-
- private:
- Message* message_;
-};
-
-int AssureWritable(CMessage* self) {
- if (self == NULL || !self->read_only) {
- return 0;
- }
-
- if (self->parent == NULL) {
- // If parent is NULL but we are trying to modify a read-only message, this
- // is a reference to a constant default instance that needs to be replaced
- // with a mutable top-level message.
- self->message = self->message->New();
- self->owner.reset(self->message);
-    // Cascade the new owner to any existing children: even if this message is
- // empty, some submessages or repeated containers might exist already.
- SetOwner(self, self->owner);
- } else {
- // Otherwise, we need a mutable child message.
- if (AssureWritable(self->parent) == -1)
- return -1;
-
- // Make self->message writable.
- Message* mutable_message = GetMutableMessage(
- self->parent,
- self->parent_field_descriptor);
- if (mutable_message == NULL) {
- return -1;
- }
- self->message = mutable_message;
- }
- self->read_only = false;
-
- // When a CMessage is made writable its Message pointer is updated
- // to point to a new mutable Message. When that happens we need to
- // update any references to the old, read-only CMessage. There are
- // four places such references occur: RepeatedScalarContainer,
- // RepeatedCompositeContainer, MapContainer, and ExtensionDict.
- if (self->extensions != NULL)
- self->extensions->message = self->message;
- if (ForEachCompositeField(self, FixupMessageReference(self->message)) == -1)
- return -1;
-
- return 0;
-}
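A minimal sketch of the read-only/default-instance behavior that AssureWritable() implements, using hypothetical messages `Foo` and `Bar`:

    # Hypothetical schema:
    #   message Bar { optional int32 value = 1; }
    #   message Foo { optional Bar child = 1; }
    from example_pb2 import Foo

    msg = Foo()
    child = msg.child              # read-only view backed by the default instance
    print(msg.HasField('child'))   # False, nothing has been written yet
    child.value = 5                # triggers AssureWritable() up the parent chain
    print(msg.HasField('child'))   # True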
-
-// --- Globals:
-
-// Retrieve a C++ FieldDescriptor for a message attribute.
-// The C++ message must be valid.
-// TODO(amauryfa): This function should stay internal, because exception
-// handling is not consistent.
-static const FieldDescriptor* GetFieldDescriptor(
- CMessage* self, PyObject* name) {
- const Descriptor *message_descriptor = self->message->GetDescriptor();
- char* field_name;
- Py_ssize_t size;
- if (PyString_AsStringAndSize(name, &field_name, &size) < 0) {
- return NULL;
- }
- const FieldDescriptor *field_descriptor =
- message_descriptor->FindFieldByName(string(field_name, size));
- if (field_descriptor == NULL) {
- // Note: No exception is set!
- return NULL;
- }
- return field_descriptor;
-}
-
-// Retrieve a C++ FieldDescriptor for an extension handle.
-const FieldDescriptor* GetExtensionDescriptor(PyObject* extension) {
- ScopedPyObjectPtr cdescriptor;
- if (!PyObject_TypeCheck(extension, &PyFieldDescriptor_Type)) {
- // Most callers consider extensions as a plain dictionary. We should
- // allow input which is not a field descriptor, and simply pretend it does
- // not exist.
- PyErr_SetObject(PyExc_KeyError, extension);
- return NULL;
- }
- return PyFieldDescriptor_AsDescriptor(extension);
-}
-
-// If value is a string, convert it into an enum value based on the labels in
-// descriptor, otherwise simply return value. Always returns a new reference.
-static PyObject* GetIntegerEnumValue(const FieldDescriptor& descriptor,
- PyObject* value) {
- if (PyString_Check(value) || PyUnicode_Check(value)) {
- const EnumDescriptor* enum_descriptor = descriptor.enum_type();
- if (enum_descriptor == NULL) {
- PyErr_SetString(PyExc_TypeError, "not an enum field");
- return NULL;
- }
- char* enum_label;
- Py_ssize_t size;
- if (PyString_AsStringAndSize(value, &enum_label, &size) < 0) {
- return NULL;
- }
- const EnumValueDescriptor* enum_value_descriptor =
- enum_descriptor->FindValueByName(string(enum_label, size));
- if (enum_value_descriptor == NULL) {
- PyErr_SetString(PyExc_ValueError, "unknown enum label");
- return NULL;
- }
- return PyInt_FromLong(enum_value_descriptor->number());
- }
- Py_INCREF(value);
- return value;
-}
-
-// If cmessage_list is not NULL, this function releases values into the
-// container CMessages instead of just removing them. The repeated composite
-// container needs to do this to make sure CMessages stay alive if they're still
-// referenced after deletion. Repeated scalar container doesn't need to worry.
-int InternalDeleteRepeatedField(
- CMessage* self,
- const FieldDescriptor* field_descriptor,
- PyObject* slice,
- PyObject* cmessage_list) {
- Message* message = self->message;
- Py_ssize_t length, from, to, step, slice_length;
- const Reflection* reflection = message->GetReflection();
- int min, max;
- length = reflection->FieldSize(*message, field_descriptor);
-
- if (PyInt_Check(slice) || PyLong_Check(slice)) {
- from = to = PyLong_AsLong(slice);
- if (from < 0) {
- from = to = length + from;
- }
- step = 1;
- min = max = from;
-
- // Range check.
- if (from < 0 || from >= length) {
- PyErr_Format(PyExc_IndexError, "list assignment index out of range");
- return -1;
- }
- } else if (PySlice_Check(slice)) {
- from = to = step = slice_length = 0;
- PySlice_GetIndicesEx(
-#if PY_MAJOR_VERSION < 3
- reinterpret_cast<PySliceObject*>(slice),
-#else
- slice,
-#endif
- length, &from, &to, &step, &slice_length);
- if (from < to) {
- min = from;
- max = to - 1;
- } else {
- min = to + 1;
- max = from;
- }
- } else {
- PyErr_SetString(PyExc_TypeError, "list indices must be integers");
- return -1;
- }
-
- Py_ssize_t i = from;
- std::vector<bool> to_delete(length, false);
- while (i >= min && i <= max) {
- to_delete[i] = true;
- i += step;
- }
-
- to = 0;
- for (i = 0; i < length; ++i) {
- if (!to_delete[i]) {
- if (i != to) {
- reflection->SwapElements(message, field_descriptor, i, to);
- if (cmessage_list != NULL) {
- // If a list of cmessages is passed in (i.e. from a repeated
- // composite container), swap those as well to correspond to the
- // swaps in the underlying message so they're in the right order
- // when we start releasing.
- PyObject* tmp = PyList_GET_ITEM(cmessage_list, i);
- PyList_SET_ITEM(cmessage_list, i,
- PyList_GET_ITEM(cmessage_list, to));
- PyList_SET_ITEM(cmessage_list, to, tmp);
- }
- }
- ++to;
- }
- }
-
- while (i > to) {
- if (cmessage_list == NULL) {
- reflection->RemoveLast(message, field_descriptor);
- } else {
- CMessage* last_cmessage = reinterpret_cast<CMessage*>(
- PyList_GET_ITEM(cmessage_list, PyList_GET_SIZE(cmessage_list) - 1));
- repeated_composite_container::ReleaseLastTo(
- self, field_descriptor, last_cmessage);
- if (PySequence_DelItem(cmessage_list, -1) < 0) {
- return -1;
- }
- }
- --i;
- }
-
- return 0;
-}
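At the Python level this backs index and slice deletion on repeated fields; a minimal sketch, assuming a hypothetical `Foo` with `repeated int32 values = 1`:

    # Hypothetical schema: message Foo { repeated int32 values = 1; }
    from example_pb2 import Foo

    msg = Foo()
    msg.values.extend([0, 1, 2, 3, 4])
    del msg.values[1]              # single index
    del msg.values[1:3]            # slice
    print(list(msg.values))        # [0, 4]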
-
-// Initializes fields of a message. Used in constructors.
-int InitAttributes(CMessage* self, PyObject* kwargs) {
- if (kwargs == NULL) {
- return 0;
- }
-
- Py_ssize_t pos = 0;
- PyObject* name;
- PyObject* value;
- while (PyDict_Next(kwargs, &pos, &name, &value)) {
- if (!PyString_Check(name)) {
- PyErr_SetString(PyExc_ValueError, "Field name must be a string");
- return -1;
- }
- const FieldDescriptor* descriptor = GetFieldDescriptor(self, name);
- if (descriptor == NULL) {
- PyErr_Format(PyExc_ValueError, "Protocol message %s has no \"%s\" field.",
- self->message->GetDescriptor()->name().c_str(),
- PyString_AsString(name));
- return -1;
- }
- if (descriptor->is_map()) {
- ScopedPyObjectPtr map(GetAttr(self, name));
- const FieldDescriptor* value_descriptor =
- descriptor->message_type()->FindFieldByName("value");
- if (value_descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- Py_ssize_t map_pos = 0;
- PyObject* map_key;
- PyObject* map_value;
- while (PyDict_Next(value, &map_pos, &map_key, &map_value)) {
- ScopedPyObjectPtr function_return;
- function_return.reset(PyObject_GetItem(map.get(), map_key));
- if (function_return.get() == NULL) {
- return -1;
- }
- ScopedPyObjectPtr ok(PyObject_CallMethod(
- function_return.get(), "MergeFrom", "O", map_value));
- if (ok.get() == NULL) {
- return -1;
- }
- }
- } else {
- ScopedPyObjectPtr function_return;
- function_return.reset(
- PyObject_CallMethod(map.get(), "update", "O", value));
- if (function_return.get() == NULL) {
- return -1;
- }
- }
- } else if (descriptor->label() == FieldDescriptor::LABEL_REPEATED) {
- ScopedPyObjectPtr container(GetAttr(self, name));
- if (container == NULL) {
- return -1;
- }
- if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- RepeatedCompositeContainer* rc_container =
- reinterpret_cast<RepeatedCompositeContainer*>(container.get());
- ScopedPyObjectPtr iter(PyObject_GetIter(value));
- if (iter == NULL) {
- PyErr_SetString(PyExc_TypeError, "Value must be iterable");
- return -1;
- }
- ScopedPyObjectPtr next;
- while ((next.reset(PyIter_Next(iter.get()))) != NULL) {
- PyObject* kwargs = (PyDict_Check(next.get()) ? next.get() : NULL);
- ScopedPyObjectPtr new_msg(
- repeated_composite_container::Add(rc_container, NULL, kwargs));
- if (new_msg == NULL) {
- return -1;
- }
- if (kwargs == NULL) {
- // next was not a dict, it's a message we need to merge
- ScopedPyObjectPtr merged(MergeFrom(
- reinterpret_cast<CMessage*>(new_msg.get()), next.get()));
- if (merged.get() == NULL) {
- return -1;
- }
- }
- }
- if (PyErr_Occurred()) {
- // Check to see how PyIter_Next() exited.
- return -1;
- }
- } else if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_ENUM) {
- RepeatedScalarContainer* rs_container =
- reinterpret_cast<RepeatedScalarContainer*>(container.get());
- ScopedPyObjectPtr iter(PyObject_GetIter(value));
- if (iter == NULL) {
- PyErr_SetString(PyExc_TypeError, "Value must be iterable");
- return -1;
- }
- ScopedPyObjectPtr next;
- while ((next.reset(PyIter_Next(iter.get()))) != NULL) {
- ScopedPyObjectPtr enum_value(
- GetIntegerEnumValue(*descriptor, next.get()));
- if (enum_value == NULL) {
- return -1;
- }
- ScopedPyObjectPtr new_msg(repeated_scalar_container::Append(
- rs_container, enum_value.get()));
- if (new_msg == NULL) {
- return -1;
- }
- }
- if (PyErr_Occurred()) {
- // Check to see how PyIter_Next() exited.
- return -1;
- }
- } else {
- if (ScopedPyObjectPtr(repeated_scalar_container::Extend(
- reinterpret_cast<RepeatedScalarContainer*>(container.get()),
- value)) ==
- NULL) {
- return -1;
- }
- }
- } else if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- ScopedPyObjectPtr message(GetAttr(self, name));
- if (message == NULL) {
- return -1;
- }
- CMessage* cmessage = reinterpret_cast<CMessage*>(message.get());
- if (PyDict_Check(value)) {
- if (InitAttributes(cmessage, value) < 0) {
- return -1;
- }
- } else {
- ScopedPyObjectPtr merged(MergeFrom(cmessage, value));
- if (merged == NULL) {
- return -1;
- }
- }
- } else {
- ScopedPyObjectPtr new_val;
- if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_ENUM) {
- new_val.reset(GetIntegerEnumValue(*descriptor, value));
- if (new_val == NULL) {
- return -1;
- }
- }
- if (SetAttr(self, name, (new_val.get() == NULL) ? value : new_val.get()) <
- 0) {
- return -1;
- }
- }
- }
- return 0;
-}
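A minimal sketch of the constructor keywords handled above, using a hypothetical `Foo` message (the field names and types are assumptions, not part of this file):

    # Hypothetical schema:
    #   message Bar { optional int32 x = 1; }
    #   enum Color { RED = 0; BLUE = 1; }
    #   message Foo {
    #     optional string name = 1;
    #     repeated int32 values = 2;
    #     optional Bar child = 3;
    #     optional Color color = 4;
    #     map<string, int32> counts = 5;
    #   }
    from example_pb2 import Foo

    msg = Foo(
        name='a',
        values=[1, 2, 3],      # any iterable works for a repeated field
        child={'x': 1},        # a dict (or a Bar instance) initializes the submessage
        color='BLUE',          # enum labels are resolved by GetIntegerEnumValue()
        counts={'k': 7},       # a dict updates the map field
    )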
-
-// Allocates an incomplete Python Message: the caller must fill self->message,
-// self->owner and, where applicable, self->parent.
-CMessage* NewEmptyMessage(PyObject* type, const Descriptor *descriptor) {
- CMessage* self = reinterpret_cast<CMessage*>(
- PyType_GenericAlloc(reinterpret_cast<PyTypeObject*>(type), 0));
- if (self == NULL) {
- return NULL;
- }
-
- self->message = NULL;
- self->parent = NULL;
- self->parent_field_descriptor = NULL;
- self->read_only = false;
- self->extensions = NULL;
-
- self->composite_fields = NULL;
-
- return self;
-}
-
-// The __new__ method of Message classes.
-// Creates a new C++ message and takes ownership.
-static PyObject* New(PyTypeObject* cls,
- PyObject* unused_args, PyObject* unused_kwargs) {
- PyMessageMeta* type = CheckMessageClass(cls);
- if (type == NULL) {
- return NULL;
- }
- // Retrieve the message descriptor and the default instance (=prototype).
- const Descriptor* message_descriptor = type->message_descriptor;
- if (message_descriptor == NULL) {
- return NULL;
- }
- const Message* default_message = type->py_descriptor_pool->message_factory
- ->GetPrototype(message_descriptor);
- if (default_message == NULL) {
- PyErr_SetString(PyExc_TypeError, message_descriptor->full_name().c_str());
- return NULL;
- }
-
- CMessage* self = NewEmptyMessage(reinterpret_cast<PyObject*>(type),
- message_descriptor);
- if (self == NULL) {
- return NULL;
- }
- self->message = default_message->New();
- self->owner.reset(self->message);
- return reinterpret_cast<PyObject*>(self);
-}
-
-// The __init__ method of Message classes.
-// It initializes fields from keywords passed to the constructor.
-static int Init(CMessage* self, PyObject* args, PyObject* kwargs) {
- if (PyTuple_Size(args) != 0) {
- PyErr_SetString(PyExc_TypeError, "No positional arguments allowed");
- return -1;
- }
-
- return InitAttributes(self, kwargs);
-}
-
-// ---------------------------------------------------------------------
-// Deallocating a CMessage
-//
-// Deallocating a CMessage requires that we clear any weak references
-// from children to the message being deallocated.
-
-// Clear the weak reference from the child to the parent.
-struct ClearWeakReferences : public ChildVisitor {
- int VisitRepeatedCompositeContainer(RepeatedCompositeContainer* container) {
- container->parent = NULL;
- // The elements in the container have the same parent as the
- // container itself, so NULL out that pointer as well.
- const Py_ssize_t n = PyList_GET_SIZE(container->child_messages);
- for (Py_ssize_t i = 0; i < n; ++i) {
- CMessage* child_cmessage = reinterpret_cast<CMessage*>(
- PyList_GET_ITEM(container->child_messages, i));
- child_cmessage->parent = NULL;
- }
- return 0;
- }
-
- int VisitRepeatedScalarContainer(RepeatedScalarContainer* container) {
- container->parent = NULL;
- return 0;
- }
-
- int VisitMapContainer(MapContainer* container) {
- container->parent = NULL;
- return 0;
- }
-
- int VisitCMessage(CMessage* cmessage,
- const FieldDescriptor* field_descriptor) {
- cmessage->parent = NULL;
- return 0;
- }
-};
-
-static void Dealloc(CMessage* self) {
- // Null out all weak references from children to this message.
- GOOGLE_CHECK_EQ(0, ForEachCompositeField(self, ClearWeakReferences()));
- if (self->extensions) {
- self->extensions->parent = NULL;
- }
-
- Py_CLEAR(self->extensions);
- Py_CLEAR(self->composite_fields);
- self->owner.reset();
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
-}
-
-// ---------------------------------------------------------------------
-
-
-PyObject* IsInitialized(CMessage* self, PyObject* args) {
- PyObject* errors = NULL;
- if (PyArg_ParseTuple(args, "|O", &errors) < 0) {
- return NULL;
- }
- if (self->message->IsInitialized()) {
- Py_RETURN_TRUE;
- }
- if (errors != NULL) {
- ScopedPyObjectPtr initialization_errors(
- FindInitializationErrors(self));
- if (initialization_errors == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr extend_name(PyString_FromString("extend"));
- if (extend_name == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr result(PyObject_CallMethodObjArgs(
- errors,
- extend_name.get(),
- initialization_errors.get(),
- NULL));
- if (result == NULL) {
- return NULL;
- }
- }
- Py_RETURN_FALSE;
-}
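A minimal sketch of the optional errors argument handled above, assuming a hypothetical proto2 message `Foo` with `required int32 id = 1`:

    # Hypothetical proto2 schema: message Foo { required int32 id = 1; }
    from example_pb2 import Foo

    msg = Foo()
    errors = []
    print(msg.IsInitialized(errors))  # False
    print(errors)                     # ['id'], filled from FindInitializationErrors()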
-
-PyObject* HasFieldByDescriptor(
- CMessage* self, const FieldDescriptor* field_descriptor) {
- Message* message = self->message;
- if (!CheckFieldBelongsToMessage(field_descriptor, message)) {
- return NULL;
- }
- if (field_descriptor->label() == FieldDescriptor::LABEL_REPEATED) {
- PyErr_SetString(PyExc_KeyError,
- "Field is repeated. A singular method is required.");
- return NULL;
- }
- bool has_field =
- message->GetReflection()->HasField(*message, field_descriptor);
- return PyBool_FromLong(has_field ? 1 : 0);
-}
-
-const FieldDescriptor* FindFieldWithOneofs(
- const Message* message, const string& field_name, bool* in_oneof) {
- *in_oneof = false;
- const Descriptor* descriptor = message->GetDescriptor();
- const FieldDescriptor* field_descriptor =
- descriptor->FindFieldByName(field_name);
- if (field_descriptor != NULL) {
- return field_descriptor;
- }
- const OneofDescriptor* oneof_desc =
- descriptor->FindOneofByName(field_name);
- if (oneof_desc != NULL) {
- *in_oneof = true;
- return message->GetReflection()->GetOneofFieldDescriptor(*message,
- oneof_desc);
- }
- return NULL;
-}
-
-bool CheckHasPresence(const FieldDescriptor* field_descriptor, bool in_oneof) {
- if (field_descriptor->label() == FieldDescriptor::LABEL_REPEATED) {
- PyErr_Format(PyExc_ValueError,
- "Protocol message has no singular \"%s\" field.",
- field_descriptor->name().c_str());
- return false;
- }
-
- if (field_descriptor->file()->syntax() == FileDescriptor::SYNTAX_PROTO3) {
- // HasField() for a oneof *itself* isn't supported.
- if (in_oneof) {
- PyErr_Format(PyExc_ValueError,
- "Can't test oneof field \"%s\" for presence in proto3, use "
- "WhichOneof instead.",
- field_descriptor->containing_oneof()->name().c_str());
- return false;
- }
-
- // ...but HasField() for fields *in* a oneof is supported.
- if (field_descriptor->containing_oneof() != NULL) {
- return true;
- }
-
- if (field_descriptor->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) {
- PyErr_Format(
- PyExc_ValueError,
- "Can't test non-submessage field \"%s\" for presence in proto3.",
- field_descriptor->name().c_str());
- return false;
- }
- }
-
- return true;
-}
-
-PyObject* HasField(CMessage* self, PyObject* arg) {
- char* field_name;
- Py_ssize_t size;
-#if PY_MAJOR_VERSION < 3
- if (PyString_AsStringAndSize(arg, &field_name, &size) < 0) {
- return NULL;
- }
-#else
- field_name = PyUnicode_AsUTF8AndSize(arg, &size);
- if (!field_name) {
- return NULL;
- }
-#endif
-
- Message* message = self->message;
- bool is_in_oneof;
- const FieldDescriptor* field_descriptor =
- FindFieldWithOneofs(message, string(field_name, size), &is_in_oneof);
- if (field_descriptor == NULL) {
- if (!is_in_oneof) {
- PyErr_Format(PyExc_ValueError, "Unknown field %s.", field_name);
- return NULL;
- } else {
- Py_RETURN_FALSE;
- }
- }
-
- if (!CheckHasPresence(field_descriptor, is_in_oneof)) {
- return NULL;
- }
-
- if (message->GetReflection()->HasField(*message, field_descriptor)) {
- Py_RETURN_TRUE;
- }
- if (!message->GetReflection()->SupportsUnknownEnumValues() &&
- field_descriptor->cpp_type() == FieldDescriptor::CPPTYPE_ENUM) {
- // Special case: Python HasField() differs in semantics from C++
- // slightly: we return HasField('enum_field') == true if there is
- // an unknown enum value present. To implement this we have to
- // look in the UnknownFieldSet.
- const UnknownFieldSet& unknown_field_set =
- message->GetReflection()->GetUnknownFields(*message);
- for (int i = 0; i < unknown_field_set.field_count(); ++i) {
- if (unknown_field_set.field(i).number() == field_descriptor->number()) {
- Py_RETURN_TRUE;
- }
- }
- }
- Py_RETURN_FALSE;
-}
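A minimal Python-level sketch of the presence rules enforced above, assuming a hypothetical proto3 message `Foo`:

    # Hypothetical proto3 schema:
    #   message Bar { int32 value = 1; }
    #   message Foo { int32 id = 1; Bar child = 2; oneof kind { string a = 3; int32 b = 4; } }
    from example_pb2 import Foo

    msg = Foo()
    print(msg.HasField('child'))   # False; message fields keep explicit presence in proto3
    msg.a = 'x'
    print(msg.HasField('a'))       # True; fields inside a oneof support HasField()
    try:
        msg.HasField('id')         # scalar proto3 field: raises ValueError
    except ValueError:
        pass
    print(msg.WhichOneof('kind'))  # 'a'; the oneof itself is queried with WhichOneof()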
-
-PyObject* ClearExtension(CMessage* self, PyObject* extension) {
- if (self->extensions != NULL) {
- return extension_dict::ClearExtension(self->extensions, extension);
- } else {
- const FieldDescriptor* descriptor = GetExtensionDescriptor(extension);
- if (descriptor == NULL) {
- return NULL;
- }
- if (ScopedPyObjectPtr(ClearFieldByDescriptor(self, descriptor)) == NULL) {
- return NULL;
- }
- }
- Py_RETURN_NONE;
-}
-
-PyObject* HasExtension(CMessage* self, PyObject* extension) {
- const FieldDescriptor* descriptor = GetExtensionDescriptor(extension);
- if (descriptor == NULL) {
- return NULL;
- }
- return HasFieldByDescriptor(self, descriptor);
-}
-
-// ---------------------------------------------------------------------
-// Releasing messages
-//
-// The Python API's ClearField() and Clear() methods behave
-// differently from their C++ counterparts. While the C++ versions
-// clear the children, the Python versions detach the children
-// without touching their content. This impedance mismatch causes
-// some complexity in the implementation, which is captured in this
-// section.
-//
-// When a CMessage field is cleared we need to:
-//
-// * Release the Message used as the backing store for the CMessage
-// from its parent.
-//
-// * Change the owner field of the released CMessage and all of its
-// children to point to the newly released Message.
-//
-// * Clear the weak references from the released CMessage to the
-// parent.
-//
-// When a RepeatedCompositeContainer field is cleared we need to:
-//
-// * Release all the Message used as the backing store for the
-// CMessages stored in the container.
-//
-// * Change the owner field of all the released CMessage and all of
-// their children to point to the newly released Messages.
-//
-// * Clear the weak references from the released container to the
-// parent.
-
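A minimal Python-level sketch of this detach-instead-of-clear behavior, using hypothetical messages `Foo` and `Bar`:

    # Hypothetical schema:
    #   message Bar { optional int32 value = 1; }
    #   message Foo { optional Bar child = 1; }
    from example_pb2 import Foo

    msg = Foo()
    msg.child.value = 5
    child = msg.child              # keep a Python reference to the submessage
    msg.ClearField('child')        # the parent forgets the field...
    print(child.value)             # 5; ...but the detached child keeps its content
    print(msg.HasField('child'))   # False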
-struct SetOwnerVisitor : public ChildVisitor {
- // new_owner must outlive this object.
- explicit SetOwnerVisitor(const shared_ptr<Message>& new_owner)
- : new_owner_(new_owner) {}
-
- int VisitRepeatedCompositeContainer(RepeatedCompositeContainer* container) {
- repeated_composite_container::SetOwner(container, new_owner_);
- return 0;
- }
-
- int VisitRepeatedScalarContainer(RepeatedScalarContainer* container) {
- repeated_scalar_container::SetOwner(container, new_owner_);
- return 0;
- }
-
- int VisitMapContainer(MapContainer* container) {
- container->SetOwner(new_owner_);
- return 0;
- }
-
- int VisitCMessage(CMessage* cmessage,
- const FieldDescriptor* field_descriptor) {
- return SetOwner(cmessage, new_owner_);
- }
-
- private:
- const shared_ptr<Message>& new_owner_;
-};
-
-// Change the owner of this CMessage and all its children, recursively.
-int SetOwner(CMessage* self, const shared_ptr<Message>& new_owner) {
- self->owner = new_owner;
- if (ForEachCompositeField(self, SetOwnerVisitor(new_owner)) == -1)
- return -1;
- return 0;
-}
-
-// Releases the message specified by 'field' and returns the
-// pointer. If the field does not exist a new message is created using
-// 'descriptor'. The caller takes ownership of the returned pointer.
-Message* ReleaseMessage(CMessage* self,
- const Descriptor* descriptor,
- const FieldDescriptor* field_descriptor) {
- MessageFactory* message_factory = GetFactoryForMessage(self);
- Message* released_message = self->message->GetReflection()->ReleaseMessage(
- self->message, field_descriptor, message_factory);
-  // If the field is not set, ReleaseMessage will return NULL, which differs
-  // from child_cmessage->message. In this case,
- // the latter points to the default instance via a const_cast<>, so we
- // have to reset it to a new mutable object since we are taking ownership.
- if (released_message == NULL) {
- const Message* prototype = message_factory->GetPrototype(descriptor);
- GOOGLE_DCHECK(prototype != NULL);
- released_message = prototype->New();
- }
-
- return released_message;
-}
-
-int ReleaseSubMessage(CMessage* self,
- const FieldDescriptor* field_descriptor,
- CMessage* child_cmessage) {
- // Release the Message
- shared_ptr<Message> released_message(ReleaseMessage(
- self, child_cmessage->message->GetDescriptor(), field_descriptor));
- child_cmessage->message = released_message.get();
- child_cmessage->owner.swap(released_message);
- child_cmessage->parent = NULL;
- child_cmessage->parent_field_descriptor = NULL;
- child_cmessage->read_only = false;
- return ForEachCompositeField(child_cmessage,
- SetOwnerVisitor(child_cmessage->owner));
-}
-
-struct ReleaseChild : public ChildVisitor {
- // message must outlive this object.
- explicit ReleaseChild(CMessage* parent) :
- parent_(parent) {}
-
- int VisitRepeatedCompositeContainer(RepeatedCompositeContainer* container) {
- return repeated_composite_container::Release(
- reinterpret_cast<RepeatedCompositeContainer*>(container));
- }
-
- int VisitRepeatedScalarContainer(RepeatedScalarContainer* container) {
- return repeated_scalar_container::Release(
- reinterpret_cast<RepeatedScalarContainer*>(container));
- }
-
- int VisitMapContainer(MapContainer* container) {
- return reinterpret_cast<MapContainer*>(container)->Release();
- }
-
- int VisitCMessage(CMessage* cmessage,
- const FieldDescriptor* field_descriptor) {
- return ReleaseSubMessage(parent_, field_descriptor,
- reinterpret_cast<CMessage*>(cmessage));
- }
-
- CMessage* parent_;
-};
-
-int InternalReleaseFieldByDescriptor(
- CMessage* self,
- const FieldDescriptor* field_descriptor,
- PyObject* composite_field) {
- return VisitCompositeField(
- field_descriptor,
- composite_field,
- ReleaseChild(self));
-}
-
-PyObject* ClearFieldByDescriptor(
- CMessage* self,
- const FieldDescriptor* descriptor) {
- if (!CheckFieldBelongsToMessage(descriptor, self->message)) {
- return NULL;
- }
- AssureWritable(self);
- self->message->GetReflection()->ClearField(self->message, descriptor);
- Py_RETURN_NONE;
-}
-
-PyObject* ClearField(CMessage* self, PyObject* arg) {
- if (!PyString_Check(arg)) {
- PyErr_SetString(PyExc_TypeError, "field name must be a string");
- return NULL;
- }
-#if PY_MAJOR_VERSION < 3
- const char* field_name = PyString_AS_STRING(arg);
- Py_ssize_t size = PyString_GET_SIZE(arg);
-#else
- Py_ssize_t size;
- const char* field_name = PyUnicode_AsUTF8AndSize(arg, &size);
-#endif
- AssureWritable(self);
- Message* message = self->message;
- ScopedPyObjectPtr arg_in_oneof;
- bool is_in_oneof;
- const FieldDescriptor* field_descriptor =
- FindFieldWithOneofs(message, string(field_name, size), &is_in_oneof);
- if (field_descriptor == NULL) {
- if (!is_in_oneof) {
- PyErr_Format(PyExc_ValueError,
- "Protocol message has no \"%s\" field.", field_name);
- return NULL;
- } else {
- Py_RETURN_NONE;
- }
- } else if (is_in_oneof) {
- const string& name = field_descriptor->name();
- arg_in_oneof.reset(PyString_FromStringAndSize(name.c_str(), name.size()));
- arg = arg_in_oneof.get();
- }
-
- PyObject* composite_field = self->composite_fields ?
- PyDict_GetItem(self->composite_fields, arg) : NULL;
-
- // Only release the field if there's a possibility that there are
- // references to it.
- if (composite_field != NULL) {
- if (InternalReleaseFieldByDescriptor(self, field_descriptor,
- composite_field) < 0) {
- return NULL;
- }
- PyDict_DelItem(self->composite_fields, arg);
- }
- message->GetReflection()->ClearField(message, field_descriptor);
- if (field_descriptor->cpp_type() == FieldDescriptor::CPPTYPE_ENUM &&
- !message->GetReflection()->SupportsUnknownEnumValues()) {
- UnknownFieldSet* unknown_field_set =
- message->GetReflection()->MutableUnknownFields(message);
- unknown_field_set->DeleteByNumber(field_descriptor->number());
- }
-
- Py_RETURN_NONE;
-}
-
-PyObject* Clear(CMessage* self) {
- AssureWritable(self);
- if (ForEachCompositeField(self, ReleaseChild(self)) == -1)
- return NULL;
- Py_CLEAR(self->extensions);
- if (self->composite_fields) {
- PyDict_Clear(self->composite_fields);
- }
- self->message->Clear();
- Py_RETURN_NONE;
-}
-
-// ---------------------------------------------------------------------
-
-static string GetMessageName(CMessage* self) {
- if (self->parent_field_descriptor != NULL) {
- return self->parent_field_descriptor->full_name();
- } else {
- return self->message->GetDescriptor()->full_name();
- }
-}
-
-static PyObject* SerializeToString(CMessage* self, PyObject* args) {
- if (!self->message->IsInitialized()) {
- ScopedPyObjectPtr errors(FindInitializationErrors(self));
- if (errors == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr comma(PyString_FromString(","));
- if (comma == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr joined(
- PyObject_CallMethod(comma.get(), "join", "O", errors.get()));
- if (joined == NULL) {
- return NULL;
- }
-
- // TODO(haberman): this is a (hopefully temporary) hack. The unit testing
- // infrastructure reloads all pure-Python modules for every test, but not
- // C++ modules (because that's generally impossible:
- // http://bugs.python.org/issue1144263). But if we cache EncodeError, we'll
- // return the EncodeError from a previous load of the module, which won't
- // match a user's attempt to catch EncodeError. So we have to look it up
- // again every time.
- ScopedPyObjectPtr message_module(PyImport_ImportModule(
- "google.protobuf.message"));
- if (message_module.get() == NULL) {
- return NULL;
- }
-
- ScopedPyObjectPtr encode_error(
- PyObject_GetAttrString(message_module.get(), "EncodeError"));
- if (encode_error.get() == NULL) {
- return NULL;
- }
- PyErr_Format(encode_error.get(),
- "Message %s is missing required fields: %s",
- GetMessageName(self).c_str(), PyString_AsString(joined.get()));
- return NULL;
- }
- int size = self->message->ByteSize();
- if (size <= 0) {
- return PyBytes_FromString("");
- }
- PyObject* result = PyBytes_FromStringAndSize(NULL, size);
- if (result == NULL) {
- return NULL;
- }
- char* buffer = PyBytes_AS_STRING(result);
- self->message->SerializeWithCachedSizesToArray(
- reinterpret_cast<uint8*>(buffer));
- return result;
-}
-
-static PyObject* SerializePartialToString(CMessage* self) {
- string contents;
- self->message->SerializePartialToString(&contents);
- return PyBytes_FromStringAndSize(contents.c_str(), contents.size());
-}
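A minimal sketch of the difference between the two serialization entry points above, assuming a hypothetical proto2 message `Foo` with a required field:

    # Hypothetical proto2 schema:
    #   message Foo { required int32 id = 1; optional string name = 2; }
    from google.protobuf.message import EncodeError
    from example_pb2 import Foo

    msg = Foo(name='x')
    try:
        msg.SerializeToString()            # EncodeError: missing required field 'id'
    except EncodeError:
        pass
    data = msg.SerializePartialToString()  # skips the IsInitialized() check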
-
-// Formats proto fields for ASCII dumps using Python formatting functions where
-// appropriate.
-class PythonFieldValuePrinter : public TextFormat::FieldValuePrinter {
- public:
- // Python has some differences from C++ when printing floating point numbers.
- //
- // 1) Trailing .0 is always printed.
- // 2) (Python2) Output is rounded to 12 digits.
- // 3) (Python3) The full precision of the double is preserved (and Python uses
-  //    David M. Gay's dtoa(), while the C++ code uses SimpleDtoa. There are
-  //    some differences, but they rarely happen.)
- //
- // We override floating point printing with the C-API function for printing
- // Python floats to ensure consistency.
- string PrintFloat(float value) const { return PrintDouble(value); }
- string PrintDouble(double value) const {
- // This implementation is not highly optimized (it allocates two temporary
- // Python objects) but it is simple and portable. If this is shown to be a
- // performance bottleneck, we can optimize it, but the results will likely
- // be more complicated to accommodate the differing behavior of double
- // formatting between Python 2 and Python 3.
- //
-    // (Though a valid question is: do we really want to make our output
- // dependent on the Python version?)
- ScopedPyObjectPtr py_value(PyFloat_FromDouble(value));
- if (!py_value.get()) {
- return string();
- }
-
- ScopedPyObjectPtr py_str(PyObject_Str(py_value.get()));
- if (!py_str.get()) {
- return string();
- }
-
- return string(PyString_AsString(py_str.get()));
- }
-};
-
-static PyObject* ToStr(CMessage* self) {
- TextFormat::Printer printer;
- // Passes ownership
- printer.SetDefaultFieldValuePrinter(new PythonFieldValuePrinter());
- printer.SetHideUnknownFields(true);
- string output;
- if (!printer.PrintToString(*self->message, &output)) {
- PyErr_SetString(PyExc_ValueError, "Unable to convert message to str");
- return NULL;
- }
- return PyString_FromString(output.c_str());
-}
-
-PyObject* MergeFrom(CMessage* self, PyObject* arg) {
- CMessage* other_message;
- if (!PyObject_TypeCheck(reinterpret_cast<PyObject *>(arg), &CMessage_Type)) {
- PyErr_SetString(PyExc_TypeError, "Must be a message");
- return NULL;
- }
-
- other_message = reinterpret_cast<CMessage*>(arg);
- if (other_message->message->GetDescriptor() !=
- self->message->GetDescriptor()) {
- PyErr_Format(PyExc_TypeError,
- "Tried to merge from a message with a different type. "
- "to: %s, from: %s",
- self->message->GetDescriptor()->full_name().c_str(),
- other_message->message->GetDescriptor()->full_name().c_str());
- return NULL;
- }
- AssureWritable(self);
-
- // TODO(tibell): Message::MergeFrom might turn some child Messages
- // into mutable messages, invalidating the message field in the
- // corresponding CMessages. We should run a FixupMessageReferences
- // pass here.
-
- self->message->MergeFrom(*other_message->message);
- Py_RETURN_NONE;
-}
-
-static PyObject* CopyFrom(CMessage* self, PyObject* arg) {
- CMessage* other_message;
- if (!PyObject_TypeCheck(reinterpret_cast<PyObject *>(arg), &CMessage_Type)) {
- PyErr_SetString(PyExc_TypeError, "Must be a message");
- return NULL;
- }
-
- other_message = reinterpret_cast<CMessage*>(arg);
-
- if (self == other_message) {
- Py_RETURN_NONE;
- }
-
- if (other_message->message->GetDescriptor() !=
- self->message->GetDescriptor()) {
- PyErr_Format(PyExc_TypeError,
- "Tried to copy from a message with a different type. "
- "to: %s, from: %s",
- self->message->GetDescriptor()->full_name().c_str(),
- other_message->message->GetDescriptor()->full_name().c_str());
- return NULL;
- }
-
- AssureWritable(self);
-
- // CopyFrom on the message will not clean up self->composite_fields,
- // which can leave us in an inconsistent state, so clear it out here.
- (void)ScopedPyObjectPtr(Clear(self));
-
- self->message->CopyFrom(*other_message->message);
-
- Py_RETURN_NONE;
-}
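A minimal sketch contrasting MergeFrom() and CopyFrom(), using a hypothetical `Foo` message:

    # Hypothetical proto2 schema:
    #   message Foo { repeated int32 values = 1; optional string name = 2; }
    from example_pb2 import Foo

    a = Foo(values=[1], name='a')
    b = Foo(values=[2])
    a.MergeFrom(b)               # repeated fields are concatenated, set scalars overwritten
    print(list(a.values))        # [1, 2]
    a.CopyFrom(b)                # Clear() followed by MergeFrom()
    print(list(a.values))        # [2]
    print(a.HasField('name'))    # False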
-
-static PyObject* MergeFromString(CMessage* self, PyObject* arg) {
- const void* data;
- Py_ssize_t data_length;
- if (PyObject_AsReadBuffer(arg, &data, &data_length) < 0) {
- return NULL;
- }
-
- AssureWritable(self);
- io::CodedInputStream input(
- reinterpret_cast<const uint8*>(data), data_length);
-#if PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS
-  // Protobuf has a 64MB limit built in; this code overrides it. Please do
- // not enable this unless you fully understand the implications: protobufs
- // must all be kept in memory at the same time, so if they grow too big you
- // may get OOM errors. The protobuf APIs do not provide any tools for
- // processing protobufs in chunks. If you have protos this big you should
- // break them up if it is at all convenient to do so.
- input.SetTotalBytesLimit(INT_MAX, INT_MAX);
-#endif // PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS
- PyDescriptorPool* pool = GetDescriptorPoolForMessage(self);
- input.SetExtensionRegistry(pool->pool, pool->message_factory);
- bool success = self->message->MergePartialFromCodedStream(&input);
- if (success) {
- return PyInt_FromLong(input.CurrentPosition());
- } else {
- PyErr_Format(DecodeError_class, "Error parsing message");
- return NULL;
- }
-}
-
-static PyObject* ParseFromString(CMessage* self, PyObject* arg) {
- if (ScopedPyObjectPtr(Clear(self)) == NULL) {
- return NULL;
- }
- return MergeFromString(self, arg);
-}
-
-static PyObject* ByteSize(CMessage* self, PyObject* args) {
- return PyLong_FromLong(self->message->ByteSize());
-}
-
-static PyObject* RegisterExtension(PyObject* cls,
- PyObject* extension_handle) {
- const FieldDescriptor* descriptor =
- GetExtensionDescriptor(extension_handle);
- if (descriptor == NULL) {
- return NULL;
- }
-
- ScopedPyObjectPtr extensions_by_name(
- PyObject_GetAttr(cls, k_extensions_by_name));
- if (extensions_by_name == NULL) {
- PyErr_SetString(PyExc_TypeError, "no extensions_by_name on class");
- return NULL;
- }
- ScopedPyObjectPtr full_name(PyObject_GetAttr(extension_handle, kfull_name));
- if (full_name == NULL) {
- return NULL;
- }
-
- // If the extension was already registered, check that it is the same.
- PyObject* existing_extension =
- PyDict_GetItem(extensions_by_name.get(), full_name.get());
- if (existing_extension != NULL) {
- const FieldDescriptor* existing_extension_descriptor =
- GetExtensionDescriptor(existing_extension);
- if (existing_extension_descriptor != descriptor) {
- PyErr_SetString(PyExc_ValueError, "Double registration of Extensions");
- return NULL;
- }
- // Nothing else to do.
- Py_RETURN_NONE;
- }
-
- if (PyDict_SetItem(extensions_by_name.get(), full_name.get(),
- extension_handle) < 0) {
- return NULL;
- }
-
- // Also store a mapping from extension number to implementing class.
- ScopedPyObjectPtr extensions_by_number(
- PyObject_GetAttr(cls, k_extensions_by_number));
- if (extensions_by_number == NULL) {
- PyErr_SetString(PyExc_TypeError, "no extensions_by_number on class");
- return NULL;
- }
- ScopedPyObjectPtr number(PyObject_GetAttrString(extension_handle, "number"));
- if (number == NULL) {
- return NULL;
- }
- if (PyDict_SetItem(extensions_by_number.get(), number.get(),
- extension_handle) < 0) {
- return NULL;
- }
-
- // Check if it's a message set
- if (descriptor->is_extension() &&
- descriptor->containing_type()->options().message_set_wire_format() &&
- descriptor->type() == FieldDescriptor::TYPE_MESSAGE &&
- descriptor->label() == FieldDescriptor::LABEL_OPTIONAL) {
- ScopedPyObjectPtr message_name(PyString_FromStringAndSize(
- descriptor->message_type()->full_name().c_str(),
- descriptor->message_type()->full_name().size()));
- if (message_name == NULL) {
- return NULL;
- }
- PyDict_SetItem(extensions_by_name.get(), message_name.get(),
- extension_handle);
- }
-
- Py_RETURN_NONE;
-}
-
-static PyObject* SetInParent(CMessage* self, PyObject* args) {
- AssureWritable(self);
- Py_RETURN_NONE;
-}
-
-static PyObject* WhichOneof(CMessage* self, PyObject* arg) {
- Py_ssize_t name_size;
- char *name_data;
- if (PyString_AsStringAndSize(arg, &name_data, &name_size) < 0)
- return NULL;
- string oneof_name = string(name_data, name_size);
- const OneofDescriptor* oneof_desc =
- self->message->GetDescriptor()->FindOneofByName(oneof_name);
- if (oneof_desc == NULL) {
- PyErr_Format(PyExc_ValueError,
- "Protocol message has no oneof \"%s\" field.",
- oneof_name.c_str());
- return NULL;
- }
- const FieldDescriptor* field_in_oneof =
- self->message->GetReflection()->GetOneofFieldDescriptor(
- *self->message, oneof_desc);
- if (field_in_oneof == NULL) {
- Py_RETURN_NONE;
- } else {
- const string& name = field_in_oneof->name();
- return PyString_FromStringAndSize(name.c_str(), name.size());
- }
-}
-
-static PyObject* GetExtensionDict(CMessage* self, void *closure);
-
-static PyObject* ListFields(CMessage* self) {
- vector<const FieldDescriptor*> fields;
- self->message->GetReflection()->ListFields(*self->message, &fields);
-
- // Normally, the list will be exactly the size of the fields.
- ScopedPyObjectPtr all_fields(PyList_New(fields.size()));
- if (all_fields == NULL) {
- return NULL;
- }
-
- // When there are unknown extensions, the py list will *not* contain
- // the field information. Thus the actual size of the py list will be
- // smaller than the size of fields. Set the actual size at the end.
- Py_ssize_t actual_size = 0;
- for (size_t i = 0; i < fields.size(); ++i) {
- ScopedPyObjectPtr t(PyTuple_New(2));
- if (t == NULL) {
- return NULL;
- }
-
- if (fields[i]->is_extension()) {
- ScopedPyObjectPtr extension_field(
- PyFieldDescriptor_FromDescriptor(fields[i]));
- if (extension_field == NULL) {
- return NULL;
- }
- // With C++ descriptors, the field can always be retrieved, but for
- // unknown extensions which have not been imported in Python code, there
- // is no message class and we cannot retrieve the value.
- // TODO(amauryfa): consider building the class on the fly!
- if (fields[i]->message_type() != NULL &&
- cdescriptor_pool::GetMessageClass(
- GetDescriptorPoolForMessage(self),
- fields[i]->message_type()) == NULL) {
- PyErr_Clear();
- continue;
- }
- ScopedPyObjectPtr extensions(GetExtensionDict(self, NULL));
- if (extensions == NULL) {
- return NULL;
- }
- // 'extension' reference later stolen by PyTuple_SET_ITEM.
- PyObject* extension = PyObject_GetItem(
- extensions.get(), extension_field.get());
- if (extension == NULL) {
- return NULL;
- }
- PyTuple_SET_ITEM(t.get(), 0, extension_field.release());
- // Steals reference to 'extension'
- PyTuple_SET_ITEM(t.get(), 1, extension);
- } else {
- // Normal field
- const string& field_name = fields[i]->name();
- ScopedPyObjectPtr py_field_name(PyString_FromStringAndSize(
- field_name.c_str(), field_name.length()));
- if (py_field_name == NULL) {
- PyErr_SetString(PyExc_ValueError, "bad string");
- return NULL;
- }
- ScopedPyObjectPtr field_descriptor(
- PyFieldDescriptor_FromDescriptor(fields[i]));
- if (field_descriptor == NULL) {
- return NULL;
- }
-
- PyObject* field_value = GetAttr(self, py_field_name.get());
- if (field_value == NULL) {
- PyErr_SetObject(PyExc_ValueError, py_field_name.get());
- return NULL;
- }
- PyTuple_SET_ITEM(t.get(), 0, field_descriptor.release());
- PyTuple_SET_ITEM(t.get(), 1, field_value);
- }
- PyList_SET_ITEM(all_fields.get(), actual_size, t.release());
- ++actual_size;
- }
- Py_SIZE(all_fields.get()) = actual_size;
- return all_fields.release();
-}
-
-PyObject* FindInitializationErrors(CMessage* self) {
- Message* message = self->message;
- vector<string> errors;
- message->FindInitializationErrors(&errors);
-
- PyObject* error_list = PyList_New(errors.size());
- if (error_list == NULL) {
- return NULL;
- }
- for (size_t i = 0; i < errors.size(); ++i) {
- const string& error = errors[i];
- PyObject* error_string = PyString_FromStringAndSize(
- error.c_str(), error.length());
- if (error_string == NULL) {
- Py_DECREF(error_list);
- return NULL;
- }
- PyList_SET_ITEM(error_list, i, error_string);
- }
- return error_list;
-}
-
-static PyObject* RichCompare(CMessage* self, PyObject* other, int opid) {
- // Only equality comparisons are implemented.
- if (opid != Py_EQ && opid != Py_NE) {
- Py_INCREF(Py_NotImplemented);
- return Py_NotImplemented;
- }
- bool equals = true;
- // If other is not a message, it cannot be equal.
- if (!PyObject_TypeCheck(other, &CMessage_Type)) {
- equals = false;
- }
- const google::protobuf::Message* other_message =
- reinterpret_cast<CMessage*>(other)->message;
- // If messages don't have the same descriptors, they are not equal.
- if (equals &&
- self->message->GetDescriptor() != other_message->GetDescriptor()) {
- equals = false;
- }
- // Check the message contents.
- if (equals && !google::protobuf::util::MessageDifferencer::Equals(
- *self->message,
- *reinterpret_cast<CMessage*>(other)->message)) {
- equals = false;
- }
- if (equals ^ (opid == Py_EQ)) {
- Py_RETURN_FALSE;
- } else {
- Py_RETURN_TRUE;
- }
-}
-
-PyObject* InternalGetScalar(const Message* message,
- const FieldDescriptor* field_descriptor) {
- const Reflection* reflection = message->GetReflection();
-
- if (!CheckFieldBelongsToMessage(field_descriptor, message)) {
- return NULL;
- }
-
- PyObject* result = NULL;
- switch (field_descriptor->cpp_type()) {
- case FieldDescriptor::CPPTYPE_INT32: {
- int32 value = reflection->GetInt32(*message, field_descriptor);
- result = PyInt_FromLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_INT64: {
- int64 value = reflection->GetInt64(*message, field_descriptor);
- result = PyLong_FromLongLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT32: {
- uint32 value = reflection->GetUInt32(*message, field_descriptor);
- result = PyInt_FromSize_t(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT64: {
- uint64 value = reflection->GetUInt64(*message, field_descriptor);
- result = PyLong_FromUnsignedLongLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_FLOAT: {
- float value = reflection->GetFloat(*message, field_descriptor);
- result = PyFloat_FromDouble(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_DOUBLE: {
- double value = reflection->GetDouble(*message, field_descriptor);
- result = PyFloat_FromDouble(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_BOOL: {
- bool value = reflection->GetBool(*message, field_descriptor);
- result = PyBool_FromLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_STRING: {
- string value = reflection->GetString(*message, field_descriptor);
- result = ToStringObject(field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_ENUM: {
- if (!message->GetReflection()->SupportsUnknownEnumValues() &&
- !message->GetReflection()->HasField(*message, field_descriptor)) {
- // Look for the value in the unknown fields.
- const UnknownFieldSet& unknown_field_set =
- message->GetReflection()->GetUnknownFields(*message);
- for (int i = 0; i < unknown_field_set.field_count(); ++i) {
- if (unknown_field_set.field(i).number() ==
- field_descriptor->number() &&
- unknown_field_set.field(i).type() ==
- google::protobuf::UnknownField::TYPE_VARINT) {
- result = PyInt_FromLong(unknown_field_set.field(i).varint());
- break;
- }
- }
- }
-
- if (result == NULL) {
- const EnumValueDescriptor* enum_value =
- message->GetReflection()->GetEnum(*message, field_descriptor);
- result = PyInt_FromLong(enum_value->number());
- }
- break;
- }
- default:
- PyErr_Format(
- PyExc_SystemError, "Getting a value from a field of unknown type %d",
- field_descriptor->cpp_type());
- }
-
- return result;
-}
-
-PyObject* InternalGetSubMessage(
- CMessage* self, const FieldDescriptor* field_descriptor) {
- const Reflection* reflection = self->message->GetReflection();
- PyDescriptorPool* pool = GetDescriptorPoolForMessage(self);
- const Message& sub_message = reflection->GetMessage(
- *self->message, field_descriptor, pool->message_factory);
-
- PyObject *message_class = cdescriptor_pool::GetMessageClass(
- pool, field_descriptor->message_type());
- if (message_class == NULL) {
- return NULL;
- }
-
- CMessage* cmsg = cmessage::NewEmptyMessage(message_class,
- sub_message.GetDescriptor());
- if (cmsg == NULL) {
- return NULL;
- }
-
- cmsg->owner = self->owner;
- cmsg->parent = self;
- cmsg->parent_field_descriptor = field_descriptor;
- cmsg->read_only = !reflection->HasField(*self->message, field_descriptor);
- cmsg->message = const_cast<Message*>(&sub_message);
-
- return reinterpret_cast<PyObject*>(cmsg);
-}
-
-int InternalSetNonOneofScalar(
- Message* message,
- const FieldDescriptor* field_descriptor,
- PyObject* arg) {
- const Reflection* reflection = message->GetReflection();
-
- if (!CheckFieldBelongsToMessage(field_descriptor, message)) {
- return -1;
- }
-
- switch (field_descriptor->cpp_type()) {
- case FieldDescriptor::CPPTYPE_INT32: {
- GOOGLE_CHECK_GET_INT32(arg, value, -1);
- reflection->SetInt32(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_INT64: {
- GOOGLE_CHECK_GET_INT64(arg, value, -1);
- reflection->SetInt64(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT32: {
- GOOGLE_CHECK_GET_UINT32(arg, value, -1);
- reflection->SetUInt32(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT64: {
- GOOGLE_CHECK_GET_UINT64(arg, value, -1);
- reflection->SetUInt64(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_FLOAT: {
- GOOGLE_CHECK_GET_FLOAT(arg, value, -1);
- reflection->SetFloat(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_DOUBLE: {
- GOOGLE_CHECK_GET_DOUBLE(arg, value, -1);
- reflection->SetDouble(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_BOOL: {
- GOOGLE_CHECK_GET_BOOL(arg, value, -1);
- reflection->SetBool(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_STRING: {
- if (!CheckAndSetString(
- arg, message, field_descriptor, reflection, false, -1)) {
- return -1;
- }
- break;
- }
- case FieldDescriptor::CPPTYPE_ENUM: {
- GOOGLE_CHECK_GET_INT32(arg, value, -1);
- if (reflection->SupportsUnknownEnumValues()) {
- reflection->SetEnumValue(message, field_descriptor, value);
- } else {
- const EnumDescriptor* enum_descriptor = field_descriptor->enum_type();
- const EnumValueDescriptor* enum_value =
- enum_descriptor->FindValueByNumber(value);
- if (enum_value != NULL) {
- reflection->SetEnum(message, field_descriptor, enum_value);
- } else {
- PyErr_Format(PyExc_ValueError, "Unknown enum value: %d", value);
- return -1;
- }
- }
- break;
- }
- default:
- PyErr_Format(
- PyExc_SystemError, "Setting value to a field of unknown type %d",
- field_descriptor->cpp_type());
- return -1;
- }
-
- return 0;
-}
-
-int InternalSetScalar(
- CMessage* self,
- const FieldDescriptor* field_descriptor,
- PyObject* arg) {
- if (!CheckFieldBelongsToMessage(field_descriptor, self->message)) {
- return -1;
- }
-
- if (MaybeReleaseOverlappingOneofField(self, field_descriptor) < 0) {
- return -1;
- }
-
- return InternalSetNonOneofScalar(self->message, field_descriptor, arg);
-}
-
-PyObject* FromString(PyTypeObject* cls, PyObject* serialized) {
- PyObject* py_cmsg = PyObject_CallObject(
- reinterpret_cast<PyObject*>(cls), NULL);
- if (py_cmsg == NULL) {
- return NULL;
- }
- CMessage* cmsg = reinterpret_cast<CMessage*>(py_cmsg);
-
- ScopedPyObjectPtr py_length(MergeFromString(cmsg, serialized));
- if (py_length == NULL) {
- Py_DECREF(py_cmsg);
- return NULL;
- }
-
- return py_cmsg;
-}
-
-PyObject* DeepCopy(CMessage* self, PyObject* arg) {
- PyObject* clone = PyObject_CallObject(
- reinterpret_cast<PyObject*>(Py_TYPE(self)), NULL);
- if (clone == NULL) {
- return NULL;
- }
- if (!PyObject_TypeCheck(clone, &CMessage_Type)) {
- Py_DECREF(clone);
- return NULL;
- }
- if (ScopedPyObjectPtr(MergeFrom(
- reinterpret_cast<CMessage*>(clone),
- reinterpret_cast<PyObject*>(self))) == NULL) {
- Py_DECREF(clone);
- return NULL;
- }
- return clone;
-}
-
-PyObject* ToUnicode(CMessage* self) {
- // Lazy import to prevent circular dependencies
- ScopedPyObjectPtr text_format(
- PyImport_ImportModule("google.protobuf.text_format"));
- if (text_format == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr method_name(PyString_FromString("MessageToString"));
- if (method_name == NULL) {
- return NULL;
- }
- Py_INCREF(Py_True);
- ScopedPyObjectPtr encoded(PyObject_CallMethodObjArgs(
- text_format.get(), method_name.get(), self, Py_True, NULL));
- Py_DECREF(Py_True);
- if (encoded == NULL) {
- return NULL;
- }
-#if PY_MAJOR_VERSION < 3
- PyObject* decoded = PyString_AsDecodedObject(encoded.get(), "utf-8", NULL);
-#else
- PyObject* decoded = PyUnicode_FromEncodedObject(encoded.get(), "utf-8", NULL);
-#endif
- if (decoded == NULL) {
- return NULL;
- }
- return decoded;
-}
-
-PyObject* Reduce(CMessage* self) {
- ScopedPyObjectPtr constructor(reinterpret_cast<PyObject*>(Py_TYPE(self)));
- constructor.inc();
- ScopedPyObjectPtr args(PyTuple_New(0));
- if (args == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr state(PyDict_New());
- if (state == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr serialized(SerializePartialToString(self));
- if (serialized == NULL) {
- return NULL;
- }
- if (PyDict_SetItemString(state.get(), "serialized", serialized.get()) < 0) {
- return NULL;
- }
- return Py_BuildValue("OOO", constructor.get(), args.get(), state.get());
-}
-
-PyObject* SetState(CMessage* self, PyObject* state) {
- if (!PyDict_Check(state)) {
- PyErr_SetString(PyExc_TypeError, "state not a dict");
- return NULL;
- }
- PyObject* serialized = PyDict_GetItemString(state, "serialized");
- if (serialized == NULL) {
- return NULL;
- }
- if (ScopedPyObjectPtr(ParseFromString(self, serialized)) == NULL) {
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-// CMessage static methods:
-PyObject* _CheckCalledFromGeneratedFile(PyObject* unused,
- PyObject* unused_arg) {
- if (!_CalledFromGeneratedFile(1)) {
- PyErr_SetString(PyExc_TypeError,
- "Descriptors should not be created directly, "
- "but only retrieved from their parent.");
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-static PyObject* GetExtensionDict(CMessage* self, void *closure) {
- if (self->extensions) {
- Py_INCREF(self->extensions);
- return reinterpret_cast<PyObject*>(self->extensions);
- }
-
- // If there are extension_ranges, the message is "extendable". Allocate a
- // dictionary to store the extension fields.
- const Descriptor* descriptor = GetMessageDescriptor(Py_TYPE(self));
- if (descriptor->extension_range_count() > 0) {
- ExtensionDict* extension_dict = extension_dict::NewExtensionDict(self);
- if (extension_dict == NULL) {
- return NULL;
- }
- self->extensions = extension_dict;
- Py_INCREF(self->extensions);
- return reinterpret_cast<PyObject*>(self->extensions);
- }
-
- PyErr_SetNone(PyExc_AttributeError);
- return NULL;
-}
-
-static PyGetSetDef Getters[] = {
- {"Extensions", (getter)GetExtensionDict, NULL, "Extension dict"},
- {NULL}
-};
-
-static PyMethodDef Methods[] = {
- { "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
- "Makes a deep copy of the class." },
- { "__reduce__", (PyCFunction)Reduce, METH_NOARGS,
- "Outputs picklable representation of the message." },
- { "__setstate__", (PyCFunction)SetState, METH_O,
- "Inputs picklable representation of the message." },
- { "__unicode__", (PyCFunction)ToUnicode, METH_NOARGS,
- "Outputs a unicode representation of the message." },
- { "ByteSize", (PyCFunction)ByteSize, METH_NOARGS,
- "Returns the size of the message in bytes." },
- { "Clear", (PyCFunction)Clear, METH_NOARGS,
- "Clears the message." },
- { "ClearExtension", (PyCFunction)ClearExtension, METH_O,
- "Clears an extension field." },
- { "ClearField", (PyCFunction)ClearField, METH_O,
- "Clears a message field." },
- { "CopyFrom", (PyCFunction)CopyFrom, METH_O,
- "Copies a protocol message into the current message." },
- { "FindInitializationErrors", (PyCFunction)FindInitializationErrors,
- METH_NOARGS,
- "Finds unset required fields." },
- { "FromString", (PyCFunction)FromString, METH_O | METH_CLASS,
- "Creates a new message instance from the given serialized data." },
- { "HasExtension", (PyCFunction)HasExtension, METH_O,
- "Checks if an extension field is set." },
- { "HasField", (PyCFunction)HasField, METH_O,
- "Checks if a message field is set." },
- { "IsInitialized", (PyCFunction)IsInitialized, METH_VARARGS,
- "Checks if all required fields of a protocol message are set." },
- { "ListFields", (PyCFunction)ListFields, METH_NOARGS,
- "Lists all set fields of a message." },
- { "MergeFrom", (PyCFunction)MergeFrom, METH_O,
- "Merges a protocol message into the current message." },
- { "MergeFromString", (PyCFunction)MergeFromString, METH_O,
- "Merges a serialized message into the current message." },
- { "ParseFromString", (PyCFunction)ParseFromString, METH_O,
- "Parses a serialized message into the current message." },
- { "RegisterExtension", (PyCFunction)RegisterExtension, METH_O | METH_CLASS,
- "Registers an extension with the current message." },
- { "SerializePartialToString", (PyCFunction)SerializePartialToString,
- METH_NOARGS,
- "Serializes the message to a string, even if it isn't initialized." },
- { "SerializeToString", (PyCFunction)SerializeToString, METH_NOARGS,
- "Serializes the message to a string, only for initialized messages." },
- { "SetInParent", (PyCFunction)SetInParent, METH_NOARGS,
- "Sets the has bit of the given field in its parent message." },
- { "WhichOneof", (PyCFunction)WhichOneof, METH_O,
- "Returns the name of the field set inside a oneof, "
- "or None if no field is set." },
-
- // Static Methods.
- { "_CheckCalledFromGeneratedFile", (PyCFunction)_CheckCalledFromGeneratedFile,
- METH_NOARGS | METH_STATIC,
- "Raises TypeError if the caller is not in a _pb2.py file."},
- { NULL, NULL}
-};
-
-static bool SetCompositeField(
- CMessage* self, PyObject* name, PyObject* value) {
- if (self->composite_fields == NULL) {
- self->composite_fields = PyDict_New();
- if (self->composite_fields == NULL) {
- return false;
- }
- }
- return PyDict_SetItem(self->composite_fields, name, value) == 0;
-}
-
-PyObject* GetAttr(CMessage* self, PyObject* name) {
- PyObject* value = self->composite_fields ?
- PyDict_GetItem(self->composite_fields, name) : NULL;
- if (value != NULL) {
- Py_INCREF(value);
- return value;
- }
-
- const FieldDescriptor* field_descriptor = GetFieldDescriptor(self, name);
- if (field_descriptor == NULL) {
- return CMessage_Type.tp_base->tp_getattro(
- reinterpret_cast<PyObject*>(self), name);
- }
-
- if (field_descriptor->is_map()) {
- PyObject* py_container = NULL;
- const Descriptor* entry_type = field_descriptor->message_type();
- const FieldDescriptor* value_type = entry_type->FindFieldByName("value");
- if (value_type->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- PyObject* value_class = cdescriptor_pool::GetMessageClass(
- GetDescriptorPoolForMessage(self), value_type->message_type());
- if (value_class == NULL) {
- return NULL;
- }
- py_container =
- NewMessageMapContainer(self, field_descriptor, value_class);
- } else {
- py_container = NewScalarMapContainer(self, field_descriptor);
- }
- if (py_container == NULL) {
- return NULL;
- }
- if (!SetCompositeField(self, name, py_container)) {
- Py_DECREF(py_container);
- return NULL;
- }
- return py_container;
- }
-
- if (field_descriptor->label() == FieldDescriptor::LABEL_REPEATED) {
- PyObject* py_container = NULL;
- if (field_descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- PyObject *message_class = cdescriptor_pool::GetMessageClass(
- GetDescriptorPoolForMessage(self), field_descriptor->message_type());
- if (message_class == NULL) {
- return NULL;
- }
- py_container = repeated_composite_container::NewContainer(
- self, field_descriptor, message_class);
- } else {
- py_container = repeated_scalar_container::NewContainer(
- self, field_descriptor);
- }
- if (py_container == NULL) {
- return NULL;
- }
- if (!SetCompositeField(self, name, py_container)) {
- Py_DECREF(py_container);
- return NULL;
- }
- return py_container;
- }
-
- if (field_descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- PyObject* sub_message = InternalGetSubMessage(self, field_descriptor);
- if (sub_message == NULL) {
- return NULL;
- }
- if (!SetCompositeField(self, name, sub_message)) {
- Py_DECREF(sub_message);
- return NULL;
- }
- return sub_message;
- }
-
- return InternalGetScalar(self->message, field_descriptor);
-}
-
-int SetAttr(CMessage* self, PyObject* name, PyObject* value) {
- if (self->composite_fields && PyDict_Contains(self->composite_fields, name)) {
- PyErr_SetString(PyExc_TypeError, "Can't set composite field");
- return -1;
- }
-
- const FieldDescriptor* field_descriptor = GetFieldDescriptor(self, name);
- if (field_descriptor != NULL) {
- AssureWritable(self);
- if (field_descriptor->label() == FieldDescriptor::LABEL_REPEATED) {
- PyErr_Format(PyExc_AttributeError, "Assignment not allowed to repeated "
- "field \"%s\" in protocol message object.",
- field_descriptor->name().c_str());
- return -1;
- } else {
- if (field_descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- PyErr_Format(PyExc_AttributeError, "Assignment not allowed to "
- "field \"%s\" in protocol message object.",
- field_descriptor->name().c_str());
- return -1;
- } else {
- return InternalSetScalar(self, field_descriptor, value);
- }
- }
- }
-
- PyErr_Format(PyExc_AttributeError,
- "Assignment not allowed "
- "(no field \"%s\" in protocol message object).",
- PyString_AsString(name));
- return -1;
-}
-
-} // namespace cmessage
-
-PyTypeObject CMessage_Type = {
- PyVarObject_HEAD_INIT(&PyMessageMeta_Type, 0)
- FULL_MODULE_NAME ".CMessage", // tp_name
- sizeof(CMessage), // tp_basicsize
- 0, // tp_itemsize
- (destructor)cmessage::Dealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- 0, // tp_as_sequence
- 0, // tp_as_mapping
- PyObject_HashNotImplemented, // tp_hash
- 0, // tp_call
- (reprfunc)cmessage::ToStr, // tp_str
- (getattrofunc)cmessage::GetAttr, // tp_getattro
- (setattrofunc)cmessage::SetAttr, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, // tp_flags
- "A ProtocolMessage", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- (richcmpfunc)cmessage::RichCompare, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- cmessage::Methods, // tp_methods
- 0, // tp_members
- cmessage::Getters, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- (initproc)cmessage::Init, // tp_init
- 0, // tp_alloc
- cmessage::New, // tp_new
-};
-
-// --- Exposing the C proto living inside a Python proto to C code:
-
-const Message* (*GetCProtoInsidePyProtoPtr)(PyObject* msg);
-Message* (*MutableCProtoInsidePyProtoPtr)(PyObject* msg);
-
-static const Message* GetCProtoInsidePyProtoImpl(PyObject* msg) {
- if (!PyObject_TypeCheck(msg, &CMessage_Type)) {
- return NULL;
- }
- CMessage* cmsg = reinterpret_cast<CMessage*>(msg);
- return cmsg->message;
-}
-
-static Message* MutableCProtoInsidePyProtoImpl(PyObject* msg) {
- if (!PyObject_TypeCheck(msg, &CMessage_Type)) {
- return NULL;
- }
- CMessage* cmsg = reinterpret_cast<CMessage*>(msg);
- if ((cmsg->composite_fields && PyDict_Size(cmsg->composite_fields) != 0) ||
- (cmsg->extensions != NULL &&
- PyDict_Size(cmsg->extensions->values) != 0)) {
- // There is currently no way of accurately syncing arbitrary changes to
- // the underlying C++ message back to the CMessage (e.g. removed repeated
- // composite containers). We only allow direct mutation of the underlying
- // C++ message if there is no child data in the CMessage.
- return NULL;
- }
- cmessage::AssureWritable(cmsg);
- return cmsg->message;
-}
-
-static const char module_docstring[] =
-"python-proto2 is a module that can be used to enhance proto2 Python API\n"
-"performance.\n"
-"\n"
-"It provides access to the protocol buffers C++ reflection API that\n"
-"implements the basic protocol buffer functions.";
-
-void InitGlobals() {
- // TODO(gps): Check all return values in this function for NULL and propagate
- // the error (MemoryError) on up to result in an import failure. These should
- // also be freed and reset to NULL during finalization.
- kPythonZero = PyInt_FromLong(0);
- kint32min_py = PyInt_FromLong(kint32min);
- kint32max_py = PyInt_FromLong(kint32max);
- kuint32max_py = PyLong_FromLongLong(kuint32max);
- kint64min_py = PyLong_FromLongLong(kint64min);
- kint64max_py = PyLong_FromLongLong(kint64max);
- kuint64max_py = PyLong_FromUnsignedLongLong(kuint64max);
-
- kDESCRIPTOR = PyString_FromString("DESCRIPTOR");
- k_cdescriptor = PyString_FromString("_cdescriptor");
- kfull_name = PyString_FromString("full_name");
- k_extensions_by_name = PyString_FromString("_extensions_by_name");
- k_extensions_by_number = PyString_FromString("_extensions_by_number");
-
- PyObject *dummy_obj = PySet_New(NULL);
- kEmptyWeakref = PyWeakref_NewRef(dummy_obj, NULL);
- Py_DECREF(dummy_obj);
-}
-
-bool InitProto2MessageModule(PyObject *m) {
- // Initialize types and globals in descriptor.cc
- if (!InitDescriptor()) {
- return false;
- }
-
- // Initialize types and globals in descriptor_pool.cc
- if (!InitDescriptorPool()) {
- return false;
- }
-
- // Initialize constants defined in this file.
- InitGlobals();
-
- PyMessageMeta_Type.tp_base = &PyType_Type;
- if (PyType_Ready(&PyMessageMeta_Type) < 0) {
- return false;
- }
- PyModule_AddObject(m, "MessageMeta",
- reinterpret_cast<PyObject*>(&PyMessageMeta_Type));
-
- if (PyType_Ready(&CMessage_Type) < 0) {
- return false;
- }
-
- // DESCRIPTOR is set on each protocol buffer message class elsewhere, but set
- // it here as well to document that subclasses need to set it.
- PyDict_SetItem(CMessage_Type.tp_dict, kDESCRIPTOR, Py_None);
- // Subclasses with message extensions will override _extensions_by_name and
- // _extensions_by_number with fresh mutable dictionaries in AddDescriptors.
- // All other classes can share this same immutable mapping.
- ScopedPyObjectPtr empty_dict(PyDict_New());
- if (empty_dict == NULL) {
- return false;
- }
- ScopedPyObjectPtr immutable_dict(PyDictProxy_New(empty_dict.get()));
- if (immutable_dict == NULL) {
- return false;
- }
- if (PyDict_SetItem(CMessage_Type.tp_dict,
- k_extensions_by_name, immutable_dict.get()) < 0) {
- return false;
- }
- if (PyDict_SetItem(CMessage_Type.tp_dict,
- k_extensions_by_number, immutable_dict.get()) < 0) {
- return false;
- }
-
- PyModule_AddObject(m, "Message", reinterpret_cast<PyObject*>(&CMessage_Type));
-
- // Initialize Repeated container types.
- {
- if (PyType_Ready(&RepeatedScalarContainer_Type) < 0) {
- return false;
- }
-
- PyModule_AddObject(m, "RepeatedScalarContainer",
- reinterpret_cast<PyObject*>(
- &RepeatedScalarContainer_Type));
-
- if (PyType_Ready(&RepeatedCompositeContainer_Type) < 0) {
- return false;
- }
-
- PyModule_AddObject(
- m, "RepeatedCompositeContainer",
- reinterpret_cast<PyObject*>(
- &RepeatedCompositeContainer_Type));
-
- // Register them with collections.MutableSequence.
- ScopedPyObjectPtr collections(PyImport_ImportModule("collections"));
- if (collections == NULL) {
- return false;
- }
- ScopedPyObjectPtr mutable_sequence(
- PyObject_GetAttrString(collections.get(), "MutableSequence"));
- if (mutable_sequence == NULL) {
- return false;
- }
- if (ScopedPyObjectPtr(
- PyObject_CallMethod(mutable_sequence.get(), "register", "O",
- &RepeatedScalarContainer_Type)) == NULL) {
- return false;
- }
- if (ScopedPyObjectPtr(
- PyObject_CallMethod(mutable_sequence.get(), "register", "O",
- &RepeatedCompositeContainer_Type)) == NULL) {
- return false;
- }
- }
-
- // Initialize Map container types.
- {
- // ScalarMapContainer_Type derives from our MutableMapping type.
- ScopedPyObjectPtr containers(PyImport_ImportModule(
- "google.protobuf.internal.containers"));
- if (containers == NULL) {
- return false;
- }
-
- ScopedPyObjectPtr mutable_mapping(
- PyObject_GetAttrString(containers.get(), "MutableMapping"));
- if (mutable_mapping == NULL) {
- return false;
- }
-
- if (!PyObject_TypeCheck(mutable_mapping.get(), &PyType_Type)) {
- return false;
- }
-
- Py_INCREF(mutable_mapping.get());
-#if PY_MAJOR_VERSION >= 3
- PyObject* bases = PyTuple_New(1);
- PyTuple_SET_ITEM(bases, 0, mutable_mapping.get());
-
- ScalarMapContainer_Type =
- PyType_FromSpecWithBases(&ScalarMapContainer_Type_spec, bases);
- PyModule_AddObject(m, "ScalarMapContainer", ScalarMapContainer_Type);
-#else
- ScalarMapContainer_Type.tp_base =
- reinterpret_cast<PyTypeObject*>(mutable_mapping.get());
-
- if (PyType_Ready(&ScalarMapContainer_Type) < 0) {
- return false;
- }
-
- PyModule_AddObject(m, "ScalarMapContainer",
- reinterpret_cast<PyObject*>(&ScalarMapContainer_Type));
-#endif
-
- if (PyType_Ready(&MapIterator_Type) < 0) {
- return false;
- }
-
- PyModule_AddObject(m, "MapIterator",
- reinterpret_cast<PyObject*>(&MapIterator_Type));
-
-
-#if PY_MAJOR_VERSION >= 3
- MessageMapContainer_Type =
- PyType_FromSpecWithBases(&MessageMapContainer_Type_spec, bases);
- PyModule_AddObject(m, "MessageMapContainer", MessageMapContainer_Type);
-#else
- Py_INCREF(mutable_mapping.get());
- MessageMapContainer_Type.tp_base =
- reinterpret_cast<PyTypeObject*>(mutable_mapping.get());
-
- if (PyType_Ready(&MessageMapContainer_Type) < 0) {
- return false;
- }
-
- PyModule_AddObject(m, "MessageMapContainer",
- reinterpret_cast<PyObject*>(&MessageMapContainer_Type));
-#endif
- }
-
- if (PyType_Ready(&ExtensionDict_Type) < 0) {
- return false;
- }
- PyModule_AddObject(
- m, "ExtensionDict",
- reinterpret_cast<PyObject*>(&ExtensionDict_Type));
-
- // Expose the DescriptorPool used to hold all descriptors added from generated
- // pb2.py files.
- // PyModule_AddObject steals a reference.
- Py_INCREF(GetDefaultDescriptorPool());
- PyModule_AddObject(m, "default_pool",
- reinterpret_cast<PyObject*>(GetDefaultDescriptorPool()));
-
- PyModule_AddObject(m, "DescriptorPool", reinterpret_cast<PyObject*>(
- &PyDescriptorPool_Type));
-
- // This implementation provides full Descriptor types; we advertise it so
- // that descriptor.py can use them in place of the Python classes.
- PyModule_AddIntConstant(m, "_USE_C_DESCRIPTORS", 1);
-
- PyModule_AddObject(m, "Descriptor", reinterpret_cast<PyObject*>(
- &PyMessageDescriptor_Type));
- PyModule_AddObject(m, "FieldDescriptor", reinterpret_cast<PyObject*>(
- &PyFieldDescriptor_Type));
- PyModule_AddObject(m, "EnumDescriptor", reinterpret_cast<PyObject*>(
- &PyEnumDescriptor_Type));
- PyModule_AddObject(m, "EnumValueDescriptor", reinterpret_cast<PyObject*>(
- &PyEnumValueDescriptor_Type));
- PyModule_AddObject(m, "FileDescriptor", reinterpret_cast<PyObject*>(
- &PyFileDescriptor_Type));
- PyModule_AddObject(m, "OneofDescriptor", reinterpret_cast<PyObject*>(
- &PyOneofDescriptor_Type));
-
- PyObject* enum_type_wrapper = PyImport_ImportModule(
- "google.protobuf.internal.enum_type_wrapper");
- if (enum_type_wrapper == NULL) {
- return false;
- }
- EnumTypeWrapper_class =
- PyObject_GetAttrString(enum_type_wrapper, "EnumTypeWrapper");
- Py_DECREF(enum_type_wrapper);
-
- PyObject* message_module = PyImport_ImportModule(
- "google.protobuf.message");
- if (message_module == NULL) {
- return false;
- }
- EncodeError_class = PyObject_GetAttrString(message_module, "EncodeError");
- DecodeError_class = PyObject_GetAttrString(message_module, "DecodeError");
- PythonMessage_class = PyObject_GetAttrString(message_module, "Message");
- Py_DECREF(message_module);
-
- PyObject* pickle_module = PyImport_ImportModule("pickle");
- if (pickle_module == NULL) {
- return false;
- }
- PickleError_class = PyObject_GetAttrString(pickle_module, "PickleError");
- Py_DECREF(pickle_module);
-
- // Override {Get,Mutable}CProtoInsidePyProto.
- GetCProtoInsidePyProtoPtr = GetCProtoInsidePyProtoImpl;
- MutableCProtoInsidePyProtoPtr = MutableCProtoInsidePyProtoImpl;
-
- return true;
-}
-
-} // namespace python
-} // namespace protobuf
-
-
-#if PY_MAJOR_VERSION >= 3
-static struct PyModuleDef _module = {
- PyModuleDef_HEAD_INIT,
- "_message",
- google::protobuf::python::module_docstring,
- -1,
- NULL,
- NULL,
- NULL,
- NULL,
- NULL
-};
-#define INITFUNC PyInit__message
-#define INITFUNC_ERRORVAL NULL
-#else // Python 2
-#define INITFUNC init_message
-#define INITFUNC_ERRORVAL
-#endif
-
-extern "C" {
- PyMODINIT_FUNC INITFUNC(void) {
- PyObject* m;
-#if PY_MAJOR_VERSION >= 3
- m = PyModule_Create(&_module);
-#else
- m = Py_InitModule3("_message", NULL, google::protobuf::python::module_docstring);
-#endif
- if (m == NULL) {
- return INITFUNC_ERRORVAL;
- }
-
- if (!google::protobuf::python::InitProto2MessageModule(m)) {
- Py_DECREF(m);
- return INITFUNC_ERRORVAL;
- }
-
-#if PY_MAJOR_VERSION >= 3
- return m;
-#endif
- }
-}
-} // namespace google
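
The deleted message.cc above implements the google.protobuf.pyext._message extension module; the methods it registers (ParseFromString, SerializeToString, MergeFrom, WhichOneof, ListFields, plus __deepcopy__ and __reduce__/__setstate__ for copying and pickling) are what back the generated-message API when the C++ implementation is active. A minimal usage sketch, assuming a hypothetical generated module foo_pb2 with a message Foo that has a string field "name" and a oneof "contact" containing a string field "email":

    import copy
    import pickle

    import foo_pb2  # hypothetical generated module

    msg = foo_pb2.Foo(name="Ada")   # keyword arguments go through InitAttributes
    msg.email = "ada@example.com"   # assigning a oneof member selects it

    data = msg.SerializeToString()  # wire-format round trip
    clone = foo_pb2.Foo()
    clone.ParseFromString(data)

    assert clone.WhichOneof("contact") == "email"  # name of the field set in the oneof

    for descriptor, value in clone.ListFields():   # (FieldDescriptor, value) pairs
        print(descriptor.name, value)

    copied = copy.deepcopy(clone)                  # __deepcopy__
    restored = pickle.loads(pickle.dumps(clone))   # __reduce__ / __setstate__
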
diff --git a/third_party/protobuf/python/google/protobuf/pyext/message.h b/third_party/protobuf/python/google/protobuf/pyext/message.h
deleted file mode 100644
index cc0012e95e..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/message.h
+++ /dev/null
@@ -1,330 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: anuraag@google.com (Anuraag Agrawal)
-// Author: tibell@google.com (Johan Tibell)
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_H__
-
-#include <Python.h>
-
-#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
-#include <string>
-
-namespace google {
-namespace protobuf {
-
-class Message;
-class Reflection;
-class FieldDescriptor;
-class Descriptor;
-class DescriptorPool;
-class MessageFactory;
-
-#ifdef _SHARED_PTR_H
-using std::shared_ptr;
-using std::string;
-#else
-using internal::shared_ptr;
-#endif
-
-namespace python {
-
-struct ExtensionDict;
-struct PyDescriptorPool;
-
-typedef struct CMessage {
- PyObject_HEAD;
-
- // This is the top-level C++ Message object that owns the whole
- // proto tree. Every Python CMessage holds a reference to it in
- // order to keep it alive as long as there's a Python object that
- // references any part of the tree.
- shared_ptr<Message> owner;
-
- // Weak reference to a parent CMessage object. This is NULL for any top-level
- // message and is set for any child message (i.e. a child submessage or a
- // part of a repeated composite field).
- //
- // Used to make sure all ancestors are also mutable when first modifying
- // a child submessage (in other words, turning a default message instance
- // into a mutable one).
- //
- // If a submessage is released (becomes a new top-level message), this field
- // MUST be set to NULL. The parent may get deallocated and further attempts
- // to use this pointer will result in a crash.
- struct CMessage* parent;
-
- // Pointer to the parent's descriptor that describes this submessage.
- // Used together with the parent's message when making a default message
- // instance mutable.
- // The pointer is owned by the global DescriptorPool.
- const FieldDescriptor* parent_field_descriptor;
-
- // Pointer to the C++ Message object for this CMessage. The
- // CMessage does not own this pointer.
- Message* message;
-
- // Indicates this submessage is pointing to a default instance of a message.
- // Submessages are always first created as read only messages and are then
- // made writable, at which point this field is set to false.
- bool read_only;
-
- // A reference to a Python dictionary containing CMessage,
- // RepeatedCompositeContainer, and RepeatedScalarContainer
- // objects. Used as a cache to make sure we don't have to make a
- // Python wrapper for the C++ Message objects on every access, or
- // deal with the synchronization nightmare that could create.
- PyObject* composite_fields;
-
- // A reference to the dictionary containing the message's extensions.
- // Similar to composite_fields, acting as a cache, but also contains the
- // required extension dict logic.
- ExtensionDict* extensions;
-} CMessage;
-
-extern PyTypeObject CMessage_Type;
-
-namespace cmessage {
-
-// Internal function to create a new empty Message Python object whose pointers
-// to the C++ objects are left unset.
-// The caller must fill self->message, self->owner and, if applicable, self->parent.
-CMessage* NewEmptyMessage(PyObject* type, const Descriptor* descriptor);
-
-// Release a submessage from its proto tree, making it a new top-level message.
-// A new message will be created if this is a read-only default instance.
-//
-// Corresponds to reflection api method ReleaseMessage.
-int ReleaseSubMessage(CMessage* self,
- const FieldDescriptor* field_descriptor,
- CMessage* child_cmessage);
-
-// Retrieves the C++ descriptor of a Python Extension descriptor.
-// On error, return NULL with an exception set.
-const FieldDescriptor* GetExtensionDescriptor(PyObject* extension);
-
-// Initializes a new CMessage instance for a submessage. Only called once per
-// submessage as the result is cached in composite_fields.
-//
-// Corresponds to reflection api method GetMessage.
-PyObject* InternalGetSubMessage(
- CMessage* self, const FieldDescriptor* field_descriptor);
-
-// Deletes a range of C++ submessages in a repeated field (following a
-// removal in a RepeatedCompositeContainer).
-//
-// Releases messages to the provided cmessage_list if it is not NULL rather
-// than just removing them from the underlying proto. This cmessage_list must
-// have a CMessage for each underlying submessage. The CMessages referred to
-// by slice will be removed from cmessage_list by this function.
-//
-// Corresponds to reflection api method RemoveLast.
-int InternalDeleteRepeatedField(CMessage* self,
- const FieldDescriptor* field_descriptor,
- PyObject* slice, PyObject* cmessage_list);
-
-// Sets the specified scalar value to the message.
-int InternalSetScalar(CMessage* self,
- const FieldDescriptor* field_descriptor,
- PyObject* value);
-
-// Sets the specified scalar value to the message. Requires it is not a Oneof.
-int InternalSetNonOneofScalar(Message* message,
- const FieldDescriptor* field_descriptor,
- PyObject* arg);
-
-// Retrieves the specified scalar value from the message.
-//
-// Returns a new python reference.
-PyObject* InternalGetScalar(const Message* message,
- const FieldDescriptor* field_descriptor);
-
-// Clears the message, removing all contained data. Extension dictionary and
-// submessages are released first if there are remaining external references.
-//
-// Corresponds to message api method Clear.
-PyObject* Clear(CMessage* self);
-
-// Clears the data described by the given descriptor. Used to clear extensions
-// (which don't have names). Extension release is handled by ExtensionDict
-// class, not this function.
-// TODO(anuraag): Try to make this discrepancy in release semantics with
-// ClearField less confusing.
-//
-// Corresponds to reflection api method ClearField.
-PyObject* ClearFieldByDescriptor(
- CMessage* self, const FieldDescriptor* descriptor);
-
-// Clears the data for the given field name. The message is released if there
-// are any external references.
-//
-// Corresponds to reflection api method ClearField.
-PyObject* ClearField(CMessage* self, PyObject* arg);
-
-// Checks if the message has the field described by the descriptor. Used for
-// extensions (which have no name).
-//
-// Corresponds to reflection api method HasField
-PyObject* HasFieldByDescriptor(
- CMessage* self, const FieldDescriptor* field_descriptor);
-
-// Checks if the message has the named field.
-//
-// Corresponds to reflection api method HasField.
-PyObject* HasField(CMessage* self, PyObject* arg);
-
-// Initializes values of fields on a newly constructed message.
-int InitAttributes(CMessage* self, PyObject* kwargs);
-
-PyObject* MergeFrom(CMessage* self, PyObject* arg);
-
-// Retrieves an attribute named 'name' from CMessage 'self'. Returns
-// the attribute value on success, or NULL on failure.
-//
-// Returns a new reference.
-PyObject* GetAttr(CMessage* self, PyObject* name);
-
-// Set the value of the attribute named 'name', for CMessage 'self',
-// to the value 'value'. Returns -1 on failure.
-int SetAttr(CMessage* self, PyObject* name, PyObject* value);
-
-PyObject* FindInitializationErrors(CMessage* self);
-
-// Set the owner field of self and any children of self, recursively.
-// Used when self is being released and thus has a new owner (the
-// released Message.)
-int SetOwner(CMessage* self, const shared_ptr<Message>& new_owner);
-
-int AssureWritable(CMessage* self);
-
-// Returns the "best" DescriptorPool for the given message.
-// This is often equivalent to message.DESCRIPTOR.pool, but not when the message
-// class was created from a MessageFactory using a custom pool that uses the
-// generated pool as an underlay.
-//
-// The returned pool is suitable for finding fields and building submessages,
-// even in the case of extensions.
-PyDescriptorPool* GetDescriptorPoolForMessage(CMessage* message);
-
-} // namespace cmessage
-
-
-/* Is 64bit */
-#define IS_64BIT (SIZEOF_LONG == 8)
-
-#define FIELD_IS_REPEATED(field_descriptor) \
- ((field_descriptor)->label() == FieldDescriptor::LABEL_REPEATED)
-
-#define GOOGLE_CHECK_GET_INT32(arg, value, err) \
- int32 value; \
- if (!CheckAndGetInteger(arg, &value, kint32min_py, kint32max_py)) { \
- return err; \
- }
-
-#define GOOGLE_CHECK_GET_INT64(arg, value, err) \
- int64 value; \
- if (!CheckAndGetInteger(arg, &value, kint64min_py, kint64max_py)) { \
- return err; \
- }
-
-#define GOOGLE_CHECK_GET_UINT32(arg, value, err) \
- uint32 value; \
- if (!CheckAndGetInteger(arg, &value, kPythonZero, kuint32max_py)) { \
- return err; \
- }
-
-#define GOOGLE_CHECK_GET_UINT64(arg, value, err) \
- uint64 value; \
- if (!CheckAndGetInteger(arg, &value, kPythonZero, kuint64max_py)) { \
- return err; \
- }
-
-#define GOOGLE_CHECK_GET_FLOAT(arg, value, err) \
- float value; \
- if (!CheckAndGetFloat(arg, &value)) { \
- return err; \
- } \
-
-#define GOOGLE_CHECK_GET_DOUBLE(arg, value, err) \
- double value; \
- if (!CheckAndGetDouble(arg, &value)) { \
- return err; \
- }
-
-#define GOOGLE_CHECK_GET_BOOL(arg, value, err) \
- bool value; \
- if (!CheckAndGetBool(arg, &value)) { \
- return err; \
- }
-
-
-extern PyObject* kPythonZero;
-extern PyObject* kint32min_py;
-extern PyObject* kint32max_py;
-extern PyObject* kuint32max_py;
-extern PyObject* kint64min_py;
-extern PyObject* kint64max_py;
-extern PyObject* kuint64max_py;
-
-#define FULL_MODULE_NAME "google.protobuf.pyext._message"
-
-void FormatTypeError(PyObject* arg, char* expected_types);
-template<class T>
-bool CheckAndGetInteger(
- PyObject* arg, T* value, PyObject* min, PyObject* max);
-bool CheckAndGetDouble(PyObject* arg, double* value);
-bool CheckAndGetFloat(PyObject* arg, float* value);
-bool CheckAndGetBool(PyObject* arg, bool* value);
-PyObject* CheckString(PyObject* arg, const FieldDescriptor* descriptor);
-bool CheckAndSetString(
- PyObject* arg, Message* message,
- const FieldDescriptor* descriptor,
- const Reflection* reflection,
- bool append,
- int index);
-PyObject* ToStringObject(const FieldDescriptor* descriptor, string value);
-
-// Check if the passed field descriptor belongs to the given message.
-// If not, return false and set a Python exception (a KeyError)
-bool CheckFieldBelongsToMessage(const FieldDescriptor* field_descriptor,
- const Message* message);
-
-extern PyObject* PickleError_class;
-
-} // namespace python
-} // namespace protobuf
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_H__
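
The parent/read_only bookkeeping in the CMessage struct above is what gives the Python API its lazy, copy-on-write feel: a submessage attribute can be read before it exists, and it only becomes present in its parent once it is written (or SetInParent() is called), at which point AssureWritable walks up the tree. A small sketch of the observable behavior, again against a hypothetical foo_pb2.Foo that has a submessage field "child" with an int32 field "bb":

    import foo_pb2  # hypothetical generated module

    msg = foo_pb2.Foo()

    # Reading a submessage returns a read-only default instance; presence is unchanged.
    assert not msg.HasField("child")
    _ = msg.child.bb

    # Writing (or calling SetInParent) makes the child present in its parent.
    msg.child.bb = 1
    assert msg.HasField("child")
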
diff --git a/third_party/protobuf/python/google/protobuf/pyext/proto2_api_test.proto b/third_party/protobuf/python/google/protobuf/pyext/proto2_api_test.proto
deleted file mode 100644
index 18aecfb7d6..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/proto2_api_test.proto
+++ /dev/null
@@ -1,40 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-syntax = "proto2";
-
-import "google/protobuf/internal/cpp/proto1_api_test.proto";
-
-package google.protobuf.python.internal;
-
-message TestNestedProto1APIMessage {
- optional int32 a = 1;
- optional TestMessage.NestedMessage b = 2;
-}
diff --git a/third_party/protobuf/python/google/protobuf/pyext/python.proto b/third_party/protobuf/python/google/protobuf/pyext/python.proto
deleted file mode 100644
index cce645d71a..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/python.proto
+++ /dev/null
@@ -1,68 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: tibell@google.com (Johan Tibell)
-//
-// These message definitions are used to exercise known corner cases
-// in the C++ implementation of the Python API.
-
-syntax = "proto2";
-
-package google.protobuf.python.internal;
-
-// Protos optimized for SPEED use a strict superset of the generated code
-// of equivalent ones optimized for CODE_SIZE, so we should optimize all our
-// tests for speed unless explicitly testing code size optimization.
-option optimize_for = SPEED;
-
-message TestAllTypes {
- message NestedMessage {
- optional int32 bb = 1;
- optional ForeignMessage cc = 2;
- }
-
- repeated NestedMessage repeated_nested_message = 1;
- optional NestedMessage optional_nested_message = 2;
- optional int32 optional_int32 = 3;
-}
-
-message ForeignMessage {
- optional int32 c = 1;
- repeated int32 d = 2;
-}
-
-message TestAllExtensions {
- extensions 1 to max;
-}
-
-extend TestAllExtensions {
- optional TestAllTypes.NestedMessage optional_nested_message_extension = 1;
- repeated TestAllTypes.NestedMessage repeated_nested_message_extension = 2;
-}
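
The extensions declared above are reached through the Extensions mapping that the deleted message.cc exposes (GetExtensionDict / RegisterExtension), not through ordinary attributes. A rough sketch, assuming a hypothetical generated module python_pb2 built from this file:

    import python_pb2  # hypothetical generated module for python.proto

    msg = python_pb2.TestAllExtensions()
    ext = python_pb2.optional_nested_message_extension

    # Extension fields are read and written through the Extensions dict.
    msg.Extensions[ext].bb = 7
    assert msg.HasExtension(ext)
    assert msg.Extensions[ext].bb == 7

    # Repeated extensions behave like repeated fields.
    rep = python_pb2.repeated_nested_message_extension
    msg.Extensions[rep].add().bb = 8
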
diff --git a/third_party/protobuf/python/google/protobuf/pyext/python_protobuf.h b/third_party/protobuf/python/google/protobuf/pyext/python_protobuf.h
deleted file mode 100644
index beb6e4604a..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/python_protobuf.h
+++ /dev/null
@@ -1,57 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: qrczak@google.com (Marcin Kowalczyk)
-//
-// This module exposes the C proto inside the given Python proto, in
-// case the Python proto is implemented with a C proto.
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_PYTHON_PROTOBUF_H__
-#define GOOGLE_PROTOBUF_PYTHON_PYTHON_PROTOBUF_H__
-
-#include <Python.h>
-
-namespace google {
-namespace protobuf {
-
-class Message;
-
-namespace python {
-
-// Return the pointer to the C proto inside the given Python proto,
-// or NULL when this is not a Python proto implemented with a C proto.
-const Message* GetCProtoInsidePyProto(PyObject* msg);
-Message* MutableCProtoInsidePyProto(PyObject* msg);
-
-} // namespace python
-} // namespace protobuf
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_PYTHON_PROTOBUF_H__
diff --git a/third_party/protobuf/python/google/protobuf/pyext/repeated_composite_container.cc b/third_party/protobuf/python/google/protobuf/pyext/repeated_composite_container.cc
deleted file mode 100644
index b01123b4c0..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/repeated_composite_container.cc
+++ /dev/null
@@ -1,614 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: anuraag@google.com (Anuraag Agrawal)
-// Author: tibell@google.com (Johan Tibell)
-
-#include <google/protobuf/pyext/repeated_composite_container.h>
-
-#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
-
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/dynamic_message.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-
-#if PY_MAJOR_VERSION >= 3
- #define PyInt_Check PyLong_Check
- #define PyInt_AsLong PyLong_AsLong
- #define PyInt_FromLong PyLong_FromLong
-#endif
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-namespace repeated_composite_container {
-
-// TODO(tibell): We might also want to check:
-// GOOGLE_CHECK_NOTNULL((self)->owner.get());
-#define GOOGLE_CHECK_ATTACHED(self) \
- do { \
- GOOGLE_CHECK_NOTNULL((self)->message); \
- GOOGLE_CHECK_NOTNULL((self)->parent_field_descriptor); \
- } while (0);
-
-#define GOOGLE_CHECK_RELEASED(self) \
- do { \
- GOOGLE_CHECK((self)->owner.get() == NULL); \
- GOOGLE_CHECK((self)->message == NULL); \
- GOOGLE_CHECK((self)->parent_field_descriptor == NULL); \
- GOOGLE_CHECK((self)->parent == NULL); \
- } while (0);
-
-// ---------------------------------------------------------------------
-// len()
-
-static Py_ssize_t Length(RepeatedCompositeContainer* self) {
- Message* message = self->message;
- if (message != NULL) {
- return message->GetReflection()->FieldSize(*message,
- self->parent_field_descriptor);
- } else {
- // The container has been released (i.e. by a call to Clear() or
- // ClearField() on the parent) and thus there's no message.
- return PyList_GET_SIZE(self->child_messages);
- }
-}
-
-// Returns 0 if successful; returns -1 and sets an exception if
-// unsuccessful.
-static int UpdateChildMessages(RepeatedCompositeContainer* self) {
- if (self->message == NULL)
- return 0;
-
- // A MergeFrom on a parent message could have caused extra messages to be
- // added in the underlying protobuf so add them to our list. They can never
- // be removed in such a way so there's no need to worry about that.
- Py_ssize_t message_length = Length(self);
- Py_ssize_t child_length = PyList_GET_SIZE(self->child_messages);
- Message* message = self->message;
- const Reflection* reflection = message->GetReflection();
- for (Py_ssize_t i = child_length; i < message_length; ++i) {
- const Message& sub_message = reflection->GetRepeatedMessage(
- *(self->message), self->parent_field_descriptor, i);
- CMessage* cmsg = cmessage::NewEmptyMessage(self->subclass_init,
- sub_message.GetDescriptor());
- ScopedPyObjectPtr py_cmsg(reinterpret_cast<PyObject*>(cmsg));
- if (cmsg == NULL) {
- return -1;
- }
- cmsg->owner = self->owner;
- cmsg->message = const_cast<Message*>(&sub_message);
- cmsg->parent = self->parent;
- if (PyList_Append(self->child_messages, py_cmsg.get()) < 0) {
- return -1;
- }
- }
- return 0;
-}
-
-// ---------------------------------------------------------------------
-// add()
-
-static PyObject* AddToAttached(RepeatedCompositeContainer* self,
- PyObject* args,
- PyObject* kwargs) {
- GOOGLE_CHECK_ATTACHED(self);
-
- if (UpdateChildMessages(self) < 0) {
- return NULL;
- }
- if (cmessage::AssureWritable(self->parent) == -1)
- return NULL;
- Message* message = self->message;
- Message* sub_message =
- message->GetReflection()->AddMessage(message,
- self->parent_field_descriptor);
- CMessage* cmsg = cmessage::NewEmptyMessage(self->subclass_init,
- sub_message->GetDescriptor());
- if (cmsg == NULL)
- return NULL;
-
- cmsg->owner = self->owner;
- cmsg->message = sub_message;
- cmsg->parent = self->parent;
- if (cmessage::InitAttributes(cmsg, kwargs) < 0) {
- Py_DECREF(cmsg);
- return NULL;
- }
-
- PyObject* py_cmsg = reinterpret_cast<PyObject*>(cmsg);
- if (PyList_Append(self->child_messages, py_cmsg) < 0) {
- Py_DECREF(py_cmsg);
- return NULL;
- }
- return py_cmsg;
-}
-
-static PyObject* AddToReleased(RepeatedCompositeContainer* self,
- PyObject* args,
- PyObject* kwargs) {
- GOOGLE_CHECK_RELEASED(self);
-
- // Create a new Message detached from the rest.
- PyObject* py_cmsg = PyEval_CallObjectWithKeywords(
- self->subclass_init, NULL, kwargs);
- if (py_cmsg == NULL)
- return NULL;
-
- if (PyList_Append(self->child_messages, py_cmsg) < 0) {
- Py_DECREF(py_cmsg);
- return NULL;
- }
- return py_cmsg;
-}
-
-PyObject* Add(RepeatedCompositeContainer* self,
- PyObject* args,
- PyObject* kwargs) {
- if (self->message == NULL)
- return AddToReleased(self, args, kwargs);
- else
- return AddToAttached(self, args, kwargs);
-}
-
-// ---------------------------------------------------------------------
-// extend()
-
-PyObject* Extend(RepeatedCompositeContainer* self, PyObject* value) {
- cmessage::AssureWritable(self->parent);
- if (UpdateChildMessages(self) < 0) {
- return NULL;
- }
- ScopedPyObjectPtr iter(PyObject_GetIter(value));
- if (iter == NULL) {
- PyErr_SetString(PyExc_TypeError, "Value must be iterable");
- return NULL;
- }
- ScopedPyObjectPtr next;
- while ((next.reset(PyIter_Next(iter.get()))) != NULL) {
- if (!PyObject_TypeCheck(next.get(), &CMessage_Type)) {
- PyErr_SetString(PyExc_TypeError, "Not a cmessage");
- return NULL;
- }
- ScopedPyObjectPtr new_message(Add(self, NULL, NULL));
- if (new_message == NULL) {
- return NULL;
- }
- CMessage* new_cmessage = reinterpret_cast<CMessage*>(new_message.get());
- if (ScopedPyObjectPtr(cmessage::MergeFrom(new_cmessage, next.get())) ==
- NULL) {
- return NULL;
- }
- }
- if (PyErr_Occurred()) {
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-PyObject* MergeFrom(RepeatedCompositeContainer* self, PyObject* other) {
- if (UpdateChildMessages(self) < 0) {
- return NULL;
- }
- return Extend(self, other);
-}
-
-PyObject* Subscript(RepeatedCompositeContainer* self, PyObject* slice) {
- if (UpdateChildMessages(self) < 0) {
- return NULL;
- }
- // Just forward the call to the subscript-handling function of the
- // list containing the child messages.
- return PyObject_GetItem(self->child_messages, slice);
-}
-
-int AssignSubscript(RepeatedCompositeContainer* self,
- PyObject* slice,
- PyObject* value) {
- if (UpdateChildMessages(self) < 0) {
- return -1;
- }
- if (value != NULL) {
- PyErr_SetString(PyExc_TypeError, "does not support assignment");
- return -1;
- }
-
- // Delete from the underlying Message, if any.
- if (self->parent != NULL) {
- if (cmessage::InternalDeleteRepeatedField(self->parent,
- self->parent_field_descriptor,
- slice,
- self->child_messages) < 0) {
- return -1;
- }
- } else {
- Py_ssize_t from;
- Py_ssize_t to;
- Py_ssize_t step;
- Py_ssize_t length = Length(self);
- Py_ssize_t slicelength;
- if (PySlice_Check(slice)) {
-#if PY_MAJOR_VERSION >= 3
- if (PySlice_GetIndicesEx(slice,
-#else
- if (PySlice_GetIndicesEx(reinterpret_cast<PySliceObject*>(slice),
-#endif
- length, &from, &to, &step, &slicelength) == -1) {
- return -1;
- }
- return PySequence_DelSlice(self->child_messages, from, to);
- } else if (PyInt_Check(slice) || PyLong_Check(slice)) {
- from = to = PyLong_AsLong(slice);
- if (from < 0) {
- from = to = length + from;
- }
- return PySequence_DelItem(self->child_messages, from);
- }
- }
-
- return 0;
-}
-
-static PyObject* Remove(RepeatedCompositeContainer* self, PyObject* value) {
- if (UpdateChildMessages(self) < 0) {
- return NULL;
- }
- Py_ssize_t index = PySequence_Index(self->child_messages, value);
- if (index == -1) {
- return NULL;
- }
- ScopedPyObjectPtr py_index(PyLong_FromLong(index));
- if (AssignSubscript(self, py_index.get(), NULL) < 0) {
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-static PyObject* RichCompare(RepeatedCompositeContainer* self,
- PyObject* other,
- int opid) {
- if (UpdateChildMessages(self) < 0) {
- return NULL;
- }
- if (!PyObject_TypeCheck(other, &RepeatedCompositeContainer_Type)) {
- PyErr_SetString(PyExc_TypeError,
- "Can only compare repeated composite fields "
- "against other repeated composite fields.");
- return NULL;
- }
- if (opid == Py_EQ || opid == Py_NE) {
- // TODO(anuraag): Don't make new lists just for this...
- ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
- if (full_slice == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr list(Subscript(self, full_slice.get()));
- if (list == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr other_list(
- Subscript(reinterpret_cast<RepeatedCompositeContainer*>(other),
- full_slice.get()));
- if (other_list == NULL) {
- return NULL;
- }
- return PyObject_RichCompare(list.get(), other_list.get(), opid);
- } else {
- Py_INCREF(Py_NotImplemented);
- return Py_NotImplemented;
- }
-}
-
-// ---------------------------------------------------------------------
-// sort()
-
-static void ReorderAttached(RepeatedCompositeContainer* self) {
- Message* message = self->message;
- const Reflection* reflection = message->GetReflection();
- const FieldDescriptor* descriptor = self->parent_field_descriptor;
- const Py_ssize_t length = Length(self);
-
- // Since Python protobuf objects are never arena-allocated, adding and
- // removing message pointers to the underlying array is just updating
- // pointers.
- for (Py_ssize_t i = 0; i < length; ++i)
- reflection->ReleaseLast(message, descriptor);
-
- for (Py_ssize_t i = 0; i < length; ++i) {
- CMessage* py_cmsg = reinterpret_cast<CMessage*>(
- PyList_GET_ITEM(self->child_messages, i));
- reflection->AddAllocatedMessage(message, descriptor, py_cmsg->message);
- }
-}
-
-// Returns 0 if successful; returns -1 and sets an exception if
-// unsuccessful.
-static int SortPythonMessages(RepeatedCompositeContainer* self,
- PyObject* args,
- PyObject* kwds) {
- ScopedPyObjectPtr m(PyObject_GetAttrString(self->child_messages, "sort"));
- if (m == NULL)
- return -1;
- if (PyObject_Call(m.get(), args, kwds) == NULL)
- return -1;
- if (self->message != NULL) {
- ReorderAttached(self);
- }
- return 0;
-}
-
-static PyObject* Sort(RepeatedCompositeContainer* self,
- PyObject* args,
- PyObject* kwds) {
- // Support the old sort_function argument for backwards
- // compatibility.
- if (kwds != NULL) {
- PyObject* sort_func = PyDict_GetItemString(kwds, "sort_function");
- if (sort_func != NULL) {
- // Must set before deleting as sort_func is a borrowed reference
- // and kwds might be the only thing keeping it alive.
- PyDict_SetItemString(kwds, "cmp", sort_func);
- PyDict_DelItemString(kwds, "sort_function");
- }
- }
-
- if (UpdateChildMessages(self) < 0) {
- return NULL;
- }
- if (SortPythonMessages(self, args, kwds) < 0) {
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-// ---------------------------------------------------------------------
-
-static PyObject* Item(RepeatedCompositeContainer* self, Py_ssize_t index) {
- if (UpdateChildMessages(self) < 0) {
- return NULL;
- }
- Py_ssize_t length = Length(self);
- if (index < 0) {
- index = length + index;
- }
- PyObject* item = PyList_GetItem(self->child_messages, index);
- if (item == NULL) {
- return NULL;
- }
- Py_INCREF(item);
- return item;
-}
-
-static PyObject* Pop(RepeatedCompositeContainer* self,
- PyObject* args) {
- Py_ssize_t index = -1;
- if (!PyArg_ParseTuple(args, "|n", &index)) {
- return NULL;
- }
- PyObject* item = Item(self, index);
- if (item == NULL) {
- PyErr_Format(PyExc_IndexError,
- "list index (%zd) out of range",
- index);
- return NULL;
- }
- ScopedPyObjectPtr py_index(PyLong_FromSsize_t(index));
- if (AssignSubscript(self, py_index.get(), NULL) < 0) {
- return NULL;
- }
- return item;
-}
-
-// Release field of parent message and transfer the ownership to target.
-void ReleaseLastTo(CMessage* parent,
- const FieldDescriptor* field,
- CMessage* target) {
- GOOGLE_CHECK_NOTNULL(parent);
- GOOGLE_CHECK_NOTNULL(field);
- GOOGLE_CHECK_NOTNULL(target);
-
- shared_ptr<Message> released_message(
- parent->message->GetReflection()->ReleaseLast(parent->message, field));
- // TODO(tibell): Deal with proto1.
-
- target->parent = NULL;
- target->parent_field_descriptor = NULL;
- target->message = released_message.get();
- target->read_only = false;
- cmessage::SetOwner(target, released_message);
-}
-
-// Called to release a container using
-// ClearField('container_field_name') on the parent.
-int Release(RepeatedCompositeContainer* self) {
- if (UpdateChildMessages(self) < 0) {
- PyErr_WriteUnraisable(PyBytes_FromString("Failed to update released "
- "messages"));
- return -1;
- }
-
- Message* message = self->message;
- const FieldDescriptor* field = self->parent_field_descriptor;
-
- // The reflection API only lets us release the last message in a
- // repeated field. Therefore we iterate through the children
- // starting with the last one.
- const Py_ssize_t size = PyList_GET_SIZE(self->child_messages);
- GOOGLE_DCHECK_EQ(size, message->GetReflection()->FieldSize(*message, field));
- for (Py_ssize_t i = size - 1; i >= 0; --i) {
- CMessage* child_cmessage = reinterpret_cast<CMessage*>(
- PyList_GET_ITEM(self->child_messages, i));
- ReleaseLastTo(self->parent, field, child_cmessage);
- }
-
- // Detach from containing message.
- self->parent = NULL;
- self->parent_field_descriptor = NULL;
- self->message = NULL;
- self->owner.reset();
-
- return 0;
-}
-
-int SetOwner(RepeatedCompositeContainer* self,
- const shared_ptr<Message>& new_owner) {
- GOOGLE_CHECK_ATTACHED(self);
-
- self->owner = new_owner;
- const Py_ssize_t n = PyList_GET_SIZE(self->child_messages);
- for (Py_ssize_t i = 0; i < n; ++i) {
- PyObject* msg = PyList_GET_ITEM(self->child_messages, i);
- if (cmessage::SetOwner(reinterpret_cast<CMessage*>(msg), new_owner) == -1) {
- return -1;
- }
- }
- return 0;
-}
-
-// The private constructor of RepeatedCompositeContainer objects.
-PyObject *NewContainer(
- CMessage* parent,
- const FieldDescriptor* parent_field_descriptor,
- PyObject *concrete_class) {
- if (!CheckFieldBelongsToMessage(parent_field_descriptor, parent->message)) {
- return NULL;
- }
-
- RepeatedCompositeContainer* self =
- reinterpret_cast<RepeatedCompositeContainer*>(
- PyType_GenericAlloc(&RepeatedCompositeContainer_Type, 0));
- if (self == NULL) {
- return NULL;
- }
-
- self->message = parent->message;
- self->parent = parent;
- self->parent_field_descriptor = parent_field_descriptor;
- self->owner = parent->owner;
- Py_INCREF(concrete_class);
- self->subclass_init = concrete_class;
- self->child_messages = PyList_New(0);
-
- return reinterpret_cast<PyObject*>(self);
-}
-
-static void Dealloc(RepeatedCompositeContainer* self) {
- Py_CLEAR(self->child_messages);
- Py_CLEAR(self->subclass_init);
- // TODO(tibell): Do we need to call delete on these objects to make
- // sure their destructors are called?
- self->owner.reset();
-
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
-}
-
-static PySequenceMethods SqMethods = {
- (lenfunc)Length, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- (ssizeargfunc)Item /* sq_item */
-};
-
-static PyMappingMethods MpMethods = {
- (lenfunc)Length, /* mp_length */
- (binaryfunc)Subscript, /* mp_subscript */
- (objobjargproc)AssignSubscript,/* mp_ass_subscript */
-};
-
-static PyMethodDef Methods[] = {
- { "add", (PyCFunction) Add, METH_VARARGS | METH_KEYWORDS,
- "Adds an object to the repeated container." },
- { "extend", (PyCFunction) Extend, METH_O,
- "Adds objects to the repeated container." },
- { "pop", (PyCFunction)Pop, METH_VARARGS,
- "Removes an object from the repeated container and returns it." },
- { "remove", (PyCFunction) Remove, METH_O,
- "Removes an object from the repeated container." },
- { "sort", (PyCFunction) Sort, METH_VARARGS | METH_KEYWORDS,
- "Sorts the repeated container." },
- { "MergeFrom", (PyCFunction) MergeFrom, METH_O,
- "Adds objects to the repeated container." },
- { NULL, NULL }
-};
-
-} // namespace repeated_composite_container
-
-PyTypeObject RepeatedCompositeContainer_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".RepeatedCompositeContainer", // tp_name
- sizeof(RepeatedCompositeContainer), // tp_basicsize
- 0, // tp_itemsize
- (destructor)repeated_composite_container::Dealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- &repeated_composite_container::SqMethods, // tp_as_sequence
- &repeated_composite_container::MpMethods, // tp_as_mapping
- PyObject_HashNotImplemented, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A Repeated scalar container", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- (richcmpfunc)repeated_composite_container::RichCompare, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- repeated_composite_container::Methods, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
-};
-
-} // namespace python
-} // namespace protobuf
-} // namespace google
diff --git a/third_party/protobuf/python/google/protobuf/pyext/repeated_composite_container.h b/third_party/protobuf/python/google/protobuf/pyext/repeated_composite_container.h
deleted file mode 100644
index 58d37b0207..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/repeated_composite_container.h
+++ /dev/null
@@ -1,178 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: anuraag@google.com (Anuraag Agrawal)
-// Author: tibell@google.com (Johan Tibell)
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_COMPOSITE_CONTAINER_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_COMPOSITE_CONTAINER_H__
-
-#include <Python.h>
-
-#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
-#include <string>
-#include <vector>
-
-namespace google {
-namespace protobuf {
-
-class FieldDescriptor;
-class Message;
-
-#ifdef _SHARED_PTR_H
-using std::shared_ptr;
-#else
-using internal::shared_ptr;
-#endif
-
-namespace python {
-
-struct CMessage;
-
-// A RepeatedCompositeContainer can be in one of two states: attached
-// or released.
-//
-// When in the attached state all modifications to the container are
-// done both on the 'message' and on the 'child_messages'
-// list. In this state all Messages referred to by the children in
-// 'child_messages' are owned by the 'owner'.
-//
-// When in the released state 'message', 'owner', 'parent', and
-// 'parent_field_descriptor' are NULL.
-typedef struct RepeatedCompositeContainer {
- PyObject_HEAD;
-
- // This is the top-level C++ Message object that owns the whole
- // proto tree. Every Python RepeatedCompositeContainer holds a
- // reference to it in order to keep it alive as long as there's a
- // Python object that references any part of the tree.
- shared_ptr<Message> owner;
-
- // Weak reference to parent object. May be NULL. Used to make sure
- // the parent is writable before modifying the
- // RepeatedCompositeContainer.
- CMessage* parent;
-
- // A descriptor used to modify the underlying 'message'.
- // The pointer is owned by the global DescriptorPool.
- const FieldDescriptor* parent_field_descriptor;
-
- // Pointer to the C++ Message that contains this container. The
- // RepeatedCompositeContainer does not own this pointer.
- //
- // If NULL, this message has been released from its parent (by
- // calling Clear() or ClearField() on the parent).
- Message* message;
-
- // A callable that is used to create new child messages.
- PyObject* subclass_init;
-
- // A list of child messages.
- PyObject* child_messages;
-} RepeatedCompositeContainer;
-
-extern PyTypeObject RepeatedCompositeContainer_Type;
-
-namespace repeated_composite_container {
-
-// Builds a RepeatedCompositeContainer object, from a parent message and a
-// field descriptor.
-PyObject *NewContainer(
- CMessage* parent,
- const FieldDescriptor* parent_field_descriptor,
- PyObject *concrete_class);
-
-// Appends a new CMessage to the container and returns it. The
-// CMessage is initialized using the content of kwargs.
-//
-// Returns a new reference if successful; returns NULL and sets an
-// exception if unsuccessful.
-PyObject* Add(RepeatedCompositeContainer* self,
- PyObject* args,
- PyObject* kwargs);
-
-// Appends all the CMessages in the input iterator to the container.
-//
-// Returns None if successful; returns NULL and sets an exception if
-// unsuccessful.
-PyObject* Extend(RepeatedCompositeContainer* self, PyObject* value);
-
-// Appends a new message to the container for each message in the
-// input iterator, merging each data element in. Equivalent to extend.
-//
-// Returns None if successful; returns NULL and sets an exception if
-// unsuccessful.
-PyObject* MergeFrom(RepeatedCompositeContainer* self, PyObject* other);
-
-// Accesses messages in the container.
-//
-// Returns a new reference to the message for an integer parameter.
-// Returns a new reference to a list of messages for a slice.
-PyObject* Subscript(RepeatedCompositeContainer* self, PyObject* slice);
-
-// Deletes items from the container (cannot be used for assignment).
-//
-// Returns 0 on success, -1 on failure.
-int AssignSubscript(RepeatedCompositeContainer* self,
- PyObject* slice,
- PyObject* value);
-
-// Releases the messages in the container to the given message.
-//
-// Returns 0 on success, -1 on failure.
-int ReleaseToMessage(RepeatedCompositeContainer* self, Message* new_message);
-
-// Releases the messages in the container to a new message.
-//
-// Returns 0 on success, -1 on failure.
-int Release(RepeatedCompositeContainer* self);
-
-// Returns 0 on success, -1 on failure.
-int SetOwner(RepeatedCompositeContainer* self,
- const shared_ptr<Message>& new_owner);
-
-// Removes the last element of the repeated message field 'field' on
-// the Message 'parent', and transfers the ownership of the released
-// Message to 'target'.
-//
-// Corresponds to the reflection API method ReleaseLast.
-void ReleaseLastTo(CMessage* parent,
- const FieldDescriptor* field,
- CMessage* target);
-
-} // namespace repeated_composite_container
-} // namespace python
-} // namespace protobuf
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_COMPOSITE_CONTAINER_H__
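The attached/released split described in the struct comment above amounts to two invariants over the struct fields. The predicates below are an illustrative restatement only; they mirror the GOOGLE_CHECK_ATTACHED and GOOGLE_CHECK_RELEASED macros in the deleted .cc file and are not part of any API.

#include <google/protobuf/pyext/repeated_composite_container.h>

namespace {

// Attached: edits flow through the underlying 'message' and are mirrored in
// 'child_messages'; every child wrapper is kept alive by the shared 'owner'.
bool IsAttached(const google::protobuf::python::RepeatedCompositeContainer& c) {
  return c.message != NULL && c.parent_field_descriptor != NULL;
}

// Released: the parent called Clear()/ClearField(), so only the detached
// 'child_messages' list is still meaningful.
bool IsReleased(const google::protobuf::python::RepeatedCompositeContainer& c) {
  return c.message == NULL && c.parent == NULL &&
         c.parent_field_descriptor == NULL && c.owner.get() == NULL;
}

}  // namespace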
diff --git a/third_party/protobuf/python/google/protobuf/pyext/repeated_scalar_container.cc b/third_party/protobuf/python/google/protobuf/pyext/repeated_scalar_container.cc
deleted file mode 100644
index 95da85f87b..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/repeated_scalar_container.cc
+++ /dev/null
@@ -1,812 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: anuraag@google.com (Anuraag Agrawal)
-// Author: tibell@google.com (Johan Tibell)
-
-#include <google/protobuf/pyext/repeated_scalar_container.h>
-
-#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
-
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/dynamic_message.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-
-#if PY_MAJOR_VERSION >= 3
- #define PyInt_FromLong PyLong_FromLong
- #if PY_VERSION_HEX < 0x03030000
- #error "Python 3.0 - 3.2 are not supported."
- #else
- #define PyString_AsString(ob) \
- (PyUnicode_Check(ob)? PyUnicode_AsUTF8(ob): PyBytes_AsString(ob))
- #endif
-#endif
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-namespace repeated_scalar_container {
-
-static int InternalAssignRepeatedField(
- RepeatedScalarContainer* self, PyObject* list) {
- self->message->GetReflection()->ClearField(self->message,
- self->parent_field_descriptor);
- for (Py_ssize_t i = 0; i < PyList_GET_SIZE(list); ++i) {
- PyObject* value = PyList_GET_ITEM(list, i);
- if (ScopedPyObjectPtr(Append(self, value)) == NULL) {
- return -1;
- }
- }
- return 0;
-}
-
-static Py_ssize_t Len(RepeatedScalarContainer* self) {
- Message* message = self->message;
- return message->GetReflection()->FieldSize(*message,
- self->parent_field_descriptor);
-}
-
-static int AssignItem(RepeatedScalarContainer* self,
- Py_ssize_t index,
- PyObject* arg) {
- cmessage::AssureWritable(self->parent);
- Message* message = self->message;
- const FieldDescriptor* field_descriptor = self->parent_field_descriptor;
-
- const Reflection* reflection = message->GetReflection();
- int field_size = reflection->FieldSize(*message, field_descriptor);
- if (index < 0) {
- index = field_size + index;
- }
- if (index < 0 || index >= field_size) {
- PyErr_Format(PyExc_IndexError,
- "list assignment index (%d) out of range",
- static_cast<int>(index));
- return -1;
- }
-
- if (arg == NULL) {
- ScopedPyObjectPtr py_index(PyLong_FromLong(index));
- return cmessage::InternalDeleteRepeatedField(self->parent, field_descriptor,
- py_index.get(), NULL);
- }
-
- if (PySequence_Check(arg) && !(PyBytes_Check(arg) || PyUnicode_Check(arg))) {
- PyErr_SetString(PyExc_TypeError, "Value must be scalar");
- return -1;
- }
-
- switch (field_descriptor->cpp_type()) {
- case FieldDescriptor::CPPTYPE_INT32: {
- GOOGLE_CHECK_GET_INT32(arg, value, -1);
- reflection->SetRepeatedInt32(message, field_descriptor, index, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_INT64: {
- GOOGLE_CHECK_GET_INT64(arg, value, -1);
- reflection->SetRepeatedInt64(message, field_descriptor, index, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT32: {
- GOOGLE_CHECK_GET_UINT32(arg, value, -1);
- reflection->SetRepeatedUInt32(message, field_descriptor, index, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT64: {
- GOOGLE_CHECK_GET_UINT64(arg, value, -1);
- reflection->SetRepeatedUInt64(message, field_descriptor, index, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_FLOAT: {
- GOOGLE_CHECK_GET_FLOAT(arg, value, -1);
- reflection->SetRepeatedFloat(message, field_descriptor, index, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_DOUBLE: {
- GOOGLE_CHECK_GET_DOUBLE(arg, value, -1);
- reflection->SetRepeatedDouble(message, field_descriptor, index, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_BOOL: {
- GOOGLE_CHECK_GET_BOOL(arg, value, -1);
- reflection->SetRepeatedBool(message, field_descriptor, index, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_STRING: {
- if (!CheckAndSetString(
- arg, message, field_descriptor, reflection, false, index)) {
- return -1;
- }
- break;
- }
- case FieldDescriptor::CPPTYPE_ENUM: {
- GOOGLE_CHECK_GET_INT32(arg, value, -1);
- if (reflection->SupportsUnknownEnumValues()) {
- reflection->SetRepeatedEnumValue(message, field_descriptor, index,
- value);
- } else {
- const EnumDescriptor* enum_descriptor = field_descriptor->enum_type();
- const EnumValueDescriptor* enum_value =
- enum_descriptor->FindValueByNumber(value);
- if (enum_value != NULL) {
- reflection->SetRepeatedEnum(message, field_descriptor, index,
- enum_value);
- } else {
- ScopedPyObjectPtr s(PyObject_Str(arg));
- if (s != NULL) {
- PyErr_Format(PyExc_ValueError, "Unknown enum value: %s",
- PyString_AsString(s.get()));
- }
- return -1;
- }
- }
- break;
- }
- default:
- PyErr_Format(
- PyExc_SystemError, "Adding value to a field of unknown type %d",
- field_descriptor->cpp_type());
- return -1;
- }
- return 0;
-}
-
-static PyObject* Item(RepeatedScalarContainer* self, Py_ssize_t index) {
- Message* message = self->message;
- const FieldDescriptor* field_descriptor = self->parent_field_descriptor;
- const Reflection* reflection = message->GetReflection();
-
- int field_size = reflection->FieldSize(*message, field_descriptor);
- if (index < 0) {
- index = field_size + index;
- }
- if (index < 0 || index >= field_size) {
- PyErr_Format(PyExc_IndexError,
- "list index (%zd) out of range",
- index);
- return NULL;
- }
-
- PyObject* result = NULL;
- switch (field_descriptor->cpp_type()) {
- case FieldDescriptor::CPPTYPE_INT32: {
- int32 value = reflection->GetRepeatedInt32(
- *message, field_descriptor, index);
- result = PyInt_FromLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_INT64: {
- int64 value = reflection->GetRepeatedInt64(
- *message, field_descriptor, index);
- result = PyLong_FromLongLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT32: {
- uint32 value = reflection->GetRepeatedUInt32(
- *message, field_descriptor, index);
- result = PyLong_FromLongLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT64: {
- uint64 value = reflection->GetRepeatedUInt64(
- *message, field_descriptor, index);
- result = PyLong_FromUnsignedLongLong(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_FLOAT: {
- float value = reflection->GetRepeatedFloat(
- *message, field_descriptor, index);
- result = PyFloat_FromDouble(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_DOUBLE: {
- double value = reflection->GetRepeatedDouble(
- *message, field_descriptor, index);
- result = PyFloat_FromDouble(value);
- break;
- }
- case FieldDescriptor::CPPTYPE_BOOL: {
- bool value = reflection->GetRepeatedBool(
- *message, field_descriptor, index);
- result = PyBool_FromLong(value ? 1 : 0);
- break;
- }
- case FieldDescriptor::CPPTYPE_ENUM: {
- const EnumValueDescriptor* enum_value =
- message->GetReflection()->GetRepeatedEnum(
- *message, field_descriptor, index);
- result = PyInt_FromLong(enum_value->number());
- break;
- }
- case FieldDescriptor::CPPTYPE_STRING: {
- string value = reflection->GetRepeatedString(
- *message, field_descriptor, index);
- result = ToStringObject(field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_MESSAGE: {
- PyObject* py_cmsg = PyObject_CallObject(reinterpret_cast<PyObject*>(
- &CMessage_Type), NULL);
- if (py_cmsg == NULL) {
- return NULL;
- }
- CMessage* cmsg = reinterpret_cast<CMessage*>(py_cmsg);
- const Message& msg = reflection->GetRepeatedMessage(
- *message, field_descriptor, index);
- cmsg->owner = self->owner;
- cmsg->parent = self->parent;
- cmsg->message = const_cast<Message*>(&msg);
- cmsg->read_only = false;
- result = reinterpret_cast<PyObject*>(py_cmsg);
- break;
- }
- default:
- PyErr_Format(
- PyExc_SystemError,
- "Getting value from a repeated field of unknown type %d",
- field_descriptor->cpp_type());
- }
-
- return result;
-}
-
-static PyObject* Subscript(RepeatedScalarContainer* self, PyObject* slice) {
- Py_ssize_t from;
- Py_ssize_t to;
- Py_ssize_t step;
- Py_ssize_t length;
- Py_ssize_t slicelength;
- bool return_list = false;
-#if PY_MAJOR_VERSION < 3
- if (PyInt_Check(slice)) {
- from = to = PyInt_AsLong(slice);
- } else // NOLINT
-#endif
- if (PyLong_Check(slice)) {
- from = to = PyLong_AsLong(slice);
- } else if (PySlice_Check(slice)) {
- length = Len(self);
-#if PY_MAJOR_VERSION >= 3
- if (PySlice_GetIndicesEx(slice,
-#else
- if (PySlice_GetIndicesEx(reinterpret_cast<PySliceObject*>(slice),
-#endif
- length, &from, &to, &step, &slicelength) == -1) {
- return NULL;
- }
- return_list = true;
- } else {
- PyErr_SetString(PyExc_TypeError, "list indices must be integers");
- return NULL;
- }
-
- if (!return_list) {
- return Item(self, from);
- }
-
- PyObject* list = PyList_New(0);
- if (list == NULL) {
- return NULL;
- }
- if (from <= to) {
- if (step < 0) {
- return list;
- }
- for (Py_ssize_t index = from; index < to; index += step) {
- if (index < 0 || index >= length) {
- break;
- }
- ScopedPyObjectPtr s(Item(self, index));
- PyList_Append(list, s.get());
- }
- } else {
- if (step > 0) {
- return list;
- }
- for (Py_ssize_t index = from; index > to; index += step) {
- if (index < 0 || index >= length) {
- break;
- }
- ScopedPyObjectPtr s(Item(self, index));
- PyList_Append(list, s.get());
- }
- }
- return list;
-}
-
-PyObject* Append(RepeatedScalarContainer* self, PyObject* item) {
- cmessage::AssureWritable(self->parent);
- Message* message = self->message;
- const FieldDescriptor* field_descriptor = self->parent_field_descriptor;
-
- const Reflection* reflection = message->GetReflection();
- switch (field_descriptor->cpp_type()) {
- case FieldDescriptor::CPPTYPE_INT32: {
- GOOGLE_CHECK_GET_INT32(item, value, NULL);
- reflection->AddInt32(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_INT64: {
- GOOGLE_CHECK_GET_INT64(item, value, NULL);
- reflection->AddInt64(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT32: {
- GOOGLE_CHECK_GET_UINT32(item, value, NULL);
- reflection->AddUInt32(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_UINT64: {
- GOOGLE_CHECK_GET_UINT64(item, value, NULL);
- reflection->AddUInt64(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_FLOAT: {
- GOOGLE_CHECK_GET_FLOAT(item, value, NULL);
- reflection->AddFloat(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_DOUBLE: {
- GOOGLE_CHECK_GET_DOUBLE(item, value, NULL);
- reflection->AddDouble(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_BOOL: {
- GOOGLE_CHECK_GET_BOOL(item, value, NULL);
- reflection->AddBool(message, field_descriptor, value);
- break;
- }
- case FieldDescriptor::CPPTYPE_STRING: {
- if (!CheckAndSetString(
- item, message, field_descriptor, reflection, true, -1)) {
- return NULL;
- }
- break;
- }
- case FieldDescriptor::CPPTYPE_ENUM: {
- GOOGLE_CHECK_GET_INT32(item, value, NULL);
- if (reflection->SupportsUnknownEnumValues()) {
- reflection->AddEnumValue(message, field_descriptor, value);
- } else {
- const EnumDescriptor* enum_descriptor = field_descriptor->enum_type();
- const EnumValueDescriptor* enum_value =
- enum_descriptor->FindValueByNumber(value);
- if (enum_value != NULL) {
- reflection->AddEnum(message, field_descriptor, enum_value);
- } else {
- ScopedPyObjectPtr s(PyObject_Str(item));
- if (s != NULL) {
- PyErr_Format(PyExc_ValueError, "Unknown enum value: %s",
- PyString_AsString(s.get()));
- }
- return NULL;
- }
- }
- break;
- }
- default:
- PyErr_Format(
- PyExc_SystemError, "Adding value to a field of unknown type %d",
- field_descriptor->cpp_type());
- return NULL;
- }
-
- Py_RETURN_NONE;
-}
-
-static int AssSubscript(RepeatedScalarContainer* self,
- PyObject* slice,
- PyObject* value) {
- Py_ssize_t from;
- Py_ssize_t to;
- Py_ssize_t step;
- Py_ssize_t length;
- Py_ssize_t slicelength;
- bool create_list = false;
-
- cmessage::AssureWritable(self->parent);
- Message* message = self->message;
- const FieldDescriptor* field_descriptor =
- self->parent_field_descriptor;
-
-#if PY_MAJOR_VERSION < 3
- if (PyInt_Check(slice)) {
- from = to = PyInt_AsLong(slice);
- } else
-#endif
- if (PyLong_Check(slice)) {
- from = to = PyLong_AsLong(slice);
- } else if (PySlice_Check(slice)) {
- const Reflection* reflection = message->GetReflection();
- length = reflection->FieldSize(*message, field_descriptor);
-#if PY_MAJOR_VERSION >= 3
- if (PySlice_GetIndicesEx(slice,
-#else
- if (PySlice_GetIndicesEx(reinterpret_cast<PySliceObject*>(slice),
-#endif
- length, &from, &to, &step, &slicelength) == -1) {
- return -1;
- }
- create_list = true;
- } else {
- PyErr_SetString(PyExc_TypeError, "list indices must be integers");
- return -1;
- }
-
- if (value == NULL) {
- return cmessage::InternalDeleteRepeatedField(
- self->parent, field_descriptor, slice, NULL);
- }
-
- if (!create_list) {
- return AssignItem(self, from, value);
- }
-
- ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
- if (full_slice == NULL) {
- return -1;
- }
- ScopedPyObjectPtr new_list(Subscript(self, full_slice.get()));
- if (new_list == NULL) {
- return -1;
- }
- if (PySequence_SetSlice(new_list.get(), from, to, value) < 0) {
- return -1;
- }
-
- return InternalAssignRepeatedField(self, new_list.get());
-}
-
-PyObject* Extend(RepeatedScalarContainer* self, PyObject* value) {
- cmessage::AssureWritable(self->parent);
-
- // TODO(ptucker): Deprecate this behavior. b/18413862
- if (value == Py_None) {
- Py_RETURN_NONE;
- }
- if ((Py_TYPE(value)->tp_as_sequence == NULL) && PyObject_Not(value)) {
- Py_RETURN_NONE;
- }
-
- ScopedPyObjectPtr iter(PyObject_GetIter(value));
- if (iter == NULL) {
- PyErr_SetString(PyExc_TypeError, "Value must be iterable");
- return NULL;
- }
- ScopedPyObjectPtr next;
- while ((next.reset(PyIter_Next(iter.get()))) != NULL) {
- if (ScopedPyObjectPtr(Append(self, next.get())) == NULL) {
- return NULL;
- }
- }
- if (PyErr_Occurred()) {
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-static PyObject* Insert(RepeatedScalarContainer* self, PyObject* args) {
- Py_ssize_t index;
- PyObject* value;
- if (!PyArg_ParseTuple(args, "lO", &index, &value)) {
- return NULL;
- }
- ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
- ScopedPyObjectPtr new_list(Subscript(self, full_slice.get()));
- if (PyList_Insert(new_list.get(), index, value) < 0) {
- return NULL;
- }
- int ret = InternalAssignRepeatedField(self, new_list.get());
- if (ret < 0) {
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-static PyObject* Remove(RepeatedScalarContainer* self, PyObject* value) {
- Py_ssize_t match_index = -1;
- for (Py_ssize_t i = 0; i < Len(self); ++i) {
- ScopedPyObjectPtr elem(Item(self, i));
- if (PyObject_RichCompareBool(elem.get(), value, Py_EQ)) {
- match_index = i;
- break;
- }
- }
- if (match_index == -1) {
- PyErr_SetString(PyExc_ValueError, "remove(x): x not in container");
- return NULL;
- }
- if (AssignItem(self, match_index, NULL) < 0) {
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-static PyObject* RichCompare(RepeatedScalarContainer* self,
- PyObject* other,
- int opid) {
- if (opid != Py_EQ && opid != Py_NE) {
- Py_INCREF(Py_NotImplemented);
- return Py_NotImplemented;
- }
-
- // Copy the contents of this repeated scalar container, and other if it is
- // also a repeated scalar container, into Python lists so we can delegate
- // to the list's compare method.
-
- ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
- if (full_slice == NULL) {
- return NULL;
- }
-
- ScopedPyObjectPtr other_list_deleter;
- if (PyObject_TypeCheck(other, &RepeatedScalarContainer_Type)) {
- other_list_deleter.reset(Subscript(
- reinterpret_cast<RepeatedScalarContainer*>(other), full_slice.get()));
- other = other_list_deleter.get();
- }
-
- ScopedPyObjectPtr list(Subscript(self, full_slice.get()));
- if (list == NULL) {
- return NULL;
- }
- return PyObject_RichCompare(list.get(), other, opid);
-}
-
-PyObject* Reduce(RepeatedScalarContainer* unused_self) {
- PyErr_Format(
- PickleError_class,
- "can't pickle repeated message fields, convert to list first");
- return NULL;
-}
-
-static PyObject* Sort(RepeatedScalarContainer* self,
- PyObject* args,
- PyObject* kwds) {
- // Support the old sort_function argument for backwards
- // compatibility.
- if (kwds != NULL) {
- PyObject* sort_func = PyDict_GetItemString(kwds, "sort_function");
- if (sort_func != NULL) {
- // Must set before deleting as sort_func is a borrowed reference
- // and kwds might be the only thing keeping it alive.
- if (PyDict_SetItemString(kwds, "cmp", sort_func) == -1)
- return NULL;
- if (PyDict_DelItemString(kwds, "sort_function") == -1)
- return NULL;
- }
- }
-
- ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
- if (full_slice == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr list(Subscript(self, full_slice.get()));
- if (list == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr m(PyObject_GetAttrString(list.get(), "sort"));
- if (m == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr res(PyObject_Call(m.get(), args, kwds));
- if (res == NULL) {
- return NULL;
- }
- int ret = InternalAssignRepeatedField(self, list.get());
- if (ret < 0) {
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
-static PyObject* Pop(RepeatedScalarContainer* self,
- PyObject* args) {
- Py_ssize_t index = -1;
- if (!PyArg_ParseTuple(args, "|n", &index)) {
- return NULL;
- }
- PyObject* item = Item(self, index);
- if (item == NULL) {
- PyErr_Format(PyExc_IndexError,
- "list index (%zd) out of range",
- index);
- return NULL;
- }
- if (AssignItem(self, index, NULL) < 0) {
- return NULL;
- }
- return item;
-}
-
-// The private constructor of RepeatedScalarContainer objects.
-PyObject *NewContainer(
- CMessage* parent, const FieldDescriptor* parent_field_descriptor) {
- if (!CheckFieldBelongsToMessage(parent_field_descriptor, parent->message)) {
- return NULL;
- }
-
- RepeatedScalarContainer* self = reinterpret_cast<RepeatedScalarContainer*>(
- PyType_GenericAlloc(&RepeatedScalarContainer_Type, 0));
- if (self == NULL) {
- return NULL;
- }
-
- self->message = parent->message;
- self->parent = parent;
- self->parent_field_descriptor = parent_field_descriptor;
- self->owner = parent->owner;
-
- return reinterpret_cast<PyObject*>(self);
-}
-
-// Initializes the underlying Message object of "to" so it becomes a new parent
-// repeated scalar, and copies all the values from "from" to it. A child scalar
-// container can be released by passing it as both from and to (e.g. making it
-// the recipient of the new parent message and copying the values from itself).
-static int InitializeAndCopyToParentContainer(
- RepeatedScalarContainer* from,
- RepeatedScalarContainer* to) {
- ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
- if (full_slice == NULL) {
- return -1;
- }
- ScopedPyObjectPtr values(Subscript(from, full_slice.get()));
- if (values == NULL) {
- return -1;
- }
- Message* new_message = from->message->New();
- to->parent = NULL;
- to->parent_field_descriptor = from->parent_field_descriptor;
- to->message = new_message;
- to->owner.reset(new_message);
- if (InternalAssignRepeatedField(to, values.get()) < 0) {
- return -1;
- }
- return 0;
-}
-
-int Release(RepeatedScalarContainer* self) {
- return InitializeAndCopyToParentContainer(self, self);
-}
-
-PyObject* DeepCopy(RepeatedScalarContainer* self, PyObject* arg) {
- RepeatedScalarContainer* clone = reinterpret_cast<RepeatedScalarContainer*>(
- PyType_GenericAlloc(&RepeatedScalarContainer_Type, 0));
- if (clone == NULL) {
- return NULL;
- }
-
- if (InitializeAndCopyToParentContainer(self, clone) < 0) {
- Py_DECREF(clone);
- return NULL;
- }
- return reinterpret_cast<PyObject*>(clone);
-}
-
-static void Dealloc(RepeatedScalarContainer* self) {
- self->owner.reset();
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
-}
-
-void SetOwner(RepeatedScalarContainer* self,
- const shared_ptr<Message>& new_owner) {
- self->owner = new_owner;
-}
-
-static PySequenceMethods SqMethods = {
- (lenfunc)Len, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- (ssizeargfunc)Item, /* sq_item */
- 0, /* sq_slice */
- (ssizeobjargproc)AssignItem /* sq_ass_item */
-};
-
-static PyMappingMethods MpMethods = {
- (lenfunc)Len, /* mp_length */
- (binaryfunc)Subscript, /* mp_subscript */
- (objobjargproc)AssSubscript, /* mp_ass_subscript */
-};
-
-static PyMethodDef Methods[] = {
- { "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
- "Makes a deep copy of the class." },
- { "__reduce__", (PyCFunction)Reduce, METH_NOARGS,
- "Outputs picklable representation of the repeated field." },
- { "append", (PyCFunction)Append, METH_O,
- "Appends an object to the repeated container." },
- { "extend", (PyCFunction)Extend, METH_O,
- "Appends objects to the repeated container." },
- { "insert", (PyCFunction)Insert, METH_VARARGS,
- "Appends objects to the repeated container." },
- { "pop", (PyCFunction)Pop, METH_VARARGS,
- "Removes an object from the repeated container and returns it." },
- { "remove", (PyCFunction)Remove, METH_O,
- "Removes an object from the repeated container." },
- { "sort", (PyCFunction)Sort, METH_VARARGS | METH_KEYWORDS,
- "Sorts the repeated container."},
- { NULL, NULL }
-};
-
-} // namespace repeated_scalar_container
-
-PyTypeObject RepeatedScalarContainer_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- FULL_MODULE_NAME ".RepeatedScalarContainer", // tp_name
- sizeof(RepeatedScalarContainer), // tp_basicsize
- 0, // tp_itemsize
- (destructor)repeated_scalar_container::Dealloc, // tp_dealloc
- 0, // tp_print
- 0, // tp_getattr
- 0, // tp_setattr
- 0, // tp_compare
- 0, // tp_repr
- 0, // tp_as_number
- &repeated_scalar_container::SqMethods, // tp_as_sequence
- &repeated_scalar_container::MpMethods, // tp_as_mapping
- PyObject_HashNotImplemented, // tp_hash
- 0, // tp_call
- 0, // tp_str
- 0, // tp_getattro
- 0, // tp_setattro
- 0, // tp_as_buffer
- Py_TPFLAGS_DEFAULT, // tp_flags
- "A Repeated scalar container", // tp_doc
- 0, // tp_traverse
- 0, // tp_clear
- (richcmpfunc)repeated_scalar_container::RichCompare, // tp_richcompare
- 0, // tp_weaklistoffset
- 0, // tp_iter
- 0, // tp_iternext
- repeated_scalar_container::Methods, // tp_methods
- 0, // tp_members
- 0, // tp_getset
- 0, // tp_base
- 0, // tp_dict
- 0, // tp_descr_get
- 0, // tp_descr_set
- 0, // tp_dictoffset
- 0, // tp_init
-};
-
-} // namespace python
-} // namespace protobuf
-} // namespace google
diff --git a/third_party/protobuf/python/google/protobuf/pyext/repeated_scalar_container.h b/third_party/protobuf/python/google/protobuf/pyext/repeated_scalar_container.h
deleted file mode 100644
index 555e621c9b..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/repeated_scalar_container.h
+++ /dev/null
@@ -1,122 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: anuraag@google.com (Anuraag Agrawal)
-// Author: tibell@google.com (Johan Tibell)
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_SCALAR_CONTAINER_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_SCALAR_CONTAINER_H__
-
-#include <Python.h>
-
-#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
-
-#include <google/protobuf/descriptor.h>
-
-namespace google {
-namespace protobuf {
-
-class Message;
-
-#ifdef _SHARED_PTR_H
-using std::shared_ptr;
-#else
-using internal::shared_ptr;
-#endif
-
-namespace python {
-
-struct CMessage;
-
-typedef struct RepeatedScalarContainer {
- PyObject_HEAD;
-
- // This is the top-level C++ Message object that owns the whole
- // proto tree. Every Python RepeatedScalarContainer holds a
- // reference to it in order to keep it alive as long as there's a
- // Python object that references any part of the tree.
- shared_ptr<Message> owner;
-
- // Pointer to the C++ Message that contains this container. The
- // RepeatedScalarContainer does not own this pointer.
- Message* message;
-
- // Weak reference to a parent CMessage object (i.e. may be NULL).
- //
- // Used to make sure all ancestors are also mutable when first
- // modifying the container.
- CMessage* parent;
-
- // Pointer to the parent's descriptor that describes this
- // field. Used together with the parent's message when making a
- // default message instance mutable.
- // The pointer is owned by the global DescriptorPool.
- const FieldDescriptor* parent_field_descriptor;
-} RepeatedScalarContainer;
-
-extern PyTypeObject RepeatedScalarContainer_Type;
-
-namespace repeated_scalar_container {
-
-// Builds a RepeatedScalarContainer object, from a parent message and a
-// field descriptor.
-extern PyObject *NewContainer(
- CMessage* parent, const FieldDescriptor* parent_field_descriptor);
-
-// Appends the scalar 'item' to the end of the container 'self'.
-//
-// Returns None if successful; returns NULL and sets an exception if
-// unsuccessful.
-PyObject* Append(RepeatedScalarContainer* self, PyObject* item);
-
-// Releases the messages in the container to a new message.
-//
-// Returns 0 on success, -1 on failure.
-int Release(RepeatedScalarContainer* self);
-
-// Appends all the elements in the input iterator to the container.
-//
-// Returns None if successful; returns NULL and sets an exception if
-// unsuccessful.
-PyObject* Extend(RepeatedScalarContainer* self, PyObject* value);
-
-// Set the owner field of self and any children of self.
-void SetOwner(RepeatedScalarContainer* self,
- const shared_ptr<Message>& new_owner);
-
-} // namespace repeated_scalar_container
-} // namespace python
-} // namespace protobuf
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_SCALAR_CONTAINER_H__
diff --git a/third_party/protobuf/python/google/protobuf/pyext/scoped_pyobject_ptr.h b/third_party/protobuf/python/google/protobuf/pyext/scoped_pyobject_ptr.h
deleted file mode 100644
index a128cd4c61..0000000000
--- a/third_party/protobuf/python/google/protobuf/pyext/scoped_pyobject_ptr.h
+++ /dev/null
@@ -1,96 +0,0 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Author: tibell@google.com (Johan Tibell)
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_SCOPED_PYOBJECT_PTR_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_SCOPED_PYOBJECT_PTR_H__
-
-#include <google/protobuf/stubs/common.h>
-
-#include <Python.h>
-
-namespace google {
-class ScopedPyObjectPtr {
- public:
- // Constructor. Defaults to initializing with NULL.
- // There is no way to create an uninitialized ScopedPyObjectPtr.
- explicit ScopedPyObjectPtr(PyObject* p = NULL) : ptr_(p) { }
-
- // Destructor. If there is a PyObject object, delete it.
- ~ScopedPyObjectPtr() {
- Py_XDECREF(ptr_);
- }
-
- // Reset. Deletes the current owned object, if any.
- // Then takes ownership of a new object, if given.
- // This function must be called with a reference that you own.
- // this->reset(this->get()) is wrong!
- // this->reset(this->release()) is OK.
- PyObject* reset(PyObject* p = NULL) {
- Py_XDECREF(ptr_);
- ptr_ = p;
- return ptr_;
- }
-
- // Releases ownership of the object.
- // The caller now owns the returned reference.
- PyObject* release() {
- PyObject* p = ptr_;
- ptr_ = NULL;
- return p;
- }
-
- PyObject* operator->() const {
- assert(ptr_ != NULL);
- return ptr_;
- }
-
- PyObject* get() const { return ptr_; }
-
- Py_ssize_t refcnt() const { return Py_REFCNT(ptr_); }
-
- void inc() const { Py_INCREF(ptr_); }
-
- // Comparison operators.
- // These return whether a ScopedPyObjectPtr and a raw pointer
- // refer to the same object, not just to two different but equal
- // objects.
- bool operator==(const PyObject* p) const { return ptr_ == p; }
- bool operator!=(const PyObject* p) const { return ptr_ != p; }
-
- private:
- PyObject* ptr_;
-
- GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ScopedPyObjectPtr);
-};
-
-} // namespace google
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_SCOPED_PYOBJECT_PTR_H__
diff --git a/third_party/protobuf/python/google/protobuf/reflection.py b/third_party/protobuf/python/google/protobuf/reflection.py
deleted file mode 100755
index 0c757264f4..0000000000
--- a/third_party/protobuf/python/google/protobuf/reflection.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# This code is meant to work on Python 2.4 and above only.
-
-"""Contains a metaclass and helper functions used to create
-protocol message classes from Descriptor objects at runtime.
-
-Recall that a metaclass is the "type" of a class.
-(A class is to a metaclass what an instance is to a class.)
-
-In this case, we use the GeneratedProtocolMessageType metaclass
-to inject all the useful functionality into the classes
-output by the protocol compiler at compile-time.
-
-The upshot of all this is that the real implementation
-details for ALL pure-Python protocol buffers are *here in
-this file*.
-"""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-
-from google.protobuf.internal import api_implementation
-from google.protobuf import message
-
-
-if api_implementation.Type() == 'cpp':
- from google.protobuf.pyext import cpp_message as message_impl
-else:
- from google.protobuf.internal import python_message as message_impl
-
-# The type of all Message classes.
-# Part of the public interface.
-#
-# Used by generated files, but clients can also use it at runtime:
-# mydescriptor = pool.FindDescriptor(.....)
-# class MyProtoClass(Message):
-# __metaclass__ = GeneratedProtocolMessageType
-# DESCRIPTOR = mydescriptor
-GeneratedProtocolMessageType = message_impl.GeneratedProtocolMessageType
-
-
-def ParseMessage(descriptor, byte_str):
- """Generate a new Message instance from this Descriptor and a byte string.
-
- Args:
- descriptor: Protobuf Descriptor object
- byte_str: Serialized protocol buffer byte string
-
- Returns:
- Newly created protobuf Message object.
- """
- result_class = MakeClass(descriptor)
- new_msg = result_class()
- new_msg.ParseFromString(byte_str)
- return new_msg
-
-
-def MakeClass(descriptor):
- """Construct a class object for a protobuf described by descriptor.
-
- Composite descriptors are handled by defining the new class as a member of the
- parent class, recursing as deep as necessary.
- This is the dynamic equivalent to:
-
- class Parent(message.Message):
- __metaclass__ = GeneratedProtocolMessageType
- DESCRIPTOR = descriptor
- class Child(message.Message):
- __metaclass__ = GeneratedProtocolMessageType
- DESCRIPTOR = descriptor.nested_types[0]
-
- Sample usage:
- file_descriptor = descriptor_pb2.FileDescriptorProto()
- file_descriptor.ParseFromString(proto2_string)
- msg_descriptor = descriptor.MakeDescriptor(file_descriptor.message_type[0])
- msg_class = reflection.MakeClass(msg_descriptor)
- msg = msg_class()
-
- Args:
- descriptor: A descriptor.Descriptor object describing the protobuf.
- Returns:
- The Message class object described by the descriptor.
- """
- attributes = {}
- for name, nested_type in descriptor.nested_types_by_name.items():
- attributes[name] = MakeClass(nested_type)
-
- attributes[GeneratedProtocolMessageType._DESCRIPTOR_KEY] = descriptor
-
- return GeneratedProtocolMessageType(str(descriptor.name), (message.Message,),
- attributes)
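The reflection.py module removed above is the runtime entry point for turning descriptors into message classes. A minimal sketch of how its MakeClass and ParseMessage helpers are used, based on the docstring sample; serialized_fdp is an assumed placeholder for a serialized FileDescriptorProto obtained elsewhere:

from google.protobuf import descriptor
from google.protobuf import descriptor_pb2
from google.protobuf import reflection

# Assumption: serialized_fdp holds the bytes of a FileDescriptorProto.
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.ParseFromString(serialized_fdp)

# Build a descriptor for the first message type and turn it into a class.
msg_descriptor = descriptor.MakeDescriptor(file_proto.message_type[0])
MsgClass = reflection.MakeClass(msg_descriptor)

# Instantiate directly, or parse wire-format bytes in a single call.
msg = MsgClass()
msg2 = reflection.ParseMessage(msg_descriptor, msg.SerializeToString())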
diff --git a/third_party/protobuf/python/google/protobuf/service.py b/third_party/protobuf/python/google/protobuf/service.py
deleted file mode 100755
index 9e00de7042..0000000000
--- a/third_party/protobuf/python/google/protobuf/service.py
+++ /dev/null
@@ -1,226 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""DEPRECATED: Declares the RPC service interfaces.
-
-This module declares the abstract interfaces underlying proto2 RPC
-services. These are intended to be independent of any particular RPC
-implementation, so that proto2 services can be used on top of a variety
-of implementations. Starting with version 2.3.0, RPC implementations should
-not try to build on these, but should instead provide code generator plugins
-which generate code specific to the particular RPC implementation. This way
-the generated code can be more appropriate for the implementation in use
-and can avoid unnecessary layers of indirection.
-"""
-
-__author__ = 'petar@google.com (Petar Petrov)'
-
-
-class RpcException(Exception):
- """Exception raised on failed blocking RPC method call."""
- pass
-
-
-class Service(object):
-
- """Abstract base interface for protocol-buffer-based RPC services.
-
- Services themselves are abstract classes (implemented either by servers or as
- stubs), but they subclass this base interface. The methods of this
- interface can be used to call the methods of the service without knowing
- its exact type at compile time (analogous to the Message interface).
- """
-
- def GetDescriptor():
- """Retrieves this service's descriptor."""
- raise NotImplementedError
-
- def CallMethod(self, method_descriptor, rpc_controller,
- request, done):
- """Calls a method of the service specified by method_descriptor.
-
- If "done" is None then the call is blocking and the response
- message will be returned directly. Otherwise the call is asynchronous
- and "done" will later be called with the response value.
-
- In the blocking case, RpcException will be raised on error.
-
- Preconditions:
- * method_descriptor.service == GetDescriptor
-    * request is of the exact same class as returned by
- GetRequestClass(method).
- * After the call has started, the request must not be modified.
- * "rpc_controller" is of the correct type for the RPC implementation being
- used by this Service. For stubs, the "correct type" depends on the
- RpcChannel which the stub is using.
-
- Postconditions:
- * "done" will be called when the method is complete. This may be
- before CallMethod() returns or it may be at some point in the future.
- * If the RPC failed, the response value passed to "done" will be None.
- Further details about the failure can be found by querying the
- RpcController.
- """
- raise NotImplementedError
-
- def GetRequestClass(self, method_descriptor):
- """Returns the class of the request message for the specified method.
-
-    CallMethod() requires that the request is of a particular subclass of
-    Message. GetRequestClass() returns the class object of that required
-    type.
-
- Example:
- method = service.GetDescriptor().FindMethodByName("Foo")
- request = stub.GetRequestClass(method)()
- request.ParseFromString(input)
- service.CallMethod(method, request, callback)
- """
- raise NotImplementedError
-
- def GetResponseClass(self, method_descriptor):
- """Returns the class of the response message for the specified method.
-
- This method isn't really needed, as the RpcChannel's CallMethod constructs
- the response protocol message. It's provided anyway in case it is useful
- for the caller to know the response type in advance.
- """
- raise NotImplementedError
-
-
-class RpcController(object):
-
- """An RpcController mediates a single method call.
-
- The primary purpose of the controller is to provide a way to manipulate
- settings specific to the RPC implementation and to find out about RPC-level
- errors. The methods provided by the RpcController interface are intended
- to be a "least common denominator" set of features which we expect all
- implementations to support. Specific implementations may provide more
- advanced features (e.g. deadline propagation).
- """
-
- # Client-side methods below
-
- def Reset(self):
- """Resets the RpcController to its initial state.
-
- After the RpcController has been reset, it may be reused in
- a new call. Must not be called while an RPC is in progress.
- """
- raise NotImplementedError
-
- def Failed(self):
- """Returns true if the call failed.
-
- After a call has finished, returns true if the call failed. The possible
- reasons for failure depend on the RPC implementation. Failed() must not
- be called before a call has finished. If Failed() returns true, the
- contents of the response message are undefined.
- """
- raise NotImplementedError
-
- def ErrorText(self):
- """If Failed is true, returns a human-readable description of the error."""
- raise NotImplementedError
-
- def StartCancel(self):
- """Initiate cancellation.
-
- Advises the RPC system that the caller desires that the RPC call be
- canceled. The RPC system may cancel it immediately, may wait awhile and
- then cancel it, or may not even cancel the call at all. If the call is
- canceled, the "done" callback will still be called and the RpcController
- will indicate that the call failed at that time.
- """
- raise NotImplementedError
-
- # Server-side methods below
-
- def SetFailed(self, reason):
- """Sets a failure reason.
-
- Causes Failed() to return true on the client side. "reason" will be
- incorporated into the message returned by ErrorText(). If you find
- you need to return machine-readable information about failures, you
- should incorporate it into your response protocol buffer and should
- NOT call SetFailed().
- """
- raise NotImplementedError
-
- def IsCanceled(self):
- """Checks if the client cancelled the RPC.
-
- If true, indicates that the client canceled the RPC, so the server may
- as well give up on replying to it. The server should still call the
- final "done" callback.
- """
- raise NotImplementedError
-
- def NotifyOnCancel(self, callback):
- """Sets a callback to invoke on cancel.
-
- Asks that the given callback be called when the RPC is canceled. The
- callback will always be called exactly once. If the RPC completes without
- being canceled, the callback will be called after completion. If the RPC
- has already been canceled when NotifyOnCancel() is called, the callback
- will be called immediately.
-
- NotifyOnCancel() must be called no more than once per request.
- """
- raise NotImplementedError
-
-
-class RpcChannel(object):
-
- """Abstract interface for an RPC channel.
-
- An RpcChannel represents a communication line to a service which can be used
- to call that service's methods. The service may be running on another
- machine. Normally, you should not use an RpcChannel directly, but instead
-  construct a stub Service wrapping it.
-
-  Example:
- RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
- RpcController controller = rpcImpl.Controller()
- MyService service = MyService_Stub(channel)
- service.MyMethod(controller, request, callback)
- """
-
- def CallMethod(self, method_descriptor, rpc_controller,
- request, response_class, done):
- """Calls the method identified by the descriptor.
-
- Call the given method of the remote service. The signature of this
- procedure looks the same as Service.CallMethod(), but the requirements
- are less strict in one important way: the request object doesn't have to
- be of any specific class as long as its descriptor is method.input_type.
- """
- raise NotImplementedError
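The removed service.py only declares the abstract Service, RpcController and RpcChannel interfaces; it ships no RPC transport of its own. A hedged sketch of how a caller drives a generated stub through these interfaces, expanding the RpcChannel docstring example (rpc_impl, my_service_pb2, MyService_Stub and MyMethod are illustrative names, not part of this module):

# Assumption: rpc_impl provides a concrete RpcChannel/RpcController pair and
# my_service_pb2 is a generated module containing MyService_Stub.
channel = rpc_impl.Channel('remotehost.example.com:1234')
controller = rpc_impl.Controller()
stub = my_service_pb2.MyService_Stub(channel)

def done(response):
    # Per the RpcController contract above, response is None on failure.
    if controller.Failed():
        print('RPC failed: ' + controller.ErrorText())
    else:
        print(response)

stub.MyMethod(controller, my_service_pb2.MyRequest(), done)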
diff --git a/third_party/protobuf/python/google/protobuf/service_reflection.py b/third_party/protobuf/python/google/protobuf/service_reflection.py
deleted file mode 100755
index 1c3636afe0..0000000000
--- a/third_party/protobuf/python/google/protobuf/service_reflection.py
+++ /dev/null
@@ -1,284 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Contains metaclasses used to create protocol service and service stub
-classes from ServiceDescriptor objects at runtime.
-
-The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
-inject all useful functionality into the classes output by the protocol
-compiler at compile-time.
-"""
-
-__author__ = 'petar@google.com (Petar Petrov)'
-
-
-class GeneratedServiceType(type):
-
- """Metaclass for service classes created at runtime from ServiceDescriptors.
-
- Implementations for all methods described in the Service class are added here
- by this class. We also create properties to allow getting/setting all fields
- in the protocol message.
-
- The protocol compiler currently uses this metaclass to create protocol service
- classes at runtime. Clients can also manually create their own classes at
- runtime, as in this example:
-
- mydescriptor = ServiceDescriptor(.....)
- class MyProtoService(service.Service):
- __metaclass__ = GeneratedServiceType
- DESCRIPTOR = mydescriptor
- myservice_instance = MyProtoService()
- ...
- """
-
- _DESCRIPTOR_KEY = 'DESCRIPTOR'
-
- def __init__(cls, name, bases, dictionary):
- """Creates a message service class.
-
- Args:
- name: Name of the class (ignored, but required by the metaclass
- protocol).
- bases: Base classes of the class being constructed.
- dictionary: The class dictionary of the class being constructed.
- dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
- describing this protocol service type.
- """
- # Don't do anything if this class doesn't have a descriptor. This happens
- # when a service class is subclassed.
- if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
- return
- descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
- service_builder = _ServiceBuilder(descriptor)
- service_builder.BuildService(cls)
-
-
-class GeneratedServiceStubType(GeneratedServiceType):
-
- """Metaclass for service stubs created at runtime from ServiceDescriptors.
-
- This class has similar responsibilities as GeneratedServiceType, except that
- it creates the service stub classes.
- """
-
- _DESCRIPTOR_KEY = 'DESCRIPTOR'
-
- def __init__(cls, name, bases, dictionary):
- """Creates a message service stub class.
-
- Args:
-      name: Name of the class (ignored here).
- bases: Base classes of the class being constructed.
- dictionary: The class dictionary of the class being constructed.
- dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
- describing this protocol service type.
- """
- super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
- # Don't do anything if this class doesn't have a descriptor. This happens
- # when a service stub is subclassed.
- if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
- return
- descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
- service_stub_builder = _ServiceStubBuilder(descriptor)
- service_stub_builder.BuildServiceStub(cls)
-
-
-class _ServiceBuilder(object):
-
- """This class constructs a protocol service class using a service descriptor.
-
- Given a service descriptor, this class constructs a class that represents
- the specified service descriptor. One service builder instance constructs
- exactly one service class. That means all instances of that class share the
- same builder.
- """
-
- def __init__(self, service_descriptor):
- """Initializes an instance of the service class builder.
-
- Args:
- service_descriptor: ServiceDescriptor to use when constructing the
- service class.
- """
- self.descriptor = service_descriptor
-
- def BuildService(self, cls):
- """Constructs the service class.
-
- Args:
- cls: The class that will be constructed.
- """
-
- # CallMethod needs to operate with an instance of the Service class. This
- # internal wrapper function exists only to be able to pass the service
- # instance to the method that does the real CallMethod work.
- def _WrapCallMethod(srvc, method_descriptor,
- rpc_controller, request, callback):
- return self._CallMethod(srvc, method_descriptor,
- rpc_controller, request, callback)
- self.cls = cls
- cls.CallMethod = _WrapCallMethod
- cls.GetDescriptor = staticmethod(lambda: self.descriptor)
- cls.GetDescriptor.__doc__ = "Returns the service descriptor."
- cls.GetRequestClass = self._GetRequestClass
- cls.GetResponseClass = self._GetResponseClass
- for method in self.descriptor.methods:
- setattr(cls, method.name, self._GenerateNonImplementedMethod(method))
-
- def _CallMethod(self, srvc, method_descriptor,
- rpc_controller, request, callback):
- """Calls the method described by a given method descriptor.
-
- Args:
- srvc: Instance of the service for which this method is called.
- method_descriptor: Descriptor that represent the method to call.
- rpc_controller: RPC controller to use for this method's execution.
- request: Request protocol message.
- callback: A callback to invoke after the method has completed.
- """
- if method_descriptor.containing_service != self.descriptor:
- raise RuntimeError(
- 'CallMethod() given method descriptor for wrong service type.')
- method = getattr(srvc, method_descriptor.name)
- return method(rpc_controller, request, callback)
-
- def _GetRequestClass(self, method_descriptor):
- """Returns the class of the request protocol message.
-
- Args:
- method_descriptor: Descriptor of the method for which to return the
- request protocol message class.
-
- Returns:
- A class that represents the input protocol message of the specified
- method.
- """
- if method_descriptor.containing_service != self.descriptor:
- raise RuntimeError(
- 'GetRequestClass() given method descriptor for wrong service type.')
- return method_descriptor.input_type._concrete_class
-
- def _GetResponseClass(self, method_descriptor):
- """Returns the class of the response protocol message.
-
- Args:
- method_descriptor: Descriptor of the method for which to return the
- response protocol message class.
-
- Returns:
- A class that represents the output protocol message of the specified
- method.
- """
- if method_descriptor.containing_service != self.descriptor:
- raise RuntimeError(
- 'GetResponseClass() given method descriptor for wrong service type.')
- return method_descriptor.output_type._concrete_class
-
- def _GenerateNonImplementedMethod(self, method):
- """Generates and returns a method that can be set for a service methods.
-
- Args:
- method: Descriptor of the service method for which a method is to be
- generated.
-
- Returns:
- A method that can be added to the service class.
- """
- return lambda inst, rpc_controller, request, callback: (
- self._NonImplementedMethod(method.name, rpc_controller, callback))
-
- def _NonImplementedMethod(self, method_name, rpc_controller, callback):
- """The body of all methods in the generated service class.
-
- Args:
- method_name: Name of the method being executed.
- rpc_controller: RPC controller used to execute this method.
- callback: A callback which will be invoked when the method finishes.
- """
- rpc_controller.SetFailed('Method %s not implemented.' % method_name)
- callback(None)
-
-
-class _ServiceStubBuilder(object):
-
- """Constructs a protocol service stub class using a service descriptor.
-
- Given a service descriptor, this class constructs a suitable stub class.
- A stub is just a type-safe wrapper around an RpcChannel which emulates a
- local implementation of the service.
-
- One service stub builder instance constructs exactly one class. It means all
- instances of that class share the same service stub builder.
- """
-
- def __init__(self, service_descriptor):
- """Initializes an instance of the service stub class builder.
-
- Args:
- service_descriptor: ServiceDescriptor to use when constructing the
- stub class.
- """
- self.descriptor = service_descriptor
-
- def BuildServiceStub(self, cls):
- """Constructs the stub class.
-
- Args:
- cls: The class that will be constructed.
- """
-
- def _ServiceStubInit(stub, rpc_channel):
- stub.rpc_channel = rpc_channel
- self.cls = cls
- cls.__init__ = _ServiceStubInit
- for method in self.descriptor.methods:
- setattr(cls, method.name, self._GenerateStubMethod(method))
-
- def _GenerateStubMethod(self, method):
- return (lambda inst, rpc_controller, request, callback=None:
- self._StubMethod(inst, method, rpc_controller, request, callback))
-
- def _StubMethod(self, stub, method_descriptor,
- rpc_controller, request, callback):
- """The body of all service methods in the generated stub class.
-
- Args:
- stub: Stub instance.
- method_descriptor: Descriptor of the invoked method.
- rpc_controller: Rpc controller to execute the method.
- request: Request protocol message.
- callback: A callback to execute when the method finishes.
- Returns:
- Response message (in case of blocking call).
- """
- return stub.rpc_channel.CallMethod(
- method_descriptor, rpc_controller, request,
- method_descriptor.output_type._concrete_class, callback)
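service_reflection.py, also deleted here, supplies the metaclasses that inject CallMethod, GetDescriptor, GetRequestClass and GetResponseClass into service classes at runtime. A minimal sketch mirroring the GeneratedServiceType docstring, assuming my_descriptor is an existing descriptor.ServiceDescriptor (Python 2 metaclass syntax, as used in the removed code):

from google.protobuf import service
from google.protobuf import service_reflection

# Assumption: my_descriptor is a descriptor.ServiceDescriptor obtained from a
# generated _pb2 module or a descriptor pool.
class MyProtoService(service.Service):
  __metaclass__ = service_reflection.GeneratedServiceType
  DESCRIPTOR = my_descriptor

my_service = MyProtoService()
# Every method declared in the descriptor now exists on the class. Calling one
# that has not been overridden reports 'Method ... not implemented.' through
# the supplied RpcController and invokes the callback with None.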
diff --git a/third_party/protobuf/python/google/protobuf/symbol_database.py b/third_party/protobuf/python/google/protobuf/symbol_database.py
deleted file mode 100644
index 87760f2630..0000000000
--- a/third_party/protobuf/python/google/protobuf/symbol_database.py
+++ /dev/null
@@ -1,185 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""A database of Python protocol buffer generated symbols.
-
-SymbolDatabase makes it easy to create new instances of a registered type, given
-only the type's protocol buffer symbol name. Once all symbols are registered,
-they can be accessed using either the MessageFactory interface which
-SymbolDatabase exposes, or the DescriptorPool interface of the underlying
-pool.
-
-Example usage:
-
- db = symbol_database.SymbolDatabase()
-
- # Register symbols of interest, from one or multiple files.
- db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
- db.RegisterMessage(my_proto_pb2.MyMessage)
- db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
-
- # The database can be used as a MessageFactory, to generate types based on
- # their name:
- types = db.GetMessages(['my_proto.proto'])
- my_message_instance = types['MyMessage']()
-
- # The database's underlying descriptor pool can be queried, so it's not
- # necessary to know a type's filename to be able to generate it:
- filename = db.pool.FindFileContainingSymbol('MyMessage')
- my_message_instance = db.GetMessages([filename])['MyMessage']()
-
- # This functionality is also provided directly via a convenience method:
- my_message_instance = db.GetSymbol('MyMessage')()
-"""
-
-
-from google.protobuf import descriptor_pool
-
-
-class SymbolDatabase(object):
- """A database of Python generated symbols.
-
- SymbolDatabase also models message_factory.MessageFactory.
-
- The symbol database can be used to keep a global registry of all protocol
- buffer types used within a program.
- """
-
- def __init__(self, pool=None):
- """Constructor."""
-
- self._symbols = {}
- self._symbols_by_file = {}
- self.pool = pool or descriptor_pool.Default()
-
- def RegisterMessage(self, message):
- """Registers the given message type in the local database.
-
- Args:
- message: a message.Message, to be registered.
-
- Returns:
- The provided message.
- """
-
- desc = message.DESCRIPTOR
- self._symbols[desc.full_name] = message
- if desc.file.name not in self._symbols_by_file:
- self._symbols_by_file[desc.file.name] = {}
- self._symbols_by_file[desc.file.name][desc.full_name] = message
- self.pool.AddDescriptor(desc)
- return message
-
- def RegisterEnumDescriptor(self, enum_descriptor):
- """Registers the given enum descriptor in the local database.
-
- Args:
- enum_descriptor: a descriptor.EnumDescriptor.
-
- Returns:
- The provided descriptor.
- """
- self.pool.AddEnumDescriptor(enum_descriptor)
- return enum_descriptor
-
- def RegisterFileDescriptor(self, file_descriptor):
- """Registers the given file descriptor in the local database.
-
- Args:
- file_descriptor: a descriptor.FileDescriptor.
-
- Returns:
- The provided descriptor.
- """
- self.pool.AddFileDescriptor(file_descriptor)
-
- def GetSymbol(self, symbol):
- """Tries to find a symbol in the local database.
-
-    Currently, this method only returns message.Message instances; however, it
-    may be extended in the future to support other symbol types.
-
- Args:
- symbol: A str, a protocol buffer symbol.
-
- Returns:
- A Python class corresponding to the symbol.
-
- Raises:
- KeyError: if the symbol could not be found.
- """
-
- return self._symbols[symbol]
-
- def GetPrototype(self, descriptor):
- """Builds a proto2 message class based on the passed in descriptor.
-
- Passing a descriptor with a fully qualified name matching a previous
- invocation will cause the same class to be returned.
-
- Args:
- descriptor: The descriptor to build from.
-
- Returns:
- A class describing the passed in descriptor.
- """
-
- return self.GetSymbol(descriptor.full_name)
-
- def GetMessages(self, files):
- """Gets all the messages from a specified file.
-
- This will find and resolve dependencies, failing if they are not registered
- in the symbol database.
-
-
- Args:
- files: The file names to extract messages from.
-
- Returns:
- A dictionary mapping proto names to the message classes. This will include
- any dependent messages as well as any messages defined in the same file as
- a specified message.
-
- Raises:
- KeyError: if a file could not be found.
- """
-
- result = {}
- for f in files:
- result.update(self._symbols_by_file[f])
- return result
-
-_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())
-
-
-def Default():
- """Returns the default SymbolDatabase."""
- return _DEFAULT
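symbol_database.py, removed above, keeps a registry that maps protocol buffer symbol names to generated classes. A short sketch of the registration/lookup round trip described in its docstring; my_proto_pb2 and my_proto.MyMessage are assumed names for a generated module and its message:

from google.protobuf import symbol_database

# Assumption: my_proto_pb2 is a generated module defining MyMessage in
# package 'my_proto'.
db = symbol_database.Default()
db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
db.RegisterMessage(my_proto_pb2.MyMessage)

# Recover the class later from its full symbol name alone...
msg = db.GetSymbol('my_proto.MyMessage')()

# ...or fetch every message registered from a given .proto file.
messages = db.GetMessages(['my_proto.proto'])
another = messages['my_proto.MyMessage']()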
diff --git a/third_party/protobuf/python/google/protobuf/text_encoding.py b/third_party/protobuf/python/google/protobuf/text_encoding.py
deleted file mode 100644
index 9899563825..0000000000
--- a/third_party/protobuf/python/google/protobuf/text_encoding.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Encoding related utilities."""
-import re
-
-import six
-
-# Lookup table for utf8
-_cescape_utf8_to_str = [chr(i) for i in range(0, 256)]
-_cescape_utf8_to_str[9] = r'\t' # optional escape
-_cescape_utf8_to_str[10] = r'\n' # optional escape
-_cescape_utf8_to_str[13] = r'\r' # optional escape
-_cescape_utf8_to_str[39] = r"\'" # optional escape
-
-_cescape_utf8_to_str[34] = r'\"' # necessary escape
-_cescape_utf8_to_str[92] = r'\\' # necessary escape
-
-# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32)
-_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] +
- [chr(i) for i in range(32, 127)] +
- [r'\%03o' % i for i in range(127, 256)])
-_cescape_byte_to_str[9] = r'\t' # optional escape
-_cescape_byte_to_str[10] = r'\n' # optional escape
-_cescape_byte_to_str[13] = r'\r' # optional escape
-_cescape_byte_to_str[39] = r"\'" # optional escape
-
-_cescape_byte_to_str[34] = r'\"' # necessary escape
-_cescape_byte_to_str[92] = r'\\' # necessary escape
-
-
-def CEscape(text, as_utf8):
- """Escape a bytes string for use in an ascii protocol buffer.
-
- text.encode('string_escape') does not seem to satisfy our needs as it
- encodes unprintable characters using two-digit hex escapes whereas our
- C++ unescaping function allows hex escapes to be any length. So,
- "\0011".encode('string_escape') ends up being "\\x011", which will be
- decoded in C++ as a single-character string with char code 0x11.
-
- Args:
- text: A byte string to be escaped
- as_utf8: Specifies if result should be returned in UTF-8 encoding
- Returns:
- Escaped string
- """
- # PY3 hack: make Ord work for str and bytes:
- # //platforms/networking/data uses unicode here, hence basestring.
- Ord = ord if isinstance(text, six.string_types) else lambda x: x
- if as_utf8:
- return ''.join(_cescape_utf8_to_str[Ord(c)] for c in text)
- return ''.join(_cescape_byte_to_str[Ord(c)] for c in text)
-
-
-_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
-_cescape_highbit_to_str = ([chr(i) for i in range(0, 127)] +
- [r'\%03o' % i for i in range(127, 256)])
-
-
-def CUnescape(text):
- """Unescape a text string with C-style escape sequences to UTF-8 bytes."""
-
- def ReplaceHex(m):
-    # Only replace the match if the number of leading backslashes is odd, i.e.
- # the slash itself is not escaped.
- if len(m.group(1)) & 1:
- return m.group(1) + 'x0' + m.group(2)
- return m.group(0)
-
- # This is required because the 'string_escape' encoding doesn't
- # allow single-digit hex escapes (like '\xf').
- result = _CUNESCAPE_HEX.sub(ReplaceHex, text)
-
- if str is bytes: # PY2
- return result.decode('string_escape')
- result = ''.join(_cescape_highbit_to_str[ord(c)] for c in result)
- return (result.encode('ascii') # Make it bytes to allow decode.
- .decode('unicode_escape')
- # Make it bytes again to return the proper type.
- .encode('raw_unicode_escape'))
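text_encoding.py's CEscape/CUnescape pair, deleted above, converts between raw bytes and the C-style escaped strings used by the text format. A small round-trip sketch; the value shown in the comment is what the lookup tables above produce for non-UTF-8 escaping:

from google.protobuf import text_encoding

raw = b'\x00\x01"quote" and back\\slash\n'
escaped = text_encoding.CEscape(raw, as_utf8=False)
# escaped is now the text-format string: \000\001\"quote\" and back\\slash\n
assert text_encoding.CUnescape(escaped) == raw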
diff --git a/third_party/protobuf/python/google/protobuf/text_format.py b/third_party/protobuf/python/google/protobuf/text_format.py
deleted file mode 100755
index 8d256076c2..0000000000
--- a/third_party/protobuf/python/google/protobuf/text_format.py
+++ /dev/null
@@ -1,1112 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Contains routines for printing protocol messages in text format.
-
-Simple usage example:
-
- # Create a proto object and serialize it to a text proto string.
- message = my_proto_pb2.MyMessage(foo='bar')
- text_proto = text_format.MessageToString(message)
-
- # Parse a text proto string.
- message = text_format.Parse(text_proto, my_proto_pb2.MyMessage())
-"""
-
-__author__ = 'kenton@google.com (Kenton Varda)'
-
-import io
-import re
-
-import six
-
-if six.PY3:
- long = int
-
-from google.protobuf.internal import type_checkers
-from google.protobuf import descriptor
-from google.protobuf import text_encoding
-
-__all__ = ['MessageToString', 'PrintMessage', 'PrintField',
- 'PrintFieldValue', 'Merge']
-
-
-_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(),
- type_checkers.Int32ValueChecker(),
- type_checkers.Uint64ValueChecker(),
- type_checkers.Int64ValueChecker())
-_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?', re.IGNORECASE)
-_FLOAT_NAN = re.compile('nanf?', re.IGNORECASE)
-_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
- descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
-_QUOTES = frozenset(("'", '"'))
-
-
-class Error(Exception):
- """Top-level module error for text_format."""
-
-
-class ParseError(Error):
- """Thrown in case of text parsing error."""
-
-
-class TextWriter(object):
- def __init__(self, as_utf8):
- if six.PY2:
- self._writer = io.BytesIO()
- else:
- self._writer = io.StringIO()
-
- def write(self, val):
- if six.PY2:
- if isinstance(val, six.text_type):
- val = val.encode('utf-8')
- return self._writer.write(val)
-
- def close(self):
- return self._writer.close()
-
- def getvalue(self):
- return self._writer.getvalue()
-
-
-def MessageToString(message, as_utf8=False, as_one_line=False,
- pointy_brackets=False, use_index_order=False,
- float_format=None):
- """Convert protobuf message to text format.
-
- Floating point values can be formatted compactly with 15 digits of
- precision (which is the most that IEEE 754 "double" can guarantee)
- using float_format='.15g'. To ensure that converting to text and back to a
- proto will result in an identical value, float_format='.17g' should be used.
-
- Args:
- message: The protocol buffers message.
- as_utf8: Produce text output in UTF8 format.
- as_one_line: Don't introduce newlines between fields.
- pointy_brackets: If True, use angle brackets instead of curly braces for
- nesting.
- use_index_order: If True, print fields of a proto message using the order
- defined in source code instead of the field number. By default, use the
- field number order.
- float_format: If set, use this to specify floating point number formatting
- (per the "Format Specification Mini-Language"); otherwise, str() is used.
-
- Returns:
- A string of the text formatted protocol buffer message.
- """
- out = TextWriter(as_utf8)
- PrintMessage(message, out, as_utf8=as_utf8, as_one_line=as_one_line,
- pointy_brackets=pointy_brackets,
- use_index_order=use_index_order,
- float_format=float_format)
- result = out.getvalue()
- out.close()
- if as_one_line:
- return result.rstrip()
- return result
-
-
-def _IsMapEntry(field):
- return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
- field.message_type.has_options and
- field.message_type.GetOptions().map_entry)
-
-
-def PrintMessage(message, out, indent=0, as_utf8=False, as_one_line=False,
- pointy_brackets=False, use_index_order=False,
- float_format=None):
- fields = message.ListFields()
- if use_index_order:
- fields.sort(key=lambda x: x[0].index)
- for field, value in fields:
- if _IsMapEntry(field):
- for key in sorted(value):
-        # This is slow for maps with submessage entries because it copies the
- # entire tree. Unfortunately this would take significant refactoring
- # of this file to work around.
- #
- # TODO(haberman): refactor and optimize if this becomes an issue.
- entry_submsg = field.message_type._concrete_class(
- key=key, value=value[key])
- PrintField(field, entry_submsg, out, indent, as_utf8, as_one_line,
- pointy_brackets=pointy_brackets,
- use_index_order=use_index_order, float_format=float_format)
- elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
- for element in value:
- PrintField(field, element, out, indent, as_utf8, as_one_line,
- pointy_brackets=pointy_brackets,
- use_index_order=use_index_order,
- float_format=float_format)
- else:
- PrintField(field, value, out, indent, as_utf8, as_one_line,
- pointy_brackets=pointy_brackets,
- use_index_order=use_index_order,
- float_format=float_format)
-
-
-def PrintField(field, value, out, indent=0, as_utf8=False, as_one_line=False,
- pointy_brackets=False, use_index_order=False, float_format=None):
- """Print a single field name/value pair. For repeated fields, the value
- should be a single element.
- """
-
- out.write(' ' * indent)
- if field.is_extension:
- out.write('[')
- if (field.containing_type.GetOptions().message_set_wire_format and
- field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
- field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL):
- out.write(field.message_type.full_name)
- else:
- out.write(field.full_name)
- out.write(']')
- elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
- # For groups, use the capitalized name.
- out.write(field.message_type.name)
- else:
- out.write(field.name)
-
- if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
- # The colon is optional in this case, but our cross-language golden files
- # don't include it.
- out.write(': ')
-
- PrintFieldValue(field, value, out, indent, as_utf8, as_one_line,
- pointy_brackets=pointy_brackets,
- use_index_order=use_index_order,
- float_format=float_format)
- if as_one_line:
- out.write(' ')
- else:
- out.write('\n')
-
-
-def PrintFieldValue(field, value, out, indent=0, as_utf8=False,
- as_one_line=False, pointy_brackets=False,
- use_index_order=False,
- float_format=None):
- """Print a single field value (not including name). For repeated fields,
- the value should be a single element."""
-
- if pointy_brackets:
- openb = '<'
- closeb = '>'
- else:
- openb = '{'
- closeb = '}'
-
- if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
- if as_one_line:
- out.write(' %s ' % openb)
- PrintMessage(value, out, indent, as_utf8, as_one_line,
- pointy_brackets=pointy_brackets,
- use_index_order=use_index_order,
- float_format=float_format)
- out.write(closeb)
- else:
- out.write(' %s\n' % openb)
- PrintMessage(value, out, indent + 2, as_utf8, as_one_line,
- pointy_brackets=pointy_brackets,
- use_index_order=use_index_order,
- float_format=float_format)
- out.write(' ' * indent + closeb)
- elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
- enum_value = field.enum_type.values_by_number.get(value, None)
- if enum_value is not None:
- out.write(enum_value.name)
- else:
- out.write(str(value))
- elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
- out.write('\"')
- if isinstance(value, six.text_type):
- out_value = value.encode('utf-8')
- else:
- out_value = value
- if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
- # We need to escape non-UTF8 chars in TYPE_BYTES field.
- out_as_utf8 = False
- else:
- out_as_utf8 = as_utf8
- out.write(text_encoding.CEscape(out_value, out_as_utf8))
- out.write('\"')
- elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
- if value:
- out.write('true')
- else:
- out.write('false')
- elif field.cpp_type in _FLOAT_TYPES and float_format is not None:
- out.write('{1:{0}}'.format(float_format, value))
- else:
- out.write(str(value))
-
-
-def Parse(text, message, allow_unknown_extension=False):
- """Parses an text representation of a protocol message into a message.
-
- Args:
- text: Message text representation.
- message: A protocol buffer message to merge into.
- allow_unknown_extension: if True, skip over missing extensions and keep
- parsing
-
- Returns:
- The same message passed as argument.
-
- Raises:
- ParseError: On text parsing problems.
- """
- if not isinstance(text, str):
- text = text.decode('utf-8')
- return ParseLines(text.split('\n'), message, allow_unknown_extension)
-
-
-def Merge(text, message, allow_unknown_extension=False):
- """Parses an text representation of a protocol message into a message.
-
- Like Parse(), but allows repeated values for a non-repeated field, and uses
- the last one.
-
- Args:
- text: Message text representation.
- message: A protocol buffer message to merge into.
- allow_unknown_extension: if True, skip over missing extensions and keep
- parsing
-
- Returns:
- The same message passed as argument.
-
- Raises:
- ParseError: On text parsing problems.
- """
- return MergeLines(text.split('\n'), message, allow_unknown_extension)
-
-
-def ParseLines(lines, message, allow_unknown_extension=False):
- """Parses an text representation of a protocol message into a message.
-
- Args:
- lines: An iterable of lines of a message's text representation.
- message: A protocol buffer message to merge into.
- allow_unknown_extension: if True, skip over missing extensions and keep
- parsing
-
- Returns:
- The same message passed as argument.
-
- Raises:
- ParseError: On text parsing problems.
- """
- _ParseOrMerge(lines, message, False, allow_unknown_extension)
- return message
-
-
-def MergeLines(lines, message, allow_unknown_extension=False):
- """Parses an text representation of a protocol message into a message.
-
- Args:
- lines: An iterable of lines of a message's text representation.
- message: A protocol buffer message to merge into.
- allow_unknown_extension: if True, skip over missing extensions and keep
- parsing
-
- Returns:
- The same message passed as argument.
-
- Raises:
- ParseError: On text parsing problems.
- """
- _ParseOrMerge(lines, message, True, allow_unknown_extension)
- return message
-
-
-def _ParseOrMerge(lines,
- message,
- allow_multiple_scalars,
- allow_unknown_extension=False):
- """Converts an text representation of a protocol message into a message.
-
- Args:
- lines: Lines of a message's text representation.
- message: A protocol buffer message to merge into.
- allow_multiple_scalars: Determines if repeated values for a non-repeated
- field are permitted, e.g., the string "foo: 1 foo: 2" for a
- required/optional field named "foo".
- allow_unknown_extension: if True, skip over missing extensions and keep
- parsing
-
- Raises:
- ParseError: On text parsing problems.
- """
- tokenizer = _Tokenizer(lines)
- while not tokenizer.AtEnd():
- _MergeField(tokenizer, message, allow_multiple_scalars,
- allow_unknown_extension)
-
-
-def _MergeField(tokenizer,
- message,
- allow_multiple_scalars,
- allow_unknown_extension=False):
- """Merges a single protocol message field into a message.
-
- Args:
- tokenizer: A tokenizer to parse the field name and values.
- message: A protocol message to record the data.
- allow_multiple_scalars: Determines if repeated values for a non-repeated
- field are permitted, e.g., the string "foo: 1 foo: 2" for a
- required/optional field named "foo".
- allow_unknown_extension: if True, skip over missing extensions and keep
- parsing
-
- Raises:
- ParseError: In case of text parsing problems.
- """
- message_descriptor = message.DESCRIPTOR
- if (hasattr(message_descriptor, 'syntax') and
- message_descriptor.syntax == 'proto3'):
- # Proto3 doesn't represent presence so we can't test if multiple
- # scalars have occurred. We have to allow them.
- allow_multiple_scalars = True
- if tokenizer.TryConsume('['):
- name = [tokenizer.ConsumeIdentifier()]
- while tokenizer.TryConsume('.'):
- name.append(tokenizer.ConsumeIdentifier())
- name = '.'.join(name)
-
- if not message_descriptor.is_extendable:
- raise tokenizer.ParseErrorPreviousToken(
- 'Message type "%s" does not have extensions.' %
- message_descriptor.full_name)
- # pylint: disable=protected-access
- field = message.Extensions._FindExtensionByName(name)
- # pylint: enable=protected-access
- if not field:
- if allow_unknown_extension:
- field = None
- else:
- raise tokenizer.ParseErrorPreviousToken(
- 'Extension "%s" not registered.' % name)
- elif message_descriptor != field.containing_type:
- raise tokenizer.ParseErrorPreviousToken(
- 'Extension "%s" does not extend message type "%s".' % (
- name, message_descriptor.full_name))
-
- tokenizer.Consume(']')
-
- else:
- name = tokenizer.ConsumeIdentifier()
- field = message_descriptor.fields_by_name.get(name, None)
-
- # Group names are expected to be capitalized as they appear in the
- # .proto file, which actually matches their type names, not their field
- # names.
- if not field:
- field = message_descriptor.fields_by_name.get(name.lower(), None)
- if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
- field = None
-
- if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
- field.message_type.name != name):
- field = None
-
- if not field:
- raise tokenizer.ParseErrorPreviousToken(
- 'Message type "%s" has no field named "%s".' % (
- message_descriptor.full_name, name))
-
- if field and field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
- is_map_entry = _IsMapEntry(field)
- tokenizer.TryConsume(':')
-
- if tokenizer.TryConsume('<'):
- end_token = '>'
- else:
- tokenizer.Consume('{')
- end_token = '}'
-
- if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
- if field.is_extension:
- sub_message = message.Extensions[field].add()
- elif is_map_entry:
- sub_message = field.message_type._concrete_class()
- else:
- sub_message = getattr(message, field.name).add()
- else:
- if field.is_extension:
- sub_message = message.Extensions[field]
- else:
- sub_message = getattr(message, field.name)
- sub_message.SetInParent()
-
- while not tokenizer.TryConsume(end_token):
- if tokenizer.AtEnd():
- raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token))
- _MergeField(tokenizer, sub_message, allow_multiple_scalars,
- allow_unknown_extension)
-
- if is_map_entry:
- value_cpptype = field.message_type.fields_by_name['value'].cpp_type
- if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
- value = getattr(message, field.name)[sub_message.key]
- value.MergeFrom(sub_message.value)
- else:
- getattr(message, field.name)[sub_message.key] = sub_message.value
- elif field:
- tokenizer.Consume(':')
- if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and
- tokenizer.TryConsume('[')):
- # Short repeated format, e.g. "foo: [1, 2, 3]"
- while True:
- _MergeScalarField(tokenizer, message, field, allow_multiple_scalars)
- if tokenizer.TryConsume(']'):
- break
- tokenizer.Consume(',')
- else:
- _MergeScalarField(tokenizer, message, field, allow_multiple_scalars)
- else: # Proto field is unknown.
- assert allow_unknown_extension
- _SkipFieldContents(tokenizer)
-
- # For historical reasons, fields may optionally be separated by commas or
- # semicolons.
- if not tokenizer.TryConsume(','):
- tokenizer.TryConsume(';')
-
-
-def _SkipFieldContents(tokenizer):
- """Skips over contents (value or message) of a field.
-
- Args:
- tokenizer: A tokenizer to parse the field name and values.
- """
- # Try to guess the type of this field.
- # If this field is not a message, there should be a ":" between the
- # field name and the field value and also the field value should not
- # start with "{" or "<" which indicates the beginning of a message body.
- # If there is no ":" or there is a "{" or "<" after ":", this field has
- # to be a message or the input is ill-formed.
- if tokenizer.TryConsume(':') and not tokenizer.LookingAt(
- '{') and not tokenizer.LookingAt('<'):
- _SkipFieldValue(tokenizer)
- else:
- _SkipFieldMessage(tokenizer)
-
-
-def _SkipField(tokenizer):
- """Skips over a complete field (name and value/message).
-
- Args:
- tokenizer: A tokenizer to parse the field name and values.
- """
- if tokenizer.TryConsume('['):
- # Consume extension name.
- tokenizer.ConsumeIdentifier()
- while tokenizer.TryConsume('.'):
- tokenizer.ConsumeIdentifier()
- tokenizer.Consume(']')
- else:
- tokenizer.ConsumeIdentifier()
-
- _SkipFieldContents(tokenizer)
-
- # For historical reasons, fields may optionally be separated by commas or
- # semicolons.
- if not tokenizer.TryConsume(','):
- tokenizer.TryConsume(';')
-
-
-def _SkipFieldMessage(tokenizer):
- """Skips over a field message.
-
- Args:
- tokenizer: A tokenizer to parse the field name and values.
- """
-
- if tokenizer.TryConsume('<'):
- delimiter = '>'
- else:
- tokenizer.Consume('{')
- delimiter = '}'
-
- while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'):
- _SkipField(tokenizer)
-
- tokenizer.Consume(delimiter)
-
-
-def _SkipFieldValue(tokenizer):
- """Skips over a field value.
-
- Args:
- tokenizer: A tokenizer to parse the field name and values.
-
- Raises:
- ParseError: In case an invalid field value is found.
- """
- # String tokens can come in multiple adjacent string literals.
- # If we can consume one, consume as many as we can.
- if tokenizer.TryConsumeString():
- while tokenizer.TryConsumeString():
- pass
- return
-
- if (not tokenizer.TryConsumeIdentifier() and
- not tokenizer.TryConsumeInt64() and
- not tokenizer.TryConsumeUint64() and
- not tokenizer.TryConsumeFloat()):
- raise ParseError('Invalid field value: ' + tokenizer.token)
-
-
-def _MergeScalarField(tokenizer, message, field, allow_multiple_scalars):
- """Merges a single protocol message scalar field into a message.
-
- Args:
- tokenizer: A tokenizer to parse the field value.
- message: A protocol message to record the data.
- field: The descriptor of the field to be merged.
- allow_multiple_scalars: Determines if repeated values for a non-repeated
- field are permitted, e.g., the string "foo: 1 foo: 2" for a
- required/optional field named "foo".
-
- Raises:
- ParseError: In case of text parsing problems.
- RuntimeError: On runtime errors.
- """
- value = None
-
- if field.type in (descriptor.FieldDescriptor.TYPE_INT32,
- descriptor.FieldDescriptor.TYPE_SINT32,
- descriptor.FieldDescriptor.TYPE_SFIXED32):
- value = tokenizer.ConsumeInt32()
- elif field.type in (descriptor.FieldDescriptor.TYPE_INT64,
- descriptor.FieldDescriptor.TYPE_SINT64,
- descriptor.FieldDescriptor.TYPE_SFIXED64):
- value = tokenizer.ConsumeInt64()
- elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32,
- descriptor.FieldDescriptor.TYPE_FIXED32):
- value = tokenizer.ConsumeUint32()
- elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64,
- descriptor.FieldDescriptor.TYPE_FIXED64):
- value = tokenizer.ConsumeUint64()
- elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT,
- descriptor.FieldDescriptor.TYPE_DOUBLE):
- value = tokenizer.ConsumeFloat()
- elif field.type == descriptor.FieldDescriptor.TYPE_BOOL:
- value = tokenizer.ConsumeBool()
- elif field.type == descriptor.FieldDescriptor.TYPE_STRING:
- value = tokenizer.ConsumeString()
- elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
- value = tokenizer.ConsumeByteString()
- elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
- value = tokenizer.ConsumeEnum(field)
- else:
- raise RuntimeError('Unknown field type %d' % field.type)
-
- if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
- if field.is_extension:
- message.Extensions[field].append(value)
- else:
- getattr(message, field.name).append(value)
- else:
- if field.is_extension:
- if not allow_multiple_scalars and message.HasExtension(field):
- raise tokenizer.ParseErrorPreviousToken(
- 'Message type "%s" should not have multiple "%s" extensions.' %
- (message.DESCRIPTOR.full_name, field.full_name))
- else:
- message.Extensions[field] = value
- else:
- if not allow_multiple_scalars and message.HasField(field.name):
- raise tokenizer.ParseErrorPreviousToken(
- 'Message type "%s" should not have multiple "%s" fields.' %
- (message.DESCRIPTOR.full_name, field.name))
- else:
- setattr(message, field.name, value)
-
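For a singular scalar, the branch above only rejects a duplicate value when allow_multiple_scalars is false; otherwise the later setattr() simply overwrites the earlier value. A sketch of the permissive behaviour through Merge(), the entry point that is expected to pass allow_multiple_scalars=True, using the well-known Duration type:

from google.protobuf import duration_pb2
from google.protobuf import text_format

d = duration_pb2.Duration()
# With multiple scalars allowed, each occurrence overwrites the previous one,
# so the last value wins rather than raising a ParseError.
text_format.Merge('seconds: 1 seconds: 2', d)
assert d.seconds == 2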
-
-class _Tokenizer(object):
- """Protocol buffer text representation tokenizer.
-
- This class handles the lower level string parsing by splitting it into
- meaningful tokens.
-
- It was directly ported from the Java protocol buffer API.
- """
-
- _WHITESPACE = re.compile('(\\s|(#.*$))+', re.MULTILINE)
- _TOKEN = re.compile('|'.join([
- r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier
- r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number
- ] + [ # quoted str for each quote mark
- r'{qt}([^{qt}\n\\]|\\.)*({qt}|\\?$)'.format(qt=mark) for mark in _QUOTES
- ]))
-
- _IDENTIFIER = re.compile(r'\w+')
-
- def __init__(self, lines):
- self._position = 0
- self._line = -1
- self._column = 0
- self._token_start = None
- self.token = ''
- self._lines = iter(lines)
- self._current_line = ''
- self._previous_line = 0
- self._previous_column = 0
- self._more_lines = True
- self._SkipWhitespace()
- self.NextToken()
-
- def LookingAt(self, token):
- return self.token == token
-
- def AtEnd(self):
-    """Checks whether the end of the text was reached.
-
- Returns:
- True iff the end was reached.
- """
- return not self.token
-
- def _PopLine(self):
- while len(self._current_line) <= self._column:
- try:
- self._current_line = next(self._lines)
- except StopIteration:
- self._current_line = ''
- self._more_lines = False
- return
- else:
- self._line += 1
- self._column = 0
-
- def _SkipWhitespace(self):
- while True:
- self._PopLine()
- match = self._WHITESPACE.match(self._current_line, self._column)
- if not match:
- break
- length = len(match.group(0))
- self._column += length
-
- def TryConsume(self, token):
- """Tries to consume a given piece of text.
-
- Args:
- token: Text to consume.
-
- Returns:
- True iff the text was consumed.
- """
- if self.token == token:
- self.NextToken()
- return True
- return False
-
- def Consume(self, token):
- """Consumes a piece of text.
-
- Args:
- token: Text to consume.
-
- Raises:
- ParseError: If the text couldn't be consumed.
- """
- if not self.TryConsume(token):
- raise self._ParseError('Expected "%s".' % token)
-
- def TryConsumeIdentifier(self):
- try:
- self.ConsumeIdentifier()
- return True
- except ParseError:
- return False
-
- def ConsumeIdentifier(self):
- """Consumes protocol message field identifier.
-
- Returns:
- Identifier string.
-
- Raises:
- ParseError: If an identifier couldn't be consumed.
- """
- result = self.token
- if not self._IDENTIFIER.match(result):
- raise self._ParseError('Expected identifier.')
- self.NextToken()
- return result
-
- def ConsumeInt32(self):
- """Consumes a signed 32bit integer number.
-
- Returns:
- The integer parsed.
-
- Raises:
- ParseError: If a signed 32bit integer couldn't be consumed.
- """
- try:
- result = ParseInteger(self.token, is_signed=True, is_long=False)
- except ValueError as e:
- raise self._ParseError(str(e))
- self.NextToken()
- return result
-
- def ConsumeUint32(self):
- """Consumes an unsigned 32bit integer number.
-
- Returns:
- The integer parsed.
-
- Raises:
- ParseError: If an unsigned 32bit integer couldn't be consumed.
- """
- try:
- result = ParseInteger(self.token, is_signed=False, is_long=False)
- except ValueError as e:
- raise self._ParseError(str(e))
- self.NextToken()
- return result
-
- def TryConsumeInt64(self):
- try:
- self.ConsumeInt64()
- return True
- except ParseError:
- return False
-
- def ConsumeInt64(self):
- """Consumes a signed 64bit integer number.
-
- Returns:
- The integer parsed.
-
- Raises:
- ParseError: If a signed 64bit integer couldn't be consumed.
- """
- try:
- result = ParseInteger(self.token, is_signed=True, is_long=True)
- except ValueError as e:
- raise self._ParseError(str(e))
- self.NextToken()
- return result
-
- def TryConsumeUint64(self):
- try:
- self.ConsumeUint64()
- return True
- except ParseError:
- return False
-
- def ConsumeUint64(self):
- """Consumes an unsigned 64bit integer number.
-
- Returns:
- The integer parsed.
-
- Raises:
- ParseError: If an unsigned 64bit integer couldn't be consumed.
- """
- try:
- result = ParseInteger(self.token, is_signed=False, is_long=True)
- except ValueError as e:
- raise self._ParseError(str(e))
- self.NextToken()
- return result
-
- def TryConsumeFloat(self):
- try:
- self.ConsumeFloat()
- return True
- except ParseError:
- return False
-
- def ConsumeFloat(self):
-    """Consumes a floating point number.
-
- Returns:
- The number parsed.
-
- Raises:
- ParseError: If a floating point number couldn't be consumed.
- """
- try:
- result = ParseFloat(self.token)
- except ValueError as e:
- raise self._ParseError(str(e))
- self.NextToken()
- return result
-
- def ConsumeBool(self):
- """Consumes a boolean value.
-
- Returns:
- The bool parsed.
-
- Raises:
- ParseError: If a boolean value couldn't be consumed.
- """
- try:
- result = ParseBool(self.token)
- except ValueError as e:
- raise self._ParseError(str(e))
- self.NextToken()
- return result
-
- def TryConsumeString(self):
- try:
- self.ConsumeString()
- return True
- except ParseError:
- return False
-
- def ConsumeString(self):
- """Consumes a string value.
-
- Returns:
- The string parsed.
-
- Raises:
- ParseError: If a string value couldn't be consumed.
- """
- the_bytes = self.ConsumeByteString()
- try:
- return six.text_type(the_bytes, 'utf-8')
- except UnicodeDecodeError as e:
- raise self._StringParseError(e)
-
- def ConsumeByteString(self):
- """Consumes a byte array value.
-
- Returns:
-      The array parsed (as a bytes object).
-
- Raises:
- ParseError: If a byte array value couldn't be consumed.
- """
- the_list = [self._ConsumeSingleByteString()]
- while self.token and self.token[0] in _QUOTES:
- the_list.append(self._ConsumeSingleByteString())
- return b''.join(the_list)
-
- def _ConsumeSingleByteString(self):
- """Consume one token of a string literal.
-
- String literals (whether bytes or text) can come in multiple adjacent
- tokens which are automatically concatenated, like in C or Python. This
- method only consumes one token.
-
- Returns:
- The token parsed.
- Raises:
-      ParseError: When malformed string data is found.
- """
- text = self.token
- if len(text) < 1 or text[0] not in _QUOTES:
- raise self._ParseError('Expected string but found: %r' % (text,))
-
- if len(text) < 2 or text[-1] != text[0]:
- raise self._ParseError('String missing ending quote: %r' % (text,))
-
- try:
- result = text_encoding.CUnescape(text[1:-1])
- except ValueError as e:
- raise self._ParseError(str(e))
- self.NextToken()
- return result
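As the docstring above notes, adjacent string literals are concatenated the way C and Python concatenate them; ConsumeByteString() keeps consuming quoted tokens for as long as the next token starts with a quote. A short sketch of the visible effect, again using FieldMask as a stand-in message:

from google.protobuf import field_mask_pb2
from google.protobuf import text_format

mask = field_mask_pb2.FieldMask()
# Two adjacent literals become a single string value.
text_format.Merge('paths: "foo." "bar"', mask)
assert mask.paths[0] == 'foo.bar'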
-
- def ConsumeEnum(self, field):
- try:
- result = ParseEnum(field, self.token)
- except ValueError as e:
- raise self._ParseError(str(e))
- self.NextToken()
- return result
-
- def ParseErrorPreviousToken(self, message):
- """Creates and *returns* a ParseError for the previously read token.
-
- Args:
- message: A message to set for the exception.
-
- Returns:
- A ParseError instance.
- """
- return ParseError('%d:%d : %s' % (
- self._previous_line + 1, self._previous_column + 1, message))
-
- def _ParseError(self, message):
- """Creates and *returns* a ParseError for the current token."""
- return ParseError('%d:%d : %s' % (
- self._line + 1, self._column + 1, message))
-
- def _StringParseError(self, e):
- return self._ParseError('Couldn\'t parse string: ' + str(e))
-
- def NextToken(self):
- """Reads the next meaningful token."""
- self._previous_line = self._line
- self._previous_column = self._column
-
- self._column += len(self.token)
- self._SkipWhitespace()
-
- if not self._more_lines:
- self.token = ''
- return
-
- match = self._TOKEN.match(self._current_line, self._column)
- if match:
- token = match.group(0)
- self.token = token
- else:
- self.token = self._current_line[self._column]
-
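_SkipWhitespace() and NextToken() together implement a simple scanner: comments count as whitespace, the _TOKEN expression is tried at the current column, and when nothing matches the single character at that position becomes the token (which is how punctuation such as ':' is produced). A sketch that walks the token stream, using the private _Tokenizer name as defined in this file:

from google.protobuf import text_format

tok = text_format._Tokenizer(['foo: "bar"  # comments are skipped as whitespace'])
tokens = []
while not tok.AtEnd():
  tokens.append(tok.token)
  tok.NextToken()
assert tokens == ['foo', ':', '"bar"']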
-
-def ParseInteger(text, is_signed=False, is_long=False):
- """Parses an integer.
-
- Args:
- text: The text to parse.
- is_signed: True if a signed integer must be parsed.
- is_long: True if a long integer must be parsed.
-
- Returns:
- The integer value.
-
- Raises:
-    ValueError: If the text is not a valid integer.
- """
-  # Do the actual parsing. Exceptions are propagated to the caller.
- try:
- # We force 32-bit values to int and 64-bit values to long to make
- # alternate implementations where the distinction is more significant
- # (e.g. the C++ implementation) simpler.
- if is_long:
- result = long(text, 0)
- else:
- result = int(text, 0)
- except ValueError:
- raise ValueError('Couldn\'t parse integer: %s' % text)
-
- # Check if the integer is sane. Exceptions handled by callers.
- checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
- checker.CheckValue(result)
- return result
-
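Because the conversion uses base 0, ParseInteger accepts decimal and hexadecimal spellings (and C-style octal under Python 2), and the checker selected from _INTEGER_CHECKERS enforces the signed/unsigned and 32/64-bit range. A quick sketch of the module-level helper:

from google.protobuf import text_format

assert text_format.ParseInteger('0x10') == 16               # base 0 accepts hex
assert text_format.ParseInteger('-1', is_signed=True) == -1
try:
  text_format.ParseInteger('-1')   # unsigned by default, so out of range
except ValueError:
  pass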
-
-def ParseFloat(text):
- """Parse a floating point number.
-
- Args:
- text: Text to parse.
-
- Returns:
- The number parsed.
-
- Raises:
- ValueError: If a floating point number couldn't be parsed.
- """
- try:
- # Assume Python compatible syntax.
- return float(text)
- except ValueError:
- # Check alternative spellings.
- if _FLOAT_INFINITY.match(text):
- if text[0] == '-':
- return float('-inf')
- else:
- return float('inf')
- elif _FLOAT_NAN.match(text):
- return float('nan')
- else:
- # assume '1.0f' format
- try:
- return float(text.rstrip('f'))
- except ValueError:
- raise ValueError('Couldn\'t parse float: %s' % text)
-
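Anything the built-in float() already understands is returned directly; the fallbacks then cover the text-format spellings for infinity and NaN and tolerate a trailing 'f' as in '1.0f'. For example:

import math

from google.protobuf import text_format

assert text_format.ParseFloat('1.5e3') == 1500.0
assert text_format.ParseFloat('1.0f') == 1.0             # trailing 'f' is stripped
assert text_format.ParseFloat('-inf') == float('-inf')
assert math.isnan(text_format.ParseFloat('nan'))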
-
-def ParseBool(text):
- """Parse a boolean value.
-
- Args:
- text: Text to parse.
-
- Returns:
-    The boolean value parsed.
-
- Raises:
- ValueError: If text is not a valid boolean.
- """
- if text in ('true', 't', '1'):
- return True
- elif text in ('false', 'f', '0'):
- return False
- else:
- raise ValueError('Expected "true" or "false".')
-
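ParseBool accepts only the six spellings listed above and rejects everything else rather than coercing it. For example:

from google.protobuf import text_format

assert text_format.ParseBool('t') is True
assert text_format.ParseBool('0') is False
try:
  text_format.ParseBool('yes')   # not an accepted spelling
except ValueError:
  pass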
-
-def ParseEnum(field, value):
- """Parse an enum value.
-
- The value can be specified by a number (the enum value), or by
- a string literal (the enum name).
-
- Args:
- field: Enum field descriptor.
- value: String value.
-
- Returns:
- Enum value number.
-
- Raises:
- ValueError: If the enum value could not be parsed.
- """
- enum_descriptor = field.enum_type
- try:
- number = int(value, 0)
- except ValueError:
- # Identifier.
- enum_value = enum_descriptor.values_by_name.get(value, None)
- if enum_value is None:
- raise ValueError(
- 'Enum type "%s" has no value named %s.' % (
- enum_descriptor.full_name, value))
- else:
- # Numeric value.
- enum_value = enum_descriptor.values_by_number.get(number, None)
- if enum_value is None:
- raise ValueError(
- 'Enum type "%s" has no value with number %d.' % (
- enum_descriptor.full_name, number))
- return enum_value.number
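ParseEnum resolves either the symbolic name or the numeric value of an enum member and always returns the number. A sketch against a field descriptor that ships with protobuf itself (FieldDescriptorProto.label, whose LABEL_REPEATED member has number 3):

from google.protobuf import descriptor_pb2
from google.protobuf import text_format

field = descriptor_pb2.FieldDescriptorProto.DESCRIPTOR.fields_by_name['label']
assert text_format.ParseEnum(field, 'LABEL_REPEATED') == 3   # by name
assert text_format.ParseEnum(field, '3') == 3                # by number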