// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
//
// Author: kenton@google.com (Kenton Varda)
//  Based on original Protocol Buffers design by
//  Sanjay Ghemawat, Jeff Dean, and others.
//
// RepeatedField and RepeatedPtrField are used by generated protocol message
// classes to manipulate repeated fields.  These classes are very similar to
// STL's vector, but include a number of optimizations found to be useful
// specifically in the case of Protocol Buffers.  RepeatedPtrField is
// particularly different from STL vector as it manages ownership of the
// pointers that it contains.
//
// This header covers RepeatedPtrField.

#ifndef GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__
#define GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <iterator>
#include <limits>
#include <string>
#include <type_traits>
#include <utility>

#include "absl/base/attributes.h"
#include "absl/base/prefetch.h"
#include "absl/log/absl_check.h"
#include "absl/meta/type_traits.h"
#include "google/protobuf/arena.h"
#include "google/protobuf/internal_visibility.h"
#include "google/protobuf/message_lite.h"
#include "google/protobuf/port.h"

// Must be included last.
#include "google/protobuf/port_def.inc"

#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif

namespace google {
namespace protobuf {

class Message;
class Reflection;

template <typename T>
struct WeakRepeatedPtrField;

namespace internal {

class MergePartialFromCodedStreamHelper;
class SwapFieldHelper;

}  // namespace internal

namespace internal {
template <typename Element>
class RepeatedPtrIterator;
template <typename Element, typename VoidPtr>
class RepeatedPtrOverPtrsIterator;
}  // namespace internal

namespace internal {

// Swaps two non-overlapping blocks of memory of size `N`.
template <size_t N>
inline void memswap(char* PROTOBUF_RESTRICT a, char* PROTOBUF_RESTRICT b) {
  // `PROTOBUF_RESTRICT` tells the compiler that the blocks do not overlap,
  // which allows it to generate optimized code for swap_ranges.
  std::swap_ranges(a, a + N, b);
}

template <typename T>
struct IsMovable
    : std::integral_constant<bool, std::is_move_constructible<T>::value &&
                                       std::is_move_assignable<T>::value> {};

// A trait that tells the offset of `T::arena_`.
//
// Do not use this struct - it exists for internal use only.
template <typename T>
struct ArenaOffsetHelper {
  static constexpr size_t value = offsetof(T, arena_);
};

// Defined further below.
template <typename GenericType>
class GenericTypeHandler;

// This is the common base class for RepeatedPtrFields.  It deals only in
// void* pointers.  Users should not use this interface directly.
//
// The methods of this interface correspond to the methods of
// RepeatedPtrField, but may have a template argument called TypeHandler.
// Its signature is:
//   class TypeHandler {
//    public:
//     using Type = MyType;
//     using Movable = ...;
//
//     static Type*(*)(Arena*) GetNewFunc();
//     static Arena* GetArena(Type* value);
//
//     static Type* New(Arena* arena);
//     static Type* New(Arena* arena, Type&& value);
//     static Type* NewFromPrototype(const Type* prototype, Arena* arena);
//     static void Delete(Type*, Arena* arena);
//     static void Clear(Type*);
//     static void Merge(const Type& from, Type* to);
//
//     // Only needs to be implemented if SpaceUsedExcludingSelf() is called.
//     static int SpaceUsedLong(const Type&);
//   };
class PROTOBUF_EXPORT RepeatedPtrFieldBase {
  template <typename TypeHandler>
  using Value = typename TypeHandler::Type;

  static constexpr int kSSOCapacity = 1;

  using ElementFactory = void* (*)(Arena*);

 protected:
  // We use the same TypeHandler for all Message types to deduplicate
  // generated code.
  template <typename TypeHandler>
  using CommonHandler = typename std::conditional<
      std::is_base_of<MessageLite, Value<TypeHandler>>::value,
      GenericTypeHandler<MessageLite>, TypeHandler>::type;

  constexpr RepeatedPtrFieldBase()
      : tagged_rep_or_elem_(nullptr),
        current_size_(0),
        capacity_proxy_(0),
        arena_(nullptr) {}
  explicit RepeatedPtrFieldBase(Arena* arena)
      : tagged_rep_or_elem_(nullptr),
        current_size_(0),
        capacity_proxy_(0),
        arena_(arena) {}

  RepeatedPtrFieldBase(const RepeatedPtrFieldBase&) = delete;
  RepeatedPtrFieldBase& operator=(const RepeatedPtrFieldBase&) = delete;

  ~RepeatedPtrFieldBase() {
#ifndef NDEBUG
    // Try to trigger segfault / asan failure in non-opt builds if arena_
    // lifetime has ended before the destructor.
    if (arena_) (void)arena_->SpaceAllocated();
#endif
  }

  bool empty() const { return current_size_ == 0; }
  int size() const { return current_size_; }

  // Returns the size of the buffer with pointers to elements.
  //
  // Note:
  //
  //   * prefer `SizeAtCapacity()` to `size() == Capacity()`;
  //   * prefer `AllocatedSizeAtCapacity()` to
  //     `allocated_size() == Capacity()`.
  int Capacity() const { return capacity_proxy_ + kSSOCapacity; }

  template <typename TypeHandler>
  const Value<TypeHandler>& at(int index) const {
    ABSL_CHECK_GE(index, 0);
    ABSL_CHECK_LT(index, current_size_);
    return *cast<TypeHandler>(element_at(index));
  }

  template <typename TypeHandler>
  Value<TypeHandler>& at(int index) {
    ABSL_CHECK_GE(index, 0);
    ABSL_CHECK_LT(index, current_size_);
    return *cast<TypeHandler>(element_at(index));
  }

  template <typename TypeHandler>
  Value<TypeHandler>* Mutable(int index) {
    ABSL_DCHECK_GE(index, 0);
    ABSL_DCHECK_LT(index, current_size_);
    return cast<TypeHandler>(element_at(index));
  }

  template <typename TypeHandler>
  Value<TypeHandler>* Add() {
    if (std::is_same<Value<TypeHandler>, std::string>{}) {
      return cast<TypeHandler>(AddString());
    }
    return cast<TypeHandler>(AddMessageLite(TypeHandler::GetNewFunc()));
  }

  template <
      typename TypeHandler,
      typename std::enable_if<TypeHandler::Movable::value>::type* = nullptr>
  inline void Add(Value<TypeHandler>&& value) {
    if (current_size_ < allocated_size()) {
      *cast<TypeHandler>(element_at(ExchangeCurrentSize(current_size_ + 1))) =
          std::move(value);
      return;
    }
    MaybeExtend();
    if (!using_sso()) ++rep()->allocated_size;
    auto* result = TypeHandler::New(arena_, std::move(value));
    element_at(ExchangeCurrentSize(current_size_ + 1)) = result;
  }

  // Must be called from destructor.
  //
  // Pre-condition: NeedsDestroy() returns true.
  template <typename TypeHandler>
  void Destroy() {
    ABSL_DCHECK(NeedsDestroy());

    // TODO: arena check is redundant once all `RepeatedPtrField`s
    // with non-null arena are owned by the arena.
    if (PROTOBUF_PREDICT_FALSE(arena_ != nullptr)) return;

    using H = CommonHandler<TypeHandler>;
    int n = allocated_size();
    void** elems = elements();
    for (int i = 0; i < n; i++) {
      Delete<H>(elems[i], nullptr);
    }
    if (!using_sso()) {
      internal::SizedDelete(rep(),
                            Capacity() * sizeof(elems[0]) + kRepHeaderSize);
    }
  }

  inline bool NeedsDestroy() const {
    // Either there is an allocated element in SSO buffer or there is an
    // allocated Rep.
    return tagged_rep_or_elem_ != nullptr;
  }
  void DestroyProtos();
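
  // Note on the element buffer (an illustrative summary of the invariants
  // hardened further below, not additional API): pointers in [0, size())
  // refer to live elements; pointers in [size(), allocated_size()) refer to
  // allocated-but-cleared elements kept for reuse; and allocated_size() <=
  // Capacity() bounds the pointer buffer itself.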
 public:
  // The next few methods are public so that they can be called from generated
  // code when implicit weak fields are used, but they should never be called
  // by application code.

  template <typename TypeHandler>
  const Value<TypeHandler>& Get(int index) const {
    ABSL_DCHECK_GE(index, 0);
    ABSL_DCHECK_LT(index, current_size_);
    return *cast<TypeHandler>(element_at(index));
  }

  // Creates and adds an element using the given prototype, without
  // introducing a link-time dependency on the concrete message type.
  //
  // Pre-condition: prototype must not be nullptr.
  MessageLite* AddMessage(const MessageLite* prototype);

  template <typename TypeHandler>
  void Clear() {
    const int n = current_size_;
    ABSL_DCHECK_GE(n, 0);
    if (n > 0) {
      using H = CommonHandler<TypeHandler>;
      ClearNonEmpty<H>();
    }
  }

  // Appends all message values from `from` to this instance.
  template <typename T>
  void MergeFrom(const RepeatedPtrFieldBase& from) {
    static_assert(std::is_base_of<MessageLite, T>::value, "");
#ifdef __cpp_if_constexpr
    if constexpr (!std::is_base_of<Message, T>::value) {
      // For LITE objects we use the generic MergeFrom to save on binary size.
      return MergeFrom<MessageLite>(from);
    }
#endif
    MergeFromConcreteMessage(from, Arena::CopyConstruct<T>);
  }

  inline void InternalSwap(RepeatedPtrFieldBase* PROTOBUF_RESTRICT rhs) {
    ABSL_DCHECK(this != rhs);

    // Swap all fields except arena pointer at once.
    internal::memswap<ArenaOffsetHelper<RepeatedPtrFieldBase>::value>(
        reinterpret_cast<char*>(this), reinterpret_cast<char*>(rhs));
  }

  // Returns true if there are no preallocated elements in the array.
  bool PrepareForParse() { return allocated_size() == current_size_; }

  // Similar to `AddAllocated` but faster.
  //
  // Pre-condition: PrepareForParse() is true.
  void AddAllocatedForParse(void* value) {
    ABSL_DCHECK(PrepareForParse());
    if (PROTOBUF_PREDICT_FALSE(SizeAtCapacity())) {
      *InternalExtend(1) = value;
      ++rep()->allocated_size;
    } else {
      if (using_sso()) {
        tagged_rep_or_elem_ = value;
      } else {
        rep()->elements[current_size_] = value;
        ++rep()->allocated_size;
      }
    }
    ExchangeCurrentSize(current_size_ + 1);
  }

 protected:
  template <typename TypeHandler>
  void RemoveLast() {
    ABSL_DCHECK_GT(current_size_, 0);
    ExchangeCurrentSize(current_size_ - 1);
    using H = CommonHandler<TypeHandler>;
    H::Clear(cast<H>(element_at(current_size_)));
  }

  template <typename TypeHandler>
  void CopyFrom(const RepeatedPtrFieldBase& other) {
    if (&other == this) return;
    Clear<TypeHandler>();
    if (other.empty()) return;
    MergeFrom<typename TypeHandler::Type>(other);
  }

  void CloseGap(int start, int num);

  void Reserve(int capacity);

  template <typename TypeHandler>
  static inline Value<TypeHandler>* copy(const Value<TypeHandler>* value) {
    using H = CommonHandler<TypeHandler>;
    auto* new_value = H::NewFromPrototype(value, nullptr);
    H::Merge(*value, new_value);
    return cast<TypeHandler>(new_value);
  }

  // Used for constructing iterators.
  void* const* raw_data() const { return elements(); }
  void** raw_mutable_data() { return elements(); }

  template <typename TypeHandler>
  Value<TypeHandler>** mutable_data() {
    // TODO: Breaks C++ aliasing rules.  We should probably remove this
    // method entirely.
    return reinterpret_cast<Value<TypeHandler>**>(raw_mutable_data());
  }

  template <typename TypeHandler>
  const Value<TypeHandler>* const* data() const {
    // TODO: Breaks C++ aliasing rules.  We should probably remove this
    // method entirely.
    return reinterpret_cast<const Value<TypeHandler>* const*>(raw_data());
  }

  template <typename TypeHandler>
  PROTOBUF_NDEBUG_INLINE void Swap(RepeatedPtrFieldBase* other) {
    if (internal::CanUseInternalSwap(GetArena(), other->GetArena())) {
      InternalSwap(other);
    } else {
      SwapFallback<TypeHandler>(other);
    }
  }

  void SwapElements(int index1, int index2) {
    using std::swap;  // enable ADL with fallback
    swap(element_at(index1), element_at(index2));
  }
  template <typename TypeHandler>
  PROTOBUF_NOINLINE size_t SpaceUsedExcludingSelfLong() const {
    size_t allocated_bytes =
        using_sso()
            ? 0
            : static_cast<size_t>(Capacity()) * sizeof(void*) + kRepHeaderSize;
    const int n = allocated_size();
    void* const* elems = elements();
    for (int i = 0; i < n; ++i) {
      allocated_bytes +=
          TypeHandler::SpaceUsedLong(*cast<TypeHandler>(elems[i]));
    }
    return allocated_bytes;
  }

  // Advanced memory management --------------------------------------

  // Like Add(), but if there are no cleared objects to use, returns nullptr.
  template <typename TypeHandler>
  Value<TypeHandler>* AddFromCleared() {
    if (current_size_ < allocated_size()) {
      return cast<TypeHandler>(
          element_at(ExchangeCurrentSize(current_size_ + 1)));
    } else {
      return nullptr;
    }
  }

  template <typename TypeHandler>
  void AddAllocated(Value<TypeHandler>* value) {
    ABSL_DCHECK_NE(value, nullptr);
    Arena* element_arena = TypeHandler::GetArena(value);
    Arena* arena = GetArena();
    if (arena != element_arena || AllocatedSizeAtCapacity()) {
      AddAllocatedSlowWithCopy<TypeHandler>(value, element_arena, arena);
      return;
    }
    // Fast path: underlying arena representation (tagged pointer) is equal to
    // our arena pointer, and we can add to array without resizing it (at
    // least one slot that is not allocated).
    void** elems = elements();
    if (current_size_ < allocated_size()) {
      // Make space at [current] by moving first allocated element to end of
      // allocated list.
      elems[allocated_size()] = elems[current_size_];
    }
    elems[ExchangeCurrentSize(current_size_ + 1)] = value;
    if (!using_sso()) ++rep()->allocated_size;
  }

  template <typename TypeHandler>
  void UnsafeArenaAddAllocated(Value<TypeHandler>* value) {
    ABSL_DCHECK_NE(value, nullptr);
    // Make room for the new pointer.
    if (SizeAtCapacity()) {
      // The array is completely full with no cleared objects, so grow it.
      InternalExtend(1);
      ++rep()->allocated_size;
    } else if (AllocatedSizeAtCapacity()) {
      // There is no more space in the pointer array because it contains some
      // cleared objects awaiting reuse.  We don't want to grow the array in
      // this case because otherwise a loop calling AddAllocated() followed by
      // Clear() would leak memory.
      using H = CommonHandler<TypeHandler>;
      Delete<H>(element_at(current_size_), arena_);
    } else if (current_size_ < allocated_size()) {
      // We have some cleared objects.  We don't care about their order, so we
      // can just move the first one to the end to make space.
      element_at(allocated_size()) = element_at(current_size_);
      ++rep()->allocated_size;
    } else {
      // There are no cleared objects.
      if (!using_sso()) ++rep()->allocated_size;
    }

    element_at(ExchangeCurrentSize(current_size_ + 1)) = value;
  }

  template <typename TypeHandler>
  PROTOBUF_NODISCARD Value<TypeHandler>* ReleaseLast() {
    Value<TypeHandler>* result = UnsafeArenaReleaseLast<TypeHandler>();
    // Now perform a copy if we're on an arena.
    Arena* arena = GetArena();

    if (internal::DebugHardenForceCopyInRelease()) {
      auto* new_result = copy<TypeHandler>(result);
      if (arena == nullptr) delete result;
      return new_result;
    } else {
      return (arena == nullptr) ? result : copy<TypeHandler>(result);
    }
  }

  // Releases and returns the last element, but does not do out-of-arena copy.
  // Instead, just returns the raw pointer to the contained element in the
  // arena.
  template <typename TypeHandler>
  Value<TypeHandler>* UnsafeArenaReleaseLast() {
    ABSL_DCHECK_GT(current_size_, 0);
    ExchangeCurrentSize(current_size_ - 1);
    auto* result = cast<TypeHandler>(element_at(current_size_));
    if (using_sso()) {
      tagged_rep_or_elem_ = nullptr;
    } else {
      --rep()->allocated_size;
      if (current_size_ < allocated_size()) {
        // There are cleared elements on the end; replace the removed element
        // with the last allocated element.
        element_at(current_size_) = element_at(allocated_size());
      }
    }
    return result;
  }

  int ClearedCount() const { return allocated_size() - current_size_; }
  // Slowpath handles all cases, copying if necessary.
  template <typename TypeHandler>
  PROTOBUF_NOINLINE void AddAllocatedSlowWithCopy(
      // Pass value_arena and my_arena to avoid duplicate virtual call (value)
      // or load (mine).
      Value<TypeHandler>* value, Arena* value_arena, Arena* my_arena) {
    using H = CommonHandler<TypeHandler>;
    // Ensure that either the value is in the same arena, or if not, we do the
    // appropriate thing: Own() it (if it's on heap and we're in an arena) or
    // copy it to our arena/heap (otherwise).
    if (my_arena != nullptr && value_arena == nullptr) {
      my_arena->Own(value);
    } else if (my_arena != value_arena) {
      ABSL_DCHECK(value_arena != nullptr);
      auto* new_value = TypeHandler::NewFromPrototype(value, my_arena);
      H::Merge(*value, new_value);
      value = new_value;
    }

    UnsafeArenaAddAllocated<H>(value);
  }

  template <typename TypeHandler>
  PROTOBUF_NOINLINE void SwapFallback(RepeatedPtrFieldBase* other) {
    ABSL_DCHECK(!internal::CanUseInternalSwap(GetArena(), other->GetArena()));

    // Copy semantics in this case.  We try to improve efficiency by placing
    // the temporary on |other|'s arena so that messages are copied twice
    // rather than three times.
    RepeatedPtrFieldBase temp(other->GetArena());
    if (!this->empty()) {
      temp.MergeFrom<typename TypeHandler::Type>(*this);
    }
    this->CopyFrom<TypeHandler>(*other);
    other->InternalSwap(&temp);
    if (temp.NeedsDestroy()) {
      temp.Destroy<TypeHandler>();
    }
  }

  // Gets the Arena on which this RepeatedPtrField stores its elements.
  inline Arena* GetArena() const { return arena_; }

  static constexpr size_t InternalGetArenaOffset(internal::InternalVisibility) {
    return PROTOBUF_FIELD_OFFSET(RepeatedPtrFieldBase, arena_);
  }

 private:
  using InternalArenaConstructable_ = void;
  using DestructorSkippable_ = void;

  template <typename T>
  friend class Arena::InternalHelper;

  // ExtensionSet stores repeated message extensions as
  // RepeatedPtrField<MessageLite>, but non-lite ExtensionSets need to
  // implement SpaceUsedLong(), and thus need to call
  // SpaceUsedExcludingSelfLong() reinterpreting MessageLite as Message.
  // ExtensionSet also needs to make use of AddFromCleared(), which is not
  // part of the public interface.
  friend class ExtensionSet;

  // The MapFieldBase implementation needs to call protected methods directly,
  // reinterpreting pointers as being to Message instead of a specific Message
  // subclass.
  friend class MapFieldBase;
  friend struct MapFieldTestPeer;

  // The table-driven MergePartialFromCodedStream implementation needs to
  // operate on RepeatedPtrField<MessageLite>.
  friend class MergePartialFromCodedStreamHelper;

  friend class AccessorHelper;

  template <typename T>
  friend struct google::protobuf::WeakRepeatedPtrField;

  friend class internal::TcParser;  // TODO: Remove this friend.

  // Expose offset of `arena_` without exposing the member itself.
  // Used to optimize code size of `InternalSwap` method.
  template <typename T>
  friend struct ArenaOffsetHelper;

  // The reflection implementation needs to call protected methods directly,
  // reinterpreting pointers as being to Message instead of a specific Message
  // subclass.
  friend class google::protobuf::Reflection;
  friend class internal::SwapFieldHelper;

  friend class LazyRepeatedPtrField;

  // Concrete Arena enabled copy function used to copy message instances.
  // This follows the `Arena::CopyConstruct` signature so that the compiler
  // can have the inlined call into the out of line copy function(s) simply
  // pass the address of `Arena::CopyConstruct` 'as is'.
  using CopyFn = void* (*)(Arena*, const void*);

  struct Rep {
    int allocated_size;
    // Here we declare a huge array as a way of approximating C's "flexible
    // array member" feature without relying on undefined behavior.
    void* elements[(std::numeric_limits<int>::max() - 2 * sizeof(int)) /
                   sizeof(void*)];
  };

  static constexpr size_t kRepHeaderSize = offsetof(Rep, elements);

  // Replaces current_size_ with new_size and returns the previous value of
  // current_size_.  This function is intended to be the only place where
  // current_size_ is modified.
  inline int ExchangeCurrentSize(int new_size) {
    return std::exchange(current_size_, new_size);
  }
  inline bool SizeAtCapacity() const {
    // Harden invariant size() <= allocated_size() <= Capacity().
    ABSL_DCHECK_LE(size(), allocated_size());
    ABSL_DCHECK_LE(allocated_size(), Capacity());
    // This is equivalent to `current_size_ == Capacity()`.
    // Assuming the `Capacity()` function is inlined, the compiler is likely
    // to optimize away "+ kSSOCapacity" and reduce it to
    // "current_size_ > capacity_proxy_", which is one instruction fewer than
    // "current_size_ == capacity_proxy_ + 1".
    return current_size_ >= Capacity();
  }
  inline bool AllocatedSizeAtCapacity() const {
    // Harden invariant size() <= allocated_size() <= Capacity().
    ABSL_DCHECK_LE(size(), allocated_size());
    ABSL_DCHECK_LE(allocated_size(), Capacity());
    // This combines the optimization mentioned in `SizeAtCapacity()` and
    // simplifies `allocated_size()` in the SSO case.
    return using_sso() ? (tagged_rep_or_elem_ != nullptr)
                       : rep()->allocated_size >= Capacity();
  }

  void* const* elements() const {
    return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
  }
  void** elements() {
    return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
  }

  void*& element_at(int index) {
    if (using_sso()) {
      ABSL_DCHECK_EQ(index, 0);
      return tagged_rep_or_elem_;
    }
    return rep()->elements[index];
  }
  const void* element_at(int index) const {
    return const_cast<RepeatedPtrFieldBase*>(this)->element_at(index);
  }

  int allocated_size() const {
    return using_sso() ? (tagged_rep_or_elem_ != nullptr ? 1 : 0)
                       : rep()->allocated_size;
  }
  Rep* rep() {
    ABSL_DCHECK(!using_sso());
    return reinterpret_cast<Rep*>(
        reinterpret_cast<uintptr_t>(tagged_rep_or_elem_) - 1);
  }
  const Rep* rep() const {
    return const_cast<RepeatedPtrFieldBase*>(this)->rep();
  }

  bool using_sso() const {
    return (reinterpret_cast<uintptr_t>(tagged_rep_or_elem_) & 1) == 0;
  }
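
  // Illustrative note on the tagging scheme above: in SSO mode the low bit of
  // `tagged_rep_or_elem_` is 0 and the pointer refers directly to the single
  // inline element (or is null).  Once a Rep is allocated, the stored value
  // is the Rep address plus 1, so using_sso() tests the low bit and rep()
  // subtracts it back out.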
  template <typename TypeHandler>
  static inline Value<TypeHandler>* cast(void* element) {
    return reinterpret_cast<Value<TypeHandler>*>(element);
  }
  template <typename TypeHandler>
  static inline const Value<TypeHandler>* cast(const void* element) {
    return reinterpret_cast<const Value<TypeHandler>*>(element);
  }

  template <typename TypeHandler>
  static inline void Delete(void* obj, Arena* arena) {
    TypeHandler::Delete(cast<TypeHandler>(obj), arena);
  }

  // Out-of-line helper routine for Clear() once the inlined check has
  // determined the container is non-empty.
  template <typename TypeHandler>
  PROTOBUF_NOINLINE void ClearNonEmpty() {
    const int n = current_size_;
    void* const* elems = elements();
    int i = 0;
    ABSL_DCHECK_GT(n, 0);
    // do/while loop to avoid initial test because we know n > 0.
    do {
      TypeHandler::Clear(cast<TypeHandler>(elems[i++]));
    } while (i < n);
    ExchangeCurrentSize(0);
  }

  // Merges messages from `from` into available, cleared messages sitting in
  // the range `[size(), allocated_size())`.  Returns the number of messages
  // merged, which is `min(ClearedCount(), from.size())`.
  // Note that this function does explicitly NOT update `current_size_`.
  // This function is out of line as it should be the slow path: this scenario
  // only happens when a caller constructs and fills a repeated field, then
  // shrinks it, and then merges additional messages into it.
  int MergeIntoClearedMessages(const RepeatedPtrFieldBase& from);

  // Appends all messages from `from` to this instance, using the
  // provided `copy_fn` copy function to copy existing messages.
  void MergeFromConcreteMessage(const RepeatedPtrFieldBase& from,
                                CopyFn copy_fn);

  // Extends capacity by at least |extend_amount|.  Returns a pointer to the
  // next available element slot.
  //
  // Pre-condition: |extend_amount| must be > 0.
  void** InternalExtend(int extend_amount);

  // Ensures that capacity is big enough to store one more allocated element.
  inline void MaybeExtend() {
    if (AllocatedSizeAtCapacity()) {
      ABSL_DCHECK_EQ(allocated_size(), Capacity());
      InternalExtend(1);
    } else {
      ABSL_DCHECK_NE(allocated_size(), Capacity());
    }
  }

  // Ensures that capacity is at least `n` elements.
  // Returns a pointer to the element directly beyond the last element.
  inline void** InternalReserve(int n) {
    if (n <= Capacity()) {
      void** elements = using_sso() ? &tagged_rep_or_elem_ : rep()->elements;
      return elements + current_size_;
    }
    return InternalExtend(n - Capacity());
  }

  // Internal helpers for Add that keep definition out-of-line.
  void* AddMessageLite(ElementFactory factory);
  void* AddString();

  // Common implementation used by various Add* methods.  `factory` is an
  // object used to construct a new element unless there are spare cleared
  // elements ready for reuse.  Returns a pointer to the new element.
  //
  // Note: avoid inlining this function in methods such as `Add()` as this
  // would drastically increase binary size due to template instantiation and
  // implicit inlining.
  template <typename Factory>
  void* AddInternal(Factory factory);

  // A few notes on internal representation:
  //
  // We use an indirected approach, with struct Rep, to keep
  // sizeof(RepeatedPtrFieldBase) equivalent to what it was before arena
  // support was added; namely, 3 8-byte machine words on x86-64.  An instance
  // of Rep is allocated only when the repeated field is non-empty, and it is
  // a dynamically-sized struct (the header is directly followed by
  // elements[]).  We place arena_ and current_size_ directly in the object to
  // avoid cache misses due to the indirection, because these fields are
  // checked frequently.  Placing all fields directly in the
  // RepeatedPtrFieldBase instance would cost significant performance for
  // memory-sensitive workloads.
  void* tagged_rep_or_elem_;
  int current_size_;
  int capacity_proxy_;  // we store `capacity - kSSOCapacity` as an optimization
  Arena* arena_;
};
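
// An illustrative sketch of the representation described above (64-bit,
// non-SSO case):
//
//   RepeatedPtrFieldBase (24 bytes)         heap/arena allocation (Rep)
//   +-----------------------+              +----------------+------------+
//   | tagged_rep_or_elem_ --+--(addr+1)--> | allocated_size | elements[] |
//   | current_size_         |              +----------------+------------+
//   | capacity_proxy_       |
//   | arena_                |
//   +-----------------------+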
// Appends all message values from `from` to this instance using the abstract
// message interface.  This overload is used in places like reflection and
// other locations where the underlying type is unavailable.
template <>
void RepeatedPtrFieldBase::MergeFrom<MessageLite>(
    const RepeatedPtrFieldBase& from);

template <>
inline void RepeatedPtrFieldBase::MergeFrom<Message>(
    const RepeatedPtrFieldBase& from) {
  return MergeFrom<MessageLite>(from);
}

// Appends all `std::string` values from `from` to this instance.
template <>
void RepeatedPtrFieldBase::MergeFrom<std::string>(
    const RepeatedPtrFieldBase& from);

template <typename Factory>
void* RepeatedPtrFieldBase::AddInternal(Factory factory) {
  Arena* const arena = GetArena();
  if (tagged_rep_or_elem_ == nullptr) {
    ExchangeCurrentSize(1);
    tagged_rep_or_elem_ = factory(arena);
    return tagged_rep_or_elem_;
  }
  absl::PrefetchToLocalCache(tagged_rep_or_elem_);
  if (using_sso()) {
    if (current_size_ == 0) {
      ExchangeCurrentSize(1);
      return tagged_rep_or_elem_;
    }
    void*& result = *InternalExtend(1);
    result = factory(arena);
    Rep* r = rep();
    r->allocated_size = 2;
    ExchangeCurrentSize(2);
    return result;
  }
  Rep* r = rep();
  if (PROTOBUF_PREDICT_FALSE(SizeAtCapacity())) {
    InternalExtend(1);
    r = rep();
  } else {
    if (current_size_ != r->allocated_size) {
      return r->elements[ExchangeCurrentSize(current_size_ + 1)];
    }
  }
  ++r->allocated_size;
  void*& result = r->elements[ExchangeCurrentSize(current_size_ + 1)];
  result = factory(arena);
  return result;
}

PROTOBUF_EXPORT void InternalOutOfLineDeleteMessageLite(MessageLite* message);

template <typename GenericType>
class GenericTypeHandler {
 public:
  using Type = GenericType;
  using Movable = IsMovable<Type>;

  static constexpr auto GetNewFunc() { return Arena::DefaultConstruct<Type>; }
  static inline Arena* GetArena(Type* value) {
    return Arena::InternalGetArena(value);
  }

  static inline Type* New(Arena* arena) {
    return static_cast<Type*>(Arena::DefaultConstruct<Type>(arena));
  }
  static inline Type* New(Arena* arena, Type&& value) {
    return Arena::Create<Type>(arena, std::move(value));
  }
  static inline Type* NewFromPrototype(const Type* /*prototype*/,
                                       Arena* arena = nullptr) {
    return New(arena);
  }
  static inline void Delete(Type* value, Arena* arena) {
    if (arena != nullptr) return;
#ifdef __cpp_if_constexpr
    if constexpr (std::is_base_of<MessageLite, Type>::value) {
      // Using the virtual destructor to reduce generated code size that would
      // have happened otherwise due to inlined `~Type()`.
      InternalOutOfLineDeleteMessageLite(value);
    } else {
      delete value;
    }
#else
    delete value;
#endif
  }
  static inline void Clear(Type* value) { value->Clear(); }
  static void Merge(const Type& from, Type* to);
  static inline size_t SpaceUsedLong(const Type& value) {
    return value.SpaceUsedLong();
  }
};

// NewFromPrototypeHelper() is not defined inline here, as we will need to do
// a virtual function dispatch anyways to go from Message* to call New/Merge.
// (The additional helper is needed as a workaround for MSVC.)
PROTOBUF_EXPORT MessageLite* NewFromPrototypeHelper(
    const MessageLite* prototype, Arena* arena);

template <>
inline MessageLite* GenericTypeHandler<MessageLite>::NewFromPrototype(
    const MessageLite* prototype, Arena* arena) {
  return NewFromPrototypeHelper(prototype, arena);
}
template <>
inline Arena* GenericTypeHandler<MessageLite>::GetArena(MessageLite* value) {
  return value->GetArena();
}

template <typename GenericType>
PROTOBUF_NOINLINE inline void GenericTypeHandler<GenericType>::Merge(
    const GenericType& from, GenericType* to) {
  to->MergeFrom(from);
}
template <>
PROTOBUF_EXPORT void GenericTypeHandler<MessageLite>::Merge(
    const MessageLite& from, MessageLite* to);
// Message specialization bodies defined in message.cc.  This split is
// necessary to allow proto2-lite (which includes this header) to be
// independent of Message.
template <>
PROTOBUF_EXPORT Message* GenericTypeHandler<Message>::NewFromPrototype(
    const Message* prototype, Arena* arena);
template <>
PROTOBUF_EXPORT Arena* GenericTypeHandler<Message>::GetArena(Message* value);

PROTOBUF_EXPORT void* NewStringElement(Arena* arena);

template <>
class GenericTypeHandler<std::string> {
 public:
  using Type = std::string;
  using Movable = IsMovable<Type>;

  static constexpr auto GetNewFunc() { return NewStringElement; }
  static inline Arena* GetArena(Type*) { return nullptr; }

  static PROTOBUF_NOINLINE Type* New(Arena* arena) {
    return Arena::Create<Type>(arena);
  }
  static PROTOBUF_NOINLINE Type* New(Arena* arena, Type&& value) {
    return Arena::Create<Type>(arena, std::move(value));
  }
  static inline Type* NewFromPrototype(const Type*, Arena* arena) {
    return New(arena);
  }
  static inline void Delete(Type* value, Arena* arena) {
    if (arena == nullptr) {
      delete value;
    }
  }
  static inline void Clear(Type* value) { value->clear(); }
  static inline void Merge(const Type& from, Type* to) { *to = from; }
  static size_t SpaceUsedLong(const Type& value) {
    return sizeof(value) + StringSpaceUsedExcludingSelfLong(value);
  }
};

}  // namespace internal

// RepeatedPtrField is like RepeatedField, but used for repeated strings or
// Messages.
template <typename Element>
class RepeatedPtrField final : private internal::RepeatedPtrFieldBase {
  static_assert(!std::is_const<Element>::value,
                "We do not support const value types.");
  static_assert(!std::is_volatile<Element>::value,
                "We do not support volatile value types.");
  static_assert(!std::is_pointer<Element>::value,
                "We do not support pointer value types.");
  static_assert(!std::is_reference<Element>::value,
                "We do not support reference value types.");
  static constexpr PROTOBUF_ALWAYS_INLINE void StaticValidityCheck() {
    static_assert(
        absl::disjunction<
            internal::is_supported_string_type<Element>,
            internal::is_supported_message_type<Element>>::value,
        "We only support string and Message types in RepeatedPtrField.");
  }

 public:
  using value_type = Element;
  using size_type = int;
  using difference_type = ptrdiff_t;
  using reference = Element&;
  using const_reference = const Element&;
  using pointer = Element*;
  using const_pointer = const Element*;
  using iterator = internal::RepeatedPtrIterator<Element>;
  using const_iterator = internal::RepeatedPtrIterator<const Element>;
  using reverse_iterator = std::reverse_iterator<iterator>;
  using const_reverse_iterator = std::reverse_iterator<const_iterator>;
  // Custom STL-like iterator that iterates over and returns the underlying
  // pointers to Element rather than Element itself.
  using pointer_iterator =
      internal::RepeatedPtrOverPtrsIterator<Element*, void*>;
  using const_pointer_iterator =
      internal::RepeatedPtrOverPtrsIterator<const Element* const,
                                            const void* const>;

  constexpr RepeatedPtrField();

  // Arena enabled constructors: for internal use only.
  RepeatedPtrField(internal::InternalVisibility, Arena* arena)
      : RepeatedPtrField(arena) {}
  RepeatedPtrField(internal::InternalVisibility, Arena* arena,
                   const RepeatedPtrField& rhs)
      : RepeatedPtrField(arena, rhs) {}

  // TODO: make constructor private
  explicit RepeatedPtrField(Arena* arena);

  template <typename Iter,
            typename = typename std::enable_if<std::is_constructible<
                Element, decltype(*std::declval<Iter>())>::value>::type>
  RepeatedPtrField(Iter begin, Iter end);

  RepeatedPtrField(const RepeatedPtrField& rhs)
      : RepeatedPtrField(nullptr, rhs) {}
  RepeatedPtrField& operator=(const RepeatedPtrField& other)
      ABSL_ATTRIBUTE_LIFETIME_BOUND;

  RepeatedPtrField(RepeatedPtrField&& rhs) noexcept
      : RepeatedPtrField(nullptr, std::move(rhs)) {}
  RepeatedPtrField& operator=(RepeatedPtrField&& other) noexcept
      ABSL_ATTRIBUTE_LIFETIME_BOUND;

  ~RepeatedPtrField();

  bool empty() const;
  int size() const;

  const_reference Get(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  pointer Mutable(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Unlike std::vector, adding an element to a RepeatedPtrField doesn't
  // always make a new element; it might re-use an element left over from when
  // the field was Clear()'d or resize()'d smaller.  For this reason, Add() is
  // the fastest API for adding a new element.
  pointer Add() ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // `Add(std::move(value));` is equivalent to `*Add() = std::move(value);`
  // It will either move-construct to the end of this field, or swap value
  // with the new-or-recycled element at the end of this field.  Note that
  // this operation is very slow if this RepeatedPtrField is not on the
  // same Arena, if any, as `value`.
  void Add(Element&& value);

  // Copying to the end of this RepeatedPtrField is slowest of all; it can't
  // reliably copy-construct to the last element of this RepeatedPtrField, for
  // example (unlike std::vector).
  // We currently block this API.  The right way to add to the end is to call
  // Add() and modify the element it points to.
  // If you must add an existing value, call `*Add() = value;`
  void Add(const Element& value) = delete;

  // Append elements in the range [begin, end) after reserving
  // the appropriate number of elements.
  template <typename Iter>
  void Add(Iter begin, Iter end);

  const_reference operator[](int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return Get(index);
  }
  reference operator[](int index) ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return *Mutable(index);
  }

  const_reference at(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  reference at(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Removes the last element in the array.
  // Ownership of the element is retained by the array.
  void RemoveLast();

  // Deletes elements with indices in the range [start .. start+num-1].
  // Caution: moves all elements with indices [start+num .. ].
  // Calling this routine inside a loop can cause quadratic behavior.
  void DeleteSubrange(int start, int num);

  ABSL_ATTRIBUTE_REINITIALIZES void Clear();

  // Appends the elements from `other` after this instance.
  // The end result length will be `other.size() + this->size()`.
  void MergeFrom(const RepeatedPtrField& other);

  // Replaces the contents with a copy of the elements from `other`.
  ABSL_ATTRIBUTE_REINITIALIZES void CopyFrom(const RepeatedPtrField& other);

  // Replaces the contents with RepeatedPtrField(begin, end).
  template <typename Iter>
  ABSL_ATTRIBUTE_REINITIALIZES void Assign(Iter begin, Iter end);
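
  // Example - typical ways to append elements (an illustrative sketch;
  // `MyMessage`, `set_name()`, `tmp`, `src` and `src_vec` are placeholders,
  // not part of this API):
  //
  //   RepeatedPtrField<MyMessage> field;
  //   field.Add()->set_name("a");        // fastest: mutate the new element
  //   field.Add(std::move(tmp));         // move-append an existing message
  //   *field.Add() = src;                // copy-append (Add(const&) deleted)
  //   field.Assign(src_vec.begin(), src_vec.end());  // replace all contents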
  // Reserves space to expand the field to at least the given size.  This only
  // resizes the pointer array; it doesn't allocate any objects.  If the
  // array is grown, it will always be at least doubled in size.
  void Reserve(int new_size);

  int Capacity() const;

  // Gets the underlying array.  This pointer is possibly invalidated by
  // any add or remove operation.
  Element** mutable_data() ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const Element* const* data() const ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Swaps entire contents with "other".  If they are on separate arenas, then
  // copies data.
  void Swap(RepeatedPtrField* other);

  // Swaps entire contents with "other".  Caller should guarantee that either
  // both fields are on the same arena or both are on the heap.  Swapping
  // between different arenas with this function is disallowed and is caught
  // via ABSL_DCHECK.
  void UnsafeArenaSwap(RepeatedPtrField* other);

  // Swaps two elements.
  void SwapElements(int index1, int index2);

  iterator begin() ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const_iterator begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const_iterator cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  iterator end() ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const_iterator end() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const_iterator cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND;

  reverse_iterator rbegin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return reverse_iterator(end());
  }
  const_reverse_iterator rbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return reverse_iterator(begin());
  }
  const_reverse_iterator rend() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return const_reverse_iterator(begin());
  }

  pointer_iterator pointer_begin() ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const_pointer_iterator pointer_begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  pointer_iterator pointer_end() ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const_pointer_iterator pointer_end() const ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Returns (an estimate of) the number of bytes used by the repeated field,
  // excluding sizeof(*this).
  size_t SpaceUsedExcludingSelfLong() const;

  int SpaceUsedExcludingSelf() const {
    return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  }

  // Advanced memory management --------------------------------------
  // When hardcore memory management becomes necessary -- as it sometimes
  // does here at Google -- the following methods may be useful.

  // Adds an already-allocated object, passing ownership to the
  // RepeatedPtrField.
  //
  // Note that some special behavior occurs with respect to arenas:
  //
  //   (i) if this field holds submessages, the new submessage will be copied
  //       if the original is in an arena and this RepeatedPtrField is either
  //       in a different arena, or on the heap.
  //   (ii) if this field holds strings, the passed-in string *must* be
  //       heap-allocated, not arena-allocated.  There is no way to
  //       dynamically check this at runtime, so User Beware.
  // Requires:  value != nullptr
  void AddAllocated(Element* value);

  // Removes and returns the last element, passing ownership to the caller.
  // Requires:  size() > 0
  //
  // If this RepeatedPtrField is on an arena, an object copy is required to
  // pass ownership back to the user (for compatible semantics).  Use
  // UnsafeArenaReleaseLast() if this behavior is undesired.
  PROTOBUF_NODISCARD Element* ReleaseLast();

  // Adds an already-allocated object, skipping arena-ownership checks.  The
  // user must guarantee that the given object is in the same arena as this
  // RepeatedPtrField.
  // It is also useful in legacy code that uses temporary ownership to avoid
  // copies.  Example:
  //   RepeatedPtrField<T> temp_field;
  //   temp_field.UnsafeArenaAddAllocated(new T);
  //   ... // Do something with temp_field
  //   temp_field.UnsafeArenaExtractSubrange(0, temp_field.size(), nullptr);
  // If you put temp_field on the arena this fails, because the ownership
  // transfers to the arena at the "AddAllocated" call and is not released
  // anymore, causing a double delete.  UnsafeArenaAddAllocated prevents this.
  // Requires:  value != nullptr
  void UnsafeArenaAddAllocated(Element* value);

  // Removes and returns the last element.  Unlike ReleaseLast, the returned
  // pointer is always to the original object.  This may be in an arena, in
  // which case it would have the arena's lifetime.
  // Requires: current_size_ > 0
  pointer UnsafeArenaReleaseLast();

  // Extracts elements with indices in the range "[start .. start+num-1]".
  // The caller assumes ownership of the extracted elements and is responsible
  // for deleting them when they are no longer needed.
  // If "elements" is non-nullptr, then pointers to the extracted elements
  // are stored in "elements[0 .. num-1]" for the convenience of the caller.
  // If "elements" is nullptr, then the caller must use some other mechanism
  // to perform any further operations (like deletion) on these elements.
  // Caution: implementation also moves elements with indices [start+num ..].
  // Calling this routine inside a loop can cause quadratic behavior.
  //
  // Memory copying behavior is identical to ReleaseLast(), described above:
  // if this RepeatedPtrField is on an arena, an object copy is performed for
  // each returned element, so that all returned element pointers are to
  // heap-allocated copies.  If this copy is not desired, the user should call
  // UnsafeArenaExtractSubrange().
  void ExtractSubrange(int start, int num, Element** elements);

  // Identical to ExtractSubrange() described above, except that no object
  // copies are ever performed.  Instead, the raw object pointers are
  // returned.  Thus, if on an arena, the returned objects must not be freed,
  // because they will not be heap-allocated objects.
  void UnsafeArenaExtractSubrange(int start, int num, Element** elements);

  // When elements are removed by calls to RemoveLast() or Clear(), they
  // are not actually freed.  Instead, they are cleared and kept so that
  // they can be reused later.  This can save lots of CPU time when
  // repeatedly reusing a protocol message for similar purposes.
  //
  // Hardcore programs may choose to manipulate these cleared objects
  // to better optimize memory management using the following routines.

  // Gets the number of cleared objects that are currently being kept
  // around for reuse.
  ABSL_DEPRECATED("This will be removed in a future release")
  int ClearedCount() const;

  // Removes the element referenced by position.
  //
  // Returns an iterator to the element immediately following the removed
  // element.
  //
  // Invalidates all iterators at or after the removed element, including
  // end().
  iterator erase(const_iterator position) ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Removes the elements in the range [first, last).
  //
  // Returns an iterator to the element immediately following the removed
  // range.
  //
  // Invalidates all iterators at or after the removed range, including end().
  iterator erase(const_iterator first,
                 const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Gets the arena on which this RepeatedPtrField stores its elements.
  inline Arena* GetArena();
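
  // Example - transferring ownership in and out on the heap (an illustrative
  // sketch; `MyMessage` is a placeholder):
  //
  //   RepeatedPtrField<MyMessage> field;                  // no arena
  //   field.AddAllocated(new MyMessage);                  // field owns it
  //   std::unique_ptr<MyMessage> p(field.ReleaseLast());  // caller owns it
  //
  // If `field` lived on an arena, ReleaseLast() would hand back a fresh heap
  // copy instead (see its documentation above).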
  // For internal use only.
  //
  // This is public due to it being called by generated code.
  void InternalSwap(RepeatedPtrField* PROTOBUF_RESTRICT other) {
    internal::RepeatedPtrFieldBase::InternalSwap(other);
  }

  using RepeatedPtrFieldBase::InternalGetArenaOffset;

 private:
  using InternalArenaConstructable_ = void;
  using DestructorSkippable_ = void;

  friend class Arena;

  friend class internal::TcParser;

  template <typename T>
  friend struct WeakRepeatedPtrField;

  // Note:  RepeatedPtrField SHOULD NOT be subclassed by users.
  using TypeHandler = internal::GenericTypeHandler<Element>;

  RepeatedPtrField(Arena* arena, const RepeatedPtrField& rhs);
  RepeatedPtrField(Arena* arena, RepeatedPtrField&& rhs);

  void AddAllocatedForParse(Element* p) {
    return RepeatedPtrFieldBase::AddAllocatedForParse(p);
  }
};

// -------------------------------------------------------------------

template <typename Element>
constexpr RepeatedPtrField<Element>::RepeatedPtrField()
    : RepeatedPtrFieldBase() {
  StaticValidityCheck();
}

template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena)
    : RepeatedPtrFieldBase(arena) {
  // We can't have StaticValidityCheck here because that requires Element to
  // be a complete type, and in split repeated fields cases, we call
  // CreateMessage<RepeatedPtrField<T>> for incomplete Ts.
}

template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena,
                                                   const RepeatedPtrField& rhs)
    : RepeatedPtrFieldBase(arena) {
  StaticValidityCheck();
  MergeFrom(rhs);
}

template <typename Element>
template <typename Iter, typename>
inline RepeatedPtrField<Element>::RepeatedPtrField(Iter begin, Iter end) {
  StaticValidityCheck();
  Add(begin, end);
}

template <typename Element>
RepeatedPtrField<Element>::~RepeatedPtrField() {
  StaticValidityCheck();
  if (!NeedsDestroy()) return;
#ifdef __cpp_if_constexpr
  if constexpr (std::is_base_of<MessageLite, Element>::value) {
#else
  if (std::is_base_of<MessageLite, Element>::value) {
#endif
    DestroyProtos();
  } else {
    Destroy<TypeHandler>();
  }
}

template <typename Element>
inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
    const RepeatedPtrField& other) ABSL_ATTRIBUTE_LIFETIME_BOUND {
  if (this != &other) CopyFrom(other);
  return *this;
}

template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena,
                                                   RepeatedPtrField&& rhs)
    : RepeatedPtrField(arena) {
  // We don't just call Swap(&rhs) here because it would perform 3 copies if
  // rhs is on a different arena.
  if (internal::CanMoveWithInternalSwap(arena, rhs.GetArena())) {
    InternalSwap(&rhs);
  } else {
    CopyFrom(rhs);
  }
}
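
// Illustrative note: a move between containers on different arenas cannot
// swap pointers and falls back to a deep copy, e.g. (sketch; assumes the
// source field is arena-constructed via Arena::Create):
//
//   Arena arena;
//   auto* on_arena = Arena::Create<RepeatedPtrField<std::string>>(&arena);
//   RepeatedPtrField<std::string> on_heap;
//   on_heap = std::move(*on_arena);  // copies elements; no pointer swap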
template <typename Element>
inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
    RepeatedPtrField&& other) noexcept ABSL_ATTRIBUTE_LIFETIME_BOUND {
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // the two fields are on different arenas.
  if (this != &other) {
    if (internal::CanMoveWithInternalSwap(GetArena(), other.GetArena())) {
      InternalSwap(&other);
    } else {
      CopyFrom(other);
    }
  }
  return *this;
}

template <typename Element>
inline bool RepeatedPtrField<Element>::empty() const {
  return RepeatedPtrFieldBase::empty();
}

template <typename Element>
inline int RepeatedPtrField<Element>::size() const {
  return RepeatedPtrFieldBase::size();
}

template <typename Element>
inline const Element& RepeatedPtrField<Element>::Get(int index) const
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return RepeatedPtrFieldBase::Get<TypeHandler>(index);
}

template <typename Element>
inline const Element& RepeatedPtrField<Element>::at(int index) const
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return RepeatedPtrFieldBase::at<TypeHandler>(index);
}

template <typename Element>
inline Element& RepeatedPtrField<Element>::at(int index)
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return RepeatedPtrFieldBase::at<TypeHandler>(index);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::Mutable(int index)
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return RepeatedPtrFieldBase::Mutable<TypeHandler>(index);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::Add()
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return RepeatedPtrFieldBase::Add<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::Add(Element&& value) {
  RepeatedPtrFieldBase::Add<TypeHandler>(std::move(value));
}

template <typename Element>
template <typename Iter>
inline void RepeatedPtrField<Element>::Add(Iter begin, Iter end) {
  if (std::is_base_of<
          std::forward_iterator_tag,
          typename std::iterator_traits<Iter>::iterator_category>::value) {
    int reserve = static_cast<int>(std::distance(begin, end));
    Reserve(size() + reserve);
  }
  for (; begin != end; ++begin) {
    *Add() = *begin;
  }
}

template <typename Element>
inline void RepeatedPtrField<Element>::RemoveLast() {
  RepeatedPtrFieldBase::RemoveLast<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::DeleteSubrange(int start, int num) {
  ABSL_DCHECK_GE(start, 0);
  ABSL_DCHECK_GE(num, 0);
  ABSL_DCHECK_LE(start + num, size());
  void** subrange = raw_mutable_data() + start;
  Arena* arena = GetArena();
  for (int i = 0; i < num; ++i) {
    using H = CommonHandler<TypeHandler>;
    H::Delete(static_cast<Element*>(subrange[i]), arena);
  }
  UnsafeArenaExtractSubrange(start, num, nullptr);
}

template <typename Element>
inline void RepeatedPtrField<Element>::ExtractSubrange(int start, int num,
                                                       Element** elements) {
  ABSL_DCHECK_GE(start, 0);
  ABSL_DCHECK_GE(num, 0);
  ABSL_DCHECK_LE(start + num, size());

  if (num == 0) return;

  ABSL_DCHECK_NE(elements, nullptr)
      << "Releasing elements without transferring ownership is an unsafe "
         "operation.  Use UnsafeArenaExtractSubrange.";
  if (elements != nullptr) {
    Arena* arena = GetArena();
    auto* extracted = data() + start;
    if (internal::DebugHardenForceCopyInRelease()) {
      // Always copy.
      for (int i = 0; i < num; ++i) {
        elements[i] = copy<TypeHandler>(extracted[i]);
      }
      if (arena == nullptr) {
        for (int i = 0; i < num; ++i) {
          delete extracted[i];
        }
      }
    } else {
      // If we're on an arena, we perform a copy for each element so that the
      // returned elements are heap-allocated.  Otherwise, just forward it.
      if (arena != nullptr) {
        for (int i = 0; i < num; ++i) {
          elements[i] = copy<TypeHandler>(extracted[i]);
        }
      } else {
        memcpy(elements, extracted, num * sizeof(Element*));
      }
    }
  }
  CloseGap(start, num);
}
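
// Example - ExtractSubrange hands ownership of the removed elements to the
// caller (an illustrative sketch; `MyMessage` is a placeholder):
//
//   std::vector<MyMessage*> out(2);
//   field.ExtractSubrange(0, 2, out.data());
//   // out[0] and out[1] are now owned by the caller (heap copies if the
//   // field lives on an arena) and must eventually be deleted.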
template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaExtractSubrange(
    int start, int num, Element** elements) {
  ABSL_DCHECK_GE(start, 0);
  ABSL_DCHECK_GE(num, 0);
  ABSL_DCHECK_LE(start + num, size());

  if (num > 0) {
    // Save the values of the removed elements if requested.
    if (elements != nullptr) {
      memcpy(elements, data() + start, num * sizeof(Element*));
    }
    CloseGap(start, num);
  }
}

template <typename Element>
inline void RepeatedPtrField<Element>::Clear() {
  RepeatedPtrFieldBase::Clear<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::MergeFrom(
    const RepeatedPtrField& other) {
  if (other.empty()) return;
  RepeatedPtrFieldBase::MergeFrom<Element>(other);
}

template <typename Element>
inline void RepeatedPtrField<Element>::CopyFrom(
    const RepeatedPtrField& other) {
  RepeatedPtrFieldBase::CopyFrom<TypeHandler>(other);
}

template <typename Element>
template <typename Iter>
inline void RepeatedPtrField<Element>::Assign(Iter begin, Iter end) {
  Clear();
  Add(begin, end);
}

template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::erase(const_iterator position)
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return erase(position, position + 1);
}

template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::erase(const_iterator first, const_iterator last)
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  size_type pos_offset =
      static_cast<size_type>(std::distance(cbegin(), first));
  size_type last_offset =
      static_cast<size_type>(std::distance(cbegin(), last));
  DeleteSubrange(pos_offset, last_offset - pos_offset);
  return begin() + pos_offset;
}

template <typename Element>
inline Element** RepeatedPtrField<Element>::mutable_data()
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return RepeatedPtrFieldBase::mutable_data<TypeHandler>();
}

template <typename Element>
inline const Element* const* RepeatedPtrField<Element>::data() const
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return RepeatedPtrFieldBase::data<TypeHandler>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::Swap(RepeatedPtrField* other) {
  if (this == other) return;
  RepeatedPtrFieldBase::Swap<TypeHandler>(other);
}

template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaSwap(
    RepeatedPtrField* other) {
  if (this == other) return;
  ABSL_DCHECK_EQ(GetArena(), other->GetArena());
  RepeatedPtrFieldBase::InternalSwap(other);
}

template <typename Element>
inline void RepeatedPtrField<Element>::SwapElements(int index1, int index2) {
  RepeatedPtrFieldBase::SwapElements(index1, index2);
}

template <typename Element>
inline Arena* RepeatedPtrField<Element>::GetArena() {
  return RepeatedPtrFieldBase::GetArena();
}

template <typename Element>
inline size_t RepeatedPtrField<Element>::SpaceUsedExcludingSelfLong() const {
  // `google::protobuf::Message` has a virtual method `SpaceUsedLong`, hence we can
  // instantiate just one function for all protobuf messages.
  // Note: std::is_base_of requires that `Element` is a concrete class.
  using H = typename std::conditional<std::is_base_of<Message, Element>::value,
                                      internal::GenericTypeHandler<Message>,
                                      TypeHandler>::type;
  return RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong<H>();
}

template <typename Element>
inline void RepeatedPtrField<Element>::AddAllocated(Element* value) {
  RepeatedPtrFieldBase::AddAllocated<TypeHandler>(value);
}

template <typename Element>
inline void RepeatedPtrField<Element>::UnsafeArenaAddAllocated(
    Element* value) {
  RepeatedPtrFieldBase::UnsafeArenaAddAllocated<TypeHandler>(value);
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::ReleaseLast() {
  return RepeatedPtrFieldBase::ReleaseLast<TypeHandler>();
}

template <typename Element>
inline Element* RepeatedPtrField<Element>::UnsafeArenaReleaseLast() {
  return RepeatedPtrFieldBase::UnsafeArenaReleaseLast<TypeHandler>();
}

template <typename Element>
inline int RepeatedPtrField<Element>::ClearedCount() const {
  return RepeatedPtrFieldBase::ClearedCount();
}

template <typename Element>
inline void RepeatedPtrField<Element>::Reserve(int new_size) {
  return RepeatedPtrFieldBase::Reserve(new_size);
}

template <typename Element>
inline int RepeatedPtrField<Element>::Capacity() const {
  return RepeatedPtrFieldBase::Capacity();
}
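
// Illustrative note on Reserve(): it grows only the pointer array, not the
// elements themselves, e.g. (sketch):
//
//   RepeatedPtrField<std::string> field;
//   field.Reserve(100);  // >= 100 pointer slots; no std::string is created
//   // field.Capacity() >= 100 while field.size() is still 0.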

// -------------------------------------------------------------------

namespace internal {

// STL-like iterator implementation for RepeatedPtrField.  You should not
// refer to this class directly; use RepeatedPtrField<T>::iterator instead.
//
// The iterator for RepeatedPtrField<T>, RepeatedPtrIterator<T>, is
// very similar to iterator_ptr<T**> in util/gtl/iterator_adaptors.h,
// but adds random-access operators and is modified to wrap a void** base
// iterator (since RepeatedPtrField stores its array as a void* array and
// casting void** to T** would violate C++ aliasing rules).
//
// This code is based on net/proto/proto-array-internal.h by Jeffrey Yasskin
// (jyasskin@google.com).
template <typename Element>
class RepeatedPtrIterator {
 public:
  using iterator = RepeatedPtrIterator<Element>;
  using iterator_category = std::random_access_iterator_tag;
  using value_type = typename std::remove_const<Element>::type;
  using difference_type = std::ptrdiff_t;
  using pointer = Element*;
  using reference = Element&;

  RepeatedPtrIterator() : it_(nullptr) {}
  explicit RepeatedPtrIterator(void* const* it) : it_(it) {}

  // Allows "upcasting" from RepeatedPtrIterator<T**> to
  // RepeatedPtrIterator<const T* const*>.
  template <typename OtherElement,
            typename std::enable_if<std::is_convertible<
                OtherElement*, pointer>::value>::type* = nullptr>
  RepeatedPtrIterator(const RepeatedPtrIterator<OtherElement>& other)
      : it_(other.it_) {}

  // dereferenceable
  reference operator*() const { return *reinterpret_cast<Element*>(*it_); }
  pointer operator->() const { return &(operator*()); }

  // {inc,dec}rementable
  iterator& operator++() {
    ++it_;
    return *this;
  }
  iterator operator++(int) { return iterator(it_++); }
  iterator& operator--() {
    --it_;
    return *this;
  }
  iterator operator--(int) { return iterator(it_--); }

  // equality_comparable
  friend bool operator==(const iterator& x, const iterator& y) {
    return x.it_ == y.it_;
  }
  friend bool operator!=(const iterator& x, const iterator& y) {
    return x.it_ != y.it_;
  }

  // less_than_comparable
  friend bool operator<(const iterator& x, const iterator& y) {
    return x.it_ < y.it_;
  }
  friend bool operator<=(const iterator& x, const iterator& y) {
    return x.it_ <= y.it_;
  }
  friend bool operator>(const iterator& x, const iterator& y) {
    return x.it_ > y.it_;
  }
  friend bool operator>=(const iterator& x, const iterator& y) {
    return x.it_ >= y.it_;
  }

  // addable, subtractable
  iterator& operator+=(difference_type d) {
    it_ += d;
    return *this;
  }
  friend iterator operator+(iterator it, const difference_type d) {
    it += d;
    return it;
  }
  friend iterator operator+(const difference_type d, iterator it) {
    it += d;
    return it;
  }
  iterator& operator-=(difference_type d) {
    it_ -= d;
    return *this;
  }
  friend iterator operator-(iterator it, difference_type d) {
    it -= d;
    return it;
  }

  // indexable
  reference operator[](difference_type d) const { return *(*this + d); }

  // random access iterator
  friend difference_type operator-(iterator it1, iterator it2) {
    return it1.it_ - it2.it_;
  }

 private:
  template <typename OtherElement>
  friend class RepeatedPtrIterator;

  // The internal iterator.
  void* const* it_;
};

template <typename Traits, typename = void>
struct IteratorConceptSupport {
  using tag = typename Traits::iterator_category;
};
template <typename Traits>
struct IteratorConceptSupport<
    Traits, absl::void_t<typename Traits::iterator_concept>> {
  using tag = typename Traits::iterator_concept;
};

// Provides an iterator that operates on pointers to the underlying objects
// rather than the objects themselves as RepeatedPtrIterator does.
// Consider using this when working with stl algorithms that change
// the array.
// The VoidPtr template parameter holds the type-agnostic pointer value
// referenced by the iterator.  It should either be "void *" for a mutable
// iterator, or "const void* const" for a constant iterator.
template <typename Element, typename VoidPtr>
class RepeatedPtrOverPtrsIterator {
 private:
  using traits =
      std::iterator_traits<typename std::remove_const<Element>::type*>;

 public:
  using value_type = typename traits::value_type;
  using difference_type = typename traits::difference_type;
  using pointer = Element*;
  using reference = Element&;
  using iterator_category = typename traits::iterator_category;
  using iterator_concept = typename IteratorConceptSupport<traits>::tag;

  using iterator = RepeatedPtrOverPtrsIterator<Element, VoidPtr>;

  RepeatedPtrOverPtrsIterator() : it_(nullptr) {}
  explicit RepeatedPtrOverPtrsIterator(VoidPtr* it) : it_(it) {}

  // Allows "upcasting" from RepeatedPtrOverPtrsIterator<T**> to
  // RepeatedPtrOverPtrsIterator<const T* const*>.
  template <
      typename OtherElement, typename OtherVoidPtr,
      typename std::enable_if<
          std::is_convertible<OtherElement*, pointer>::value &&
          std::is_convertible<OtherVoidPtr*, VoidPtr*>::value>::type* =
          nullptr>
  RepeatedPtrOverPtrsIterator(
      const RepeatedPtrOverPtrsIterator<OtherElement, OtherVoidPtr>& other)
      : it_(other.it_) {}

  // dereferenceable
  reference operator*() const { return *reinterpret_cast<Element*>(it_); }
  pointer operator->() const { return reinterpret_cast<Element*>(it_); }

  // {inc,dec}rementable
  iterator& operator++() {
    ++it_;
    return *this;
  }
  iterator operator++(int) { return iterator(it_++); }
  iterator& operator--() {
    --it_;
    return *this;
  }
  iterator operator--(int) { return iterator(it_--); }

  // equality_comparable
  friend bool operator==(const iterator& x, const iterator& y) {
    return x.it_ == y.it_;
  }
  friend bool operator!=(const iterator& x, const iterator& y) {
    return x.it_ != y.it_;
  }

  // less_than_comparable
  friend bool operator<(const iterator& x, const iterator& y) {
    return x.it_ < y.it_;
  }
  friend bool operator<=(const iterator& x, const iterator& y) {
    return x.it_ <= y.it_;
  }
  friend bool operator>(const iterator& x, const iterator& y) {
    return x.it_ > y.it_;
  }
  friend bool operator>=(const iterator& x, const iterator& y) {
    return x.it_ >= y.it_;
  }

  // addable, subtractable
  iterator& operator+=(difference_type d) {
    it_ += d;
    return *this;
  }
  friend iterator operator+(iterator it, difference_type d) {
    it += d;
    return it;
  }
  friend iterator operator+(difference_type d, iterator it) {
    it += d;
    return it;
  }
  iterator& operator-=(difference_type d) {
    it_ -= d;
    return *this;
  }
  friend iterator operator-(iterator it, difference_type d) {
    it -= d;
    return it;
  }

  // indexable
  reference operator[](difference_type d) const { return *(*this + d); }

  // random access iterator
  friend difference_type operator-(iterator it1, iterator it2) {
    return it1.it_ - it2.it_;
  }

 private:
  template <typename OtherElement, typename OtherVoidPtr>
  friend class RepeatedPtrOverPtrsIterator;

  // The internal iterator.
  VoidPtr* it_;
};
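
// Example - the pointer iterators above allow mutating STL algorithms to
// reorder elements cheaply by swapping pointers (an illustrative sketch;
// `MyMessage`, `mutable_items()` and `id()` are placeholders):
//
//   RepeatedPtrField<MyMessage>& field = *msg.mutable_items();
//   std::sort(field.pointer_begin(), field.pointer_end(),
//             [](const MyMessage* a, const MyMessage* b) {
//               return a->id() < b->id();
//             });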
}  // namespace internal

template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::begin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return iterator(raw_data());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return iterator(raw_data());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return begin();
}
template <typename Element>
inline typename RepeatedPtrField<Element>::iterator
RepeatedPtrField<Element>::end() ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return iterator(raw_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::end() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return iterator(raw_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_iterator
RepeatedPtrField<Element>::cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return end();
}

template <typename Element>
inline typename RepeatedPtrField<Element>::pointer_iterator
RepeatedPtrField<Element>::pointer_begin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return pointer_iterator(raw_mutable_data());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_pointer_iterator
RepeatedPtrField<Element>::pointer_begin() const
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return const_pointer_iterator(const_cast<const void* const*>(raw_data()));
}
template <typename Element>
inline typename RepeatedPtrField<Element>::pointer_iterator
RepeatedPtrField<Element>::pointer_end() ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return pointer_iterator(raw_mutable_data() + size());
}
template <typename Element>
inline typename RepeatedPtrField<Element>::const_pointer_iterator
RepeatedPtrField<Element>::pointer_end() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return const_pointer_iterator(
      const_cast<const void* const*>(raw_data() + size()));
}

// Iterators and helper functions that follow the spirit of the STL
// std::back_insert_iterator and std::back_inserter but are tailor-made
// for RepeatedField and RepeatedPtrField.  Typical usage would be:
//
//   std::copy(some_sequence.begin(), some_sequence.end(),
//             RepeatedFieldBackInserter(proto.mutable_sequence()));
//
// Ported by johannes from util/gtl/proto-array-iterators.h

namespace internal {

// A back inserter for RepeatedPtrField objects.
template <typename T>
class RepeatedPtrFieldBackInsertIterator {
 public:
  using iterator_category = std::output_iterator_tag;
  using value_type = T;
  using pointer = void;
  using reference = void;
  using difference_type = std::ptrdiff_t;

  RepeatedPtrFieldBackInsertIterator(RepeatedPtrField<T>* const mutable_field)
      : field_(mutable_field) {}
  RepeatedPtrFieldBackInsertIterator<T>& operator=(const T& value) {
    *field_->Add() = value;
    return *this;
  }
  RepeatedPtrFieldBackInsertIterator<T>& operator=(
      const T* const ptr_to_value) {
    *field_->Add() = *ptr_to_value;
    return *this;
  }
  RepeatedPtrFieldBackInsertIterator<T>& operator=(T&& value) {
    *field_->Add() = std::move(value);
    return *this;
  }
  RepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
  RepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
  RepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
    return *this;
  }

 private:
  RepeatedPtrField<T>* field_;
};

// A back inserter for RepeatedPtrFields that inserts by transferring
// ownership of a pointer.
template <typename T>
class AllocatedRepeatedPtrFieldBackInsertIterator {
 public:
  using iterator_category = std::output_iterator_tag;
  using value_type = T;
  using pointer = void;
  using reference = void;
  using difference_type = std::ptrdiff_t;

  explicit AllocatedRepeatedPtrFieldBackInsertIterator(
      RepeatedPtrField<T>* const mutable_field)
      : field_(mutable_field) {}
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
      T* const ptr_to_value) {
    field_->AddAllocated(ptr_to_value);
    return *this;
  }
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
    return *this;
  }
  AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
      int /* unused */) {
    return *this;
  }

 private:
  RepeatedPtrField<T>* field_;
};

// Almost identical to AllocatedRepeatedPtrFieldBackInsertIterator.  This one
// uses UnsafeArenaAddAllocated instead.
template <typename T>
class UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator {
 public:
  using iterator_category = std::output_iterator_tag;
  using value_type = T;
  using pointer = void;
  using reference = void;
  using difference_type = std::ptrdiff_t;

  explicit UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator(
      RepeatedPtrField<T>* const mutable_field)
      : field_(mutable_field) {}
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
      T const* const ptr_to_value) {
    field_->UnsafeArenaAddAllocated(const_cast<T*>(ptr_to_value));
    return *this;
  }
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() {
    return *this;
  }
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
    return *this;
  }
  UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
      int /* unused */) {
    return *this;
  }

 private:
  RepeatedPtrField<T>* field_;
};

}  // namespace internal

// Provides a back insert iterator for RepeatedPtrField instances,
// similar to std::back_inserter().
template <typename T>
internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
}

// Special back insert iterator for RepeatedPtrField instances, just in
// case someone wants to write generic template code that can access both
// RepeatedFields and RepeatedPtrFields using a common name.
template <typename T>
internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
}

// Provides a back insert iterator for RepeatedPtrField instances
// similar to std::back_inserter() which transfers the ownership while
// copying elements.
template <typename T>
internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>
AllocatedRepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>(
      mutable_field);
}
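
// Example - filling a field via an ownership-transferring back inserter (an
// illustrative sketch; `MakeHeapMessage` is a placeholder that returns a
// heap-allocated message):
//
//   std::transform(keys.begin(), keys.end(),
//                  AllocatedRepeatedPtrFieldBackInserter(msg.mutable_items()),
//                  MakeHeapMessage);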
// Similar to AllocatedRepeatedPtrFieldBackInserter, using
// UnsafeArenaAddAllocated instead of AddAllocated.
// This is slightly faster if that matters.  It is also useful in legacy code
// that uses temporary ownership to avoid copies.  Example:
//   RepeatedPtrField<T> temp_field;
//   temp_field.UnsafeArenaAddAllocated(new T);
//   ... // Do something with temp_field
//   temp_field.UnsafeArenaExtractSubrange(0, temp_field.size(), nullptr);
// Putting temp_field on the arena fails because the ownership transfers to
// the arena at the "AddAllocated" call and is not released anymore, causing
// a double delete.  This function uses UnsafeArenaAddAllocated to prevent
// this.
template <typename T>
internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>
UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(
    RepeatedPtrField<T>* const mutable_field) {
  return internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>(
      mutable_field);
}

namespace internal {
// Size optimization for `memswap<N>` - the `N` supplied below is used by
// every `RepeatedPtrField<T>`.
extern template PROTOBUF_EXPORT_TEMPLATE_DECLARE void
memswap<ArenaOffsetHelper<RepeatedPtrFieldBase>::value>(
    char* PROTOBUF_RESTRICT, char* PROTOBUF_RESTRICT);
}  // namespace internal

}  // namespace protobuf
}  // namespace google

#include "google/protobuf/port_undef.inc"

#endif  // GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__