// Copyright (c) 2013-2016 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// This file is NOT intended for use by clients, except in generated code.
//
// This file defines low-level, non-type-safe classes for traversing the Cap'n Proto memory layout
// (which is also its wire format). Code generated by the Cap'n Proto compiler uses these classes,
// as do other parts of the Cap'n Proto library that provide a higher-level interface for
// dynamic introspection.
#ifndef CAPNP_LAYOUT_H_
#define CAPNP_LAYOUT_H_
#if defined(__GNUC__) && !defined(CAPNP_HEADER_WARNINGS)
#pragma GCC system_header
#endif
#include <kj/common.h>
#include <kj/memory.h>
#include "common.h"
#include "blob.h"
#include "endian.h"
#if (defined(__mips__) || defined(__hppa__)) && !defined(CAPNP_CANONICALIZE_NAN)
#define CAPNP_CANONICALIZE_NAN 1
// Explicitly detect NaNs and canonicalize them to the quiet NaN value as would be returned by
// __builtin_nan("") on systems implementing the IEEE-754 recommended (but not required) NaN
// signalling/quiet differentiation (such as x86). Unfortunately, some architectures -- in
// particular, MIPS -- represent quiet vs. signalling nans differently than the rest of the world.
// Canonicalizing them makes output consistent (which is important!), but hurts performance
// slightly.
//
// Note that trying to convert MIPS NaNs to standard NaNs without losing data doesn't work.
// Signalling vs. quiet is indicated by a bit, with the meaning being the opposite on MIPS vs.
// everyone else. It would be great if we could just flip that bit, but we can't, because if the
// significand is all-zero, then the value is infinity rather than NaN. This means that on most
// machines, where the bit indicates quietness, there is one more quiet NaN value than signalling
// NaN value, whereas on MIPS there is one more sNaN than qNaN, and thus there is no isomorphic
// mapping that properly preserves quietness. Instead of doing something hacky, we just give up
// and blow away NaN payloads, because no one uses them anyway.
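//
// For reference, the canonical bit patterns used by the mask() specializations below are
// 0x7fc00000 for float and 0x7ff8000000000000 for double: sign bit clear, exponent all ones, and
// only the most significant bit of the significand set, which is the standard quiet NaN produced
// by __builtin_nan("") on most platforms.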
#endif
namespace capnp {
#if !CAPNP_LITE
class ClientHook;
#endif // !CAPNP_LITE
namespace _ { // private
class PointerBuilder;
class PointerReader;
class StructBuilder;
class StructReader;
class ListBuilder;
class ListReader;
class OrphanBuilder;
struct WirePointer;
struct WireHelpers;
class SegmentReader;
class SegmentBuilder;
class Arena;
class BuilderArena;
// =============================================================================
#if CAPNP_DEBUG_TYPES
typedef kj::UnitRatio<kj::Bounded<64, uint>, BitLabel, ElementLabel> BitsPerElementTableType;
#else
typedef uint BitsPerElementTableType;
#endif
static constexpr BitsPerElementTableType BITS_PER_ELEMENT_TABLE[8] = {
bounded< 0>() * BITS / ELEMENTS,
bounded< 1>() * BITS / ELEMENTS,
bounded< 8>() * BITS / ELEMENTS,
bounded<16>() * BITS / ELEMENTS,
bounded<32>() * BITS / ELEMENTS,
bounded<64>() * BITS / ELEMENTS,
bounded< 0>() * BITS / ELEMENTS,
bounded< 0>() * BITS / ELEMENTS
};
inline KJ_CONSTEXPR() BitsPerElementTableType dataBitsPerElement(ElementSize size) {
return _::BITS_PER_ELEMENT_TABLE[static_cast<int>(size)];
}
inline constexpr PointersPerElementN<1> pointersPerElement(ElementSize size) {
return size == ElementSize::POINTER
? PointersPerElementN<1>(ONE * POINTERS / ELEMENTS)
: PointersPerElementN<1>(ZERO * POINTERS / ELEMENTS);
}
static constexpr BitsPerElementTableType BITS_PER_ELEMENT_INCLUDING_POINTERS_TABLE[8] = {
bounded< 0>() * BITS / ELEMENTS,
bounded< 1>() * BITS / ELEMENTS,
bounded< 8>() * BITS / ELEMENTS,
bounded<16>() * BITS / ELEMENTS,
bounded<32>() * BITS / ELEMENTS,
bounded<64>() * BITS / ELEMENTS,
bounded<64>() * BITS / ELEMENTS,
bounded< 0>() * BITS / ELEMENTS
};
inline KJ_CONSTEXPR() BitsPerElementTableType bitsPerElementIncludingPointers(ElementSize size) {
return _::BITS_PER_ELEMENT_INCLUDING_POINTERS_TABLE[static_cast<int>(size)];
}
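// Illustrative readings of the tables above (indexed by the ElementSize encoding VOID, BIT, BYTE,
// TWO_BYTES, FOUR_BYTES, EIGHT_BYTES, POINTER, INLINE_COMPOSITE):
//     dataBitsPerElement(ElementSize::FOUR_BYTES)             // 32 * BITS / ELEMENTS
//     dataBitsPerElement(ElementSize::POINTER)                 // 0 * BITS / ELEMENTS
//     bitsPerElementIncludingPointers(ElementSize::POINTER)    // 64 * BITS / ELEMENTS
// Pointers carry no data bits but occupy one 64-bit word on the wire, hence the last two entries.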
template <size_t size> struct ElementSizeForByteSize;
template <> struct ElementSizeForByteSize<1> { static constexpr ElementSize value = ElementSize::BYTE; };
template <> struct ElementSizeForByteSize<2> { static constexpr ElementSize value = ElementSize::TWO_BYTES; };
template <> struct ElementSizeForByteSize<4> { static constexpr ElementSize value = ElementSize::FOUR_BYTES; };
template <> struct ElementSizeForByteSize<8> { static constexpr ElementSize value = ElementSize::EIGHT_BYTES; };
template <typename T> struct ElementSizeForType {
static constexpr ElementSize value =
// Primitive types that aren't special-cased below can be determined from sizeof().
CAPNP_KIND(T) == Kind::PRIMITIVE ? ElementSizeForByteSize<sizeof(T)>::value :
CAPNP_KIND(T) == Kind::ENUM ? ElementSize::TWO_BYTES :
CAPNP_KIND(T) == Kind::STRUCT ? ElementSize::INLINE_COMPOSITE :
// Everything else is a pointer.
ElementSize::POINTER;
};
// Void and bool are special.
template <> struct ElementSizeForType<Void> { static constexpr ElementSize value = ElementSize::VOID; };
template <> struct ElementSizeForType<bool> { static constexpr ElementSize value = ElementSize::BIT; };
// Lists and blobs are pointers, not structs.
template <typename T, Kind K> struct ElementSizeForType<List<T, K>> {
static constexpr ElementSize value = ElementSize::POINTER;
};
template <> struct ElementSizeForType<Text> {
static constexpr ElementSize value = ElementSize::POINTER;
};
template <> struct ElementSizeForType<Data> {
static constexpr ElementSize value = ElementSize::POINTER;
};
template <typename T>
inline constexpr ElementSize elementSizeForType() {
return ElementSizeForType<T>::value;
}
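// Illustrative examples of the mapping above: elementSizeForType<uint32_t>() is
// ElementSize::FOUR_BYTES, elementSizeForType<bool>() is ElementSize::BIT,
// elementSizeForType<Text>() is ElementSize::POINTER, and any generated struct type maps to
// ElementSize::INLINE_COMPOSITE.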
struct MessageSizeCounts {
WordCountN<61, uint64_t> wordCount; // 2^64 bytes
uint capCount;
MessageSizeCounts& operator+=(const MessageSizeCounts& other) {
// OK to truncate unchecked because this class is used to count actual stuff in memory, and
// we couldn't possibly have anywhere near 2^61 words.
wordCount = assumeBits<61>(wordCount + other.wordCount);
capCount += other.capCount;
return *this;
}
void addWords(WordCountN<61, uint64_t> other) {
wordCount = assumeBits<61>(wordCount + other);
}
MessageSize asPublic() {
return MessageSize { unbound(wordCount / WORDS), capCount };
}
};
// =============================================================================
template <int wordCount>
union AlignedData {
// Useful for declaring static constant data blobs as an array of bytes, but forcing those
// bytes to be word-aligned.
uint8_t bytes[wordCount * sizeof(word)];
word words[wordCount];
};
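// Illustrative sketch (the identifier is hypothetical): a word-aligned constant blob can be
// declared as
//     static constexpr AlignedData<1> NULL_WORD = {{0, 0, 0, 0, 0, 0, 0, 0}};
// and NULL_WORD.words passed wherever a `const word*` (e.g. an encoded default value) is expected.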
struct StructSize {
StructDataWordCount data;
StructPointerCount pointers;
inline constexpr WordCountN<17> total() const { return data + pointers * WORDS_PER_POINTER; }
StructSize() = default;
inline constexpr StructSize(StructDataWordCount data, StructPointerCount pointers)
: data(data), pointers(pointers) {}
};
template <typename T, typename CapnpPrivate = typename T::_capnpPrivate>
inline constexpr StructSize structSize() {
return StructSize(bounded(CapnpPrivate::dataWordSize) * WORDS,
bounded(CapnpPrivate::pointerCount) * POINTERS);
}
template <typename T, typename CapnpPrivate = typename T::_capnpPrivate,
typename = kj::EnableIf<CAPNP_KIND(T) == Kind::STRUCT>>
inline constexpr StructSize minStructSizeForElement() {
// If T is a struct, return its struct size. Otherwise return the minimum struct size big enough
// to hold a T.
return StructSize(bounded(CapnpPrivate::dataWordSize) * WORDS,
bounded(CapnpPrivate::pointerCount) * POINTERS);
}
template <typename T, typename = kj::EnableIf<CAPNP_KIND(T) != Kind::STRUCT>>
inline constexpr StructSize minStructSizeForElement() {
// If T is a struct, return its struct size. Otherwise return the minimum struct size big enough
// to hold a T.
return StructSize(
dataBitsPerElement(elementSizeForType<T>()) * ELEMENTS > ZERO * BITS
? StructDataWordCount(ONE * WORDS) : StructDataWordCount(ZERO * WORDS),
pointersPerElement(elementSizeForType<T>()) * ELEMENTS);
}
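// Illustrative examples: minStructSizeForElement<uint64_t>() is {1 word, 0 pointers} because the
// value fits in the data section, minStructSizeForElement<Text>() is {0 words, 1 pointer}, and
// for a generated struct type the struct's own size is returned unchanged.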
// -------------------------------------------------------------------
// Masking of default values
template <typename T, Kind kind = CAPNP_KIND(T)> struct Mask_;
template <typename T> struct Mask_<T, Kind::PRIMITIVE> { typedef T Type; };
template <typename T> struct Mask_<T, Kind::ENUM> { typedef uint16_t Type; };
template <> struct Mask_<float, Kind::PRIMITIVE> { typedef uint32_t Type; };
template <> struct Mask_<double, Kind::PRIMITIVE> { typedef uint64_t Type; };
template <typename T> struct Mask_<T, Kind::OTHER> {
// Union discriminants end up here.
static_assert(sizeof(T) == 2, "Don't know how to mask this type.");
typedef uint16_t Type;
};
template <typename T>
using Mask = typename Mask_<T>::Type;
template <typename T>
KJ_ALWAYS_INLINE(Mask<T> mask(T value, Mask<T> mask));
template <typename T>
KJ_ALWAYS_INLINE(T unmask(Mask<T> value, Mask<T> mask));
template <typename T>
inline Mask<T> mask(T value, Mask<T> mask) {
return static_cast<Mask<T> >(value) ^ mask;
}
template <>
inline uint32_t mask<float>(float value, uint32_t mask) {
#if CAPNP_CANONICALIZE_NAN
if (value != value) {
return 0x7fc00000u ^ mask;
}
#endif
uint32_t i;
static_assert(sizeof(i) == sizeof(value), "float is not 32 bits?");
memcpy(&i, &value, sizeof(value));
return i ^ mask;
}
template <>
inline uint64_t mask<double>(double value, uint64_t mask) {
#if CAPNP_CANONICALIZE_NAN
if (value != value) {
return 0x7ff8000000000000ull ^ mask;
}
#endif
uint64_t i;
static_assert(sizeof(i) == sizeof(value), "double is not 64 bits?");
memcpy(&i, &value, sizeof(value));
return i ^ mask;
}
template <typename T>
inline T unmask(Mask<T> value, Mask<T> mask) {
return static_cast<T>(value ^ mask);
}
template <>
inline float unmask<float>(uint32_t value, uint32_t mask) {
value ^= mask;
float result;
static_assert(sizeof(result) == sizeof(value), "float is not 32 bits?");
memcpy(&result, &value, sizeof(value));
return result;
}
template <>
inline double unmask<double>(uint64_t value, uint64_t mask) {
value ^= mask;
double result;
static_assert(sizeof(result) == sizeof(value), "double is not 64 bits?");
memcpy(&result, &value, sizeof(value));
return result;
}
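// Illustrative sketch of how these are used for non-zero defaults (the default of 5 is
// hypothetical): the field is stored XOR'd with its default, so an unset field is all-zero on
// the wire and packs well.
//     uint16_t onWire = mask<uint16_t>(7, 5);        // 7 ^ 5 == 2 is what gets written
//     uint16_t value  = unmask<uint16_t>(onWire, 5); // reads back as 7
//     uint16_t dflt   = mask<uint16_t>(5, 5);        // == 0: storing the default stores zero bits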
// -------------------------------------------------------------------
class CapTableReader {
public:
#if !CAPNP_LITE
virtual kj::Maybe<kj::Own<ClientHook>> extractCap(uint index) = 0;
// Extract the capability at the given index. If the index is invalid, returns null.
#endif // !CAPNP_LITE
};
class CapTableBuilder: public CapTableReader {
public:
#if !CAPNP_LITE
virtual uint injectCap(kj::Own<ClientHook>&& cap) = 0;
// Add the capability to the message and return its index. If the same ClientHook is injected
// twice, this may return the same index both times, but in this case dropCap() needs to be
// called an equal number of times to actually remove the cap.
virtual void dropCap(uint index) = 0;
// Remove a capability injected earlier. Called when the pointer is overwritten or zero'd out.
#endif // !CAPNP_LITE
};
// -------------------------------------------------------------------
class PointerBuilder: public kj::DisallowConstCopy {
// Represents a single pointer, usually embedded in a struct or a list.
public:
inline PointerBuilder(): segment(nullptr), capTable(nullptr), pointer(nullptr) {}
static inline PointerBuilder getRoot(
SegmentBuilder* segment, CapTableBuilder* capTable, word* location);
// Get a PointerBuilder representing a message root located in the given segment at the given
// location.
inline bool isNull() { return getPointerType() == PointerType::NULL_; }
PointerType getPointerType() const;
StructBuilder getStruct(StructSize size, const word* defaultValue);
ListBuilder getList(ElementSize elementSize, const word* defaultValue);
ListBuilder getStructList(StructSize elementSize, const word* defaultValue);
ListBuilder getListAnySize(const word* defaultValue);
template <typename T> typename T::Builder getBlob(
const void* defaultValue, ByteCount defaultSize);
#if !CAPNP_LITE
kj::Own<ClientHook> getCapability();
#endif // !CAPNP_LITE
// Get methods: Get the value. If it is null, initialize it to a copy of the default value.
// The default value is encoded as an "unchecked message" for structs, lists, and objects, or a
// simple byte array for blobs.
StructBuilder initStruct(StructSize size);
ListBuilder initList(ElementSize elementSize, ElementCount elementCount);
ListBuilder initStructList(ElementCount elementCount, StructSize size);
template <typename T> typename T::Builder initBlob(ByteCount size);
// Init methods: Initialize the pointer to a newly-allocated object, discarding the existing
// object.
void setStruct(const StructReader& value, bool canonical = false);
void setList(const ListReader& value, bool canonical = false);
template <typename T> void setBlob(typename T::Reader value);
#if !CAPNP_LITE
void setCapability(kj::Own<ClientHook>&& cap);
#endif // !CAPNP_LITE
// Set methods: Initialize the pointer to a newly-allocated copy of the given value, discarding
// the existing object.
void adopt(OrphanBuilder&& orphan);
// Set the pointer to point at the given orphaned value.
OrphanBuilder disown();
// Set the pointer to null and return its previous value as an orphan.
void clear();
// Clear the pointer to null, discarding its previous value.
void transferFrom(PointerBuilder other);
// Equivalent to `adopt(other.disown())`.
void copyFrom(PointerReader other, bool canonical = false);
// Equivalent to `set(other.get())`.
// If you set the canonical flag, it will attempt to lay the target out
// canonically, provided enough space is available.
PointerReader asReader() const;
BuilderArena* getArena() const;
// Get the arena containing this pointer.
CapTableBuilder* getCapTable();
// Gets the capability context in which this object is operating.
PointerBuilder imbue(CapTableBuilder* capTable);
// Return a copy of this builder except using the given capability context.
private:
SegmentBuilder* segment; // Memory segment in which the pointer resides.
CapTableBuilder* capTable; // Table of capability indexes.
WirePointer* pointer; // Pointer to the pointer.
inline PointerBuilder(SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* pointer)
: segment(segment), capTable(capTable), pointer(pointer) {}
friend class StructBuilder;
friend class ListBuilder;
friend class OrphanBuilder;
};
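// Illustrative sketch (segment, capTable and rootWord are assumed to come from the enclosing
// BuilderArena, and FOO_SIZE is a hypothetical StructSize): the message root is manipulated
// through this class, roughly
//     PointerBuilder root = PointerBuilder::getRoot(segment, capTable, rootWord);
//     StructBuilder s = root.getStruct(FOO_SIZE, nullptr);  // reuse the existing struct, or
//                                                           // allocate one if the pointer is null
//     root.clear();                                         // zero the pointer and its target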
class PointerReader {
public:
inline PointerReader()
: segment(nullptr), capTable(nullptr), pointer(nullptr), nestingLimit(0x7fffffff) {}
static PointerReader getRoot(SegmentReader* segment, CapTableReader* capTable,
const word* location, int nestingLimit);
// Get a PointerReader representing a message root located in the given segment at the given
// location.
static inline PointerReader getRootUnchecked(const word* location);
// Get a PointerReader for an unchecked message.
MessageSizeCounts targetSize() const;
// Return the total size of the target object and everything to which it points. Does not count
// far pointer overhead. This is useful for deciding how much space is needed to copy the object
// into a flat array. However, the caller is advised NOT to treat this value as secure. Instead,
// use the result as a hint for allocating the first segment, do the copy, and then throw an
// exception if it overruns.
inline bool isNull() const { return getPointerType() == PointerType::NULL_; }
PointerType getPointerType() const;
StructReader getStruct(const word* defaultValue) const;
ListReader getList(ElementSize expectedElementSize, const word* defaultValue) const;
ListReader getListAnySize(const word* defaultValue) const;
template <typename T>
typename T::Reader getBlob(const void* defaultValue, ByteCount defaultSize) const;
#if !CAPNP_LITE
kj::Own<ClientHook> getCapability() const;
#endif // !CAPNP_LITE
// Get methods: Get the value. If it is null, return the default value instead.
// The default value is encoded as an "unchecked message" for structs, lists, and objects, or a
// simple byte array for blobs.
const word* getUnchecked() const;
// If this is an unchecked message, get a word* pointing at the location of the pointer. This
// word* can actually be passed to readUnchecked() to read the designated sub-object later. If
// this isn't an unchecked message, throws an exception.
kj::Maybe<Arena&> getArena() const;
// Get the arena containing this pointer.
CapTableReader* getCapTable();
// Gets the capability context in which this object is operating.
PointerReader imbue(CapTableReader* capTable) const;
// Return a copy of this reader except using the given capability context.
bool isCanonical(const word **readHead);
// Validate this pointer's canonicity, subject to the conditions:
// * All data to the left of readHead has been read thus far (for pointer
// ordering)
// * All pointers in preorder have already been checked
// * This pointer is in the first and only segment of the message
private:
SegmentReader* segment; // Memory segment in which the pointer resides.
CapTableReader* capTable; // Table of capability indexes.
const WirePointer* pointer; // Pointer to the pointer. null = treat as null pointer.
int nestingLimit;
// Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
// Once this reaches zero, further pointers will be pruned.
inline PointerReader(SegmentReader* segment, CapTableReader* capTable,
const WirePointer* pointer, int nestingLimit)
: segment(segment), capTable(capTable), pointer(pointer), nestingLimit(nestingLimit) {}
friend class StructReader;
friend class ListReader;
friend class PointerBuilder;
friend class OrphanBuilder;
};
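// Illustrative sketch (segment, capTable and rootWord are assumed to come from the enclosing
// arena): reading starts from a root PointerReader, roughly
//     PointerReader root = PointerReader::getRoot(segment, capTable, rootWord, 64);
//     if (!root.isNull()) {
//       StructReader s = root.getStruct(nullptr);  // null default => empty struct when unset
//     }
// The nesting limit passed to getRoot() (64 here) is decremented on each hop through a pointer to
// bound recursion depth.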
// -------------------------------------------------------------------
class StructBuilder: public kj::DisallowConstCopy {
public:
inline StructBuilder(): segment(nullptr), capTable(nullptr), data(nullptr), pointers(nullptr) {}
inline word* getLocation() { return reinterpret_cast<word*>(data); }
// Get the object's location. Only valid for independently-allocated objects (i.e. not list
// elements).
inline StructDataBitCount getDataSectionSize() const { return dataSize; }
inline StructPointerCount getPointerSectionSize() const { return pointerCount; }
inline kj::ArrayPtr<byte> getDataSectionAsBlob();
inline _::ListBuilder getPointerSectionAsList();
template <typename T>
KJ_ALWAYS_INLINE(bool hasDataField(StructDataOffset offset));
// Return true if the field is set to something other than its default value.
template <typename T>
KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset));
// Gets the data field value of the given type at the given offset. The offset is measured in
// multiples of the field size, determined by the type.
template <typename T>
KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset, Mask<T> mask));
// Like getDataField() but applies the given XOR mask to the data on load. Used for reading
// fields with non-zero default values.
template <typename T>
KJ_ALWAYS_INLINE(void setDataField(StructDataOffset offset, kj::NoInfer<T> value));
// Sets the data field value at the given offset.
template <typename T>
KJ_ALWAYS_INLINE(void setDataField(StructDataOffset offset,
kj::NoInfer<T> value, Mask<T> mask));
// Like setDataField() but applies the given XOR mask before storing. Used for writing fields
// with non-zero default values.
KJ_ALWAYS_INLINE(PointerBuilder getPointerField(StructPointerOffset ptrIndex));
// Get a builder for a pointer field given the index within the pointer section.
void clearAll();
// Clear all pointers and data.
void transferContentFrom(StructBuilder other);
// Adopt all pointers from `other`, and also copy all data. If `other`'s sections are larger
// than this, the extra data is not transferred, meaning there is a risk of data loss when
// transferring from messages built with future versions of the protocol.
void copyContentFrom(StructReader other);
// Copy content from `other`. If `other`'s sections are larger than this, the extra data is not
// copied, meaning there is a risk of data loss when copying from messages built with future
// versions of the protocol.
StructReader asReader() const;
// Gets a StructReader pointing at the same memory.
BuilderArena* getArena();
// Gets the arena in which this object is allocated.
CapTableBuilder* getCapTable();
// Gets the capability context in which this object is operating.
StructBuilder imbue(CapTableBuilder* capTable);
// Return a copy of this builder except using the given capability context.
private:
SegmentBuilder* segment; // Memory segment in which the struct resides.
CapTableBuilder* capTable; // Table of capability indexes.
void* data; // Pointer to the encoded data.
WirePointer* pointers; // Pointer to the encoded pointers.
StructDataBitCount dataSize;
// Size of data section. We use a bit count rather than a word count to more easily handle the
// case of struct lists encoded with less than a word per element.
StructPointerCount pointerCount; // Size of the pointer section.
inline StructBuilder(SegmentBuilder* segment, CapTableBuilder* capTable,
void* data, WirePointer* pointers,
StructDataBitCount dataSize, StructPointerCount pointerCount)
: segment(segment), capTable(capTable), data(data), pointers(pointers),
dataSize(dataSize), pointerCount(pointerCount) {}
friend class ListBuilder;
friend struct WireHelpers;
friend class OrphanBuilder;
};
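// Illustrative sketch of how generated accessors typically drive this class (the field position
// and the default of 123 are hypothetical): a uint32 field at data offset 1 with a non-zero
// default compiles to roughly
//     b.setDataField<uint32_t>(bounded<1>() * ELEMENTS, value, 123u);     // stores value ^ 123
//     uint32_t v = b.getDataField<uint32_t>(bounded<1>() * ELEMENTS, 123u);
// so an untouched field reads back as its default, and pointer fields are reached via
//     PointerBuilder p = b.getPointerField(bounded<0>() * POINTERS);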
class StructReader {
public:
inline StructReader()
: segment(nullptr), capTable(nullptr), data(nullptr), pointers(nullptr),
dataSize(ZERO * BITS), pointerCount(ZERO * POINTERS), nestingLimit(0x7fffffff) {}
inline StructReader(kj::ArrayPtr<const word> data)
: segment(nullptr), capTable(nullptr), data(data.begin()), pointers(nullptr),
dataSize(assumeBits<STRUCT_DATA_WORD_COUNT_BITS>(data.size()) * WORDS * BITS_PER_WORD),
pointerCount(ZERO * POINTERS), nestingLimit(0x7fffffff) {}
const void* getLocation() const { return data; }
inline StructDataBitCount getDataSectionSize() const { return dataSize; }
inline StructPointerCount getPointerSectionSize() const { return pointerCount; }
inline kj::ArrayPtr<const byte> getDataSectionAsBlob();
inline _::ListReader getPointerSectionAsList();
kj::Array<word> canonicalize();
template <typename T>
KJ_ALWAYS_INLINE(bool hasDataField(StructDataOffset offset) const);
// Return true if the field is set to something other than its default value.
template <typename T>
KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset) const);
// Get the data field value of the given type at the given offset. The offset is measured in
// multiples of the field size, determined by the type. Returns zero if the offset is past the
// end of the struct's data section.
template <typename T>
KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset, Mask<T> mask) const);
// Like getDataField(offset), but applies the given XOR mask to the result. Used for reading
// fields with non-zero default values.
KJ_ALWAYS_INLINE(PointerReader getPointerField(StructPointerOffset ptrIndex) const);
// Get a reader for a pointer field given the index within the pointer section. If the index
// is out-of-bounds, returns a null pointer.
MessageSizeCounts totalSize() const;
// Return the total size of the struct and everything to which it points. Does not count far
// pointer overhead. This is useful for deciding how much space is needed to copy the struct
// into a flat array. However, the caller is advised NOT to treat this value as secure. Instead,
// use the result as a hint for allocating the first segment, do the copy, and then throw an
// exception if it overruns.
CapTableReader* getCapTable();
// Gets the capability context in which this object is operating.
StructReader imbue(CapTableReader* capTable) const;
// Return a copy of this reader except using the given capability context.
bool isCanonical(const word **readHead, const word **ptrHead,
bool *dataTrunc, bool *ptrTrunc);
// Validate this pointer's canonicity, subject to the conditions:
// * All data to the left of readHead has been read thus far (for pointer
// ordering)
// * All pointers in preorder have already been checked
// * This pointer is in the first and only segment of the message
//
// If this function returns false, the struct is non-canonical. If it
// returns true, then:
// * If it is a composite in a list, it is canonical if at least one struct
// in the list outputs dataTrunc = 1, and at least one outputs ptrTrunc = 1
// * If it is derived from a struct pointer, it is canonical if
// dataTrunc = 1 AND ptrTrunc = 1
private:
SegmentReader* segment; // Memory segment in which the struct resides.
CapTableReader* capTable; // Table of capability indexes.
const void* data;
const WirePointer* pointers;
StructDataBitCount dataSize;
// Size of data section. We use a bit count rather than a word count to more easily handle the
// case of struct lists encoded with less than a word per element.
StructPointerCount pointerCount; // Size of the pointer section.
int nestingLimit;
// Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
// Once this reaches zero, further pointers will be pruned.
// TODO(perf): Limit to 16 bits for better packing?
inline StructReader(SegmentReader* segment, CapTableReader* capTable,
const void* data, const WirePointer* pointers,
StructDataBitCount dataSize, StructPointerCount pointerCount,
int nestingLimit)
: segment(segment), capTable(capTable), data(data), pointers(pointers),
dataSize(dataSize), pointerCount(pointerCount),
nestingLimit(nestingLimit) {}
friend class ListReader;
friend class StructBuilder;
friend struct WireHelpers;
};
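// Illustrative note on reader behavior: getDataField() bounds-checks against dataSize and returns
// zero (before unmasking) for any offset past the end of the data section, so a message written
// with an older schema that lacks a field simply yields that field's default. For example,
// reading a uint32 field with default 123 from a struct whose data section is empty returns
// unmask<uint32_t>(0, 123), i.e. 123.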
// -------------------------------------------------------------------
class ListBuilder: public kj::DisallowConstCopy {
public:
inline explicit ListBuilder(ElementSize elementSize)
: segment(nullptr), capTable(nullptr), ptr(nullptr), elementCount(ZERO * ELEMENTS),
step(ZERO * BITS / ELEMENTS), structDataSize(ZERO * BITS),
structPointerCount(ZERO * POINTERS), elementSize(elementSize) {}
inline word* getLocation() {
// Get the object's location.
if (elementSize == ElementSize::INLINE_COMPOSITE && ptr != nullptr) {
return reinterpret_cast<word*>(ptr) - POINTER_SIZE_IN_WORDS;
} else {
return reinterpret_cast<word*>(ptr);
}
}
inline ElementSize getElementSize() const { return elementSize; }
inline ListElementCount size() const;
// The number of elements in the list.
Text::Builder asText();
Data::Builder asData();
// Reinterpret the list as a blob. Throws an exception if the elements are not byte-sized.
template <typename T>
KJ_ALWAYS_INLINE(T getDataElement(ElementCount index));
// Get the element of the given type at the given index.
template <typename T>
KJ_ALWAYS_INLINE(void setDataElement(ElementCount index, kj::NoInfer<T> value));
// Set the element at the given index.
KJ_ALWAYS_INLINE(PointerBuilder getPointerElement(ElementCount index));
StructBuilder getStructElement(ElementCount index);
ListReader asReader() const;
// Get a ListReader pointing at the same memory.
BuilderArena* getArena();
// Gets the arena in which this object is allocated.
CapTableBuilder* getCapTable();
// Gets the capability context in which this object is operating.
ListBuilder imbue(CapTableBuilder* capTable);
// Return a copy of this builder except using the given capability context.
private:
SegmentBuilder* segment; // Memory segment in which the list resides.
CapTableBuilder* capTable; // Table of capability indexes.
byte* ptr; // Pointer to list content.
ListElementCount elementCount; // Number of elements in the list.
BitsPerElementN<23> step;
// The distance between elements. The maximum value occurs when a struct contains 2^16-1 data
// words and 2^16-1 pointers, i.e. 2^17 - 2 words, or 2^23 - 128 bits.
StructDataBitCount structDataSize;
StructPointerCount structPointerCount;
// The struct properties to use when interpreting the elements as structs. All lists can be
// interpreted as struct lists, so these are always filled in.
ElementSize elementSize;
// The element size as an ElementSize. This is only really needed to disambiguate INLINE_COMPOSITE
// from other types when the overall size is exactly zero or one words.
inline ListBuilder(SegmentBuilder* segment, CapTableBuilder* capTable, void* ptr,
BitsPerElementN<23> step, ListElementCount size,
StructDataBitCount structDataSize, StructPointerCount structPointerCount,
ElementSize elementSize)
: segment(segment), capTable(capTable), ptr(reinterpret_cast<byte*>(ptr)),
elementCount(size), step(step), structDataSize(structDataSize),
structPointerCount(structPointerCount), elementSize(elementSize) {}
friend class StructBuilder;
friend struct WireHelpers;
friend class OrphanBuilder;
};
class ListReader {
public:
inline explicit ListReader(ElementSize elementSize)
: segment(nullptr), capTable(nullptr), ptr(nullptr), elementCount(ZERO * ELEMENTS),
step(ZERO * BITS / ELEMENTS), structDataSize(ZERO * BITS),
structPointerCount(ZERO * POINTERS), elementSize(elementSize), nestingLimit(0x7fffffff) {}
inline ListElementCount size() const;
// The number of elements in the list.
inline ElementSize getElementSize() const { return elementSize; }
Text::Reader asText();
Data::Reader asData();
// Reinterpret the list as a blob. Throws an exception if the elements are not byte-sized.
kj::ArrayPtr<const byte> asRawBytes();
template <typename T>
KJ_ALWAYS_INLINE(T getDataElement(ElementCount index) const);
// Get the element of the given type at the given index.
KJ_ALWAYS_INLINE(PointerReader getPointerElement(ElementCount index) const);
StructReader getStructElement(ElementCount index) const;
CapTableReader* getCapTable();
// Gets the capability context in which this object is operating.
ListReader imbue(CapTableReader* capTable) const;
// Return a copy of this reader except using the given capability context.
bool isCanonical(const word **readHead, const WirePointer* ref);
// Validate this pointer's canonicity, subject to the conditions:
// * All data to the left of readHead has been read thus far (for pointer
// ordering)
// * All pointers in preorder have already been checked
// * This pointer is in the first and only segment of the message
private:
SegmentReader* segment; // Memory segment in which the list resides.
CapTableReader* capTable; // Table of capability indexes.
const byte* ptr; // Pointer to list content.
ListElementCount elementCount; // Number of elements in the list.
BitsPerElementN<23> step;
// The distance between elements. The maximum value occurs when a struct contains 2^16-1 data
// words and 2^16-1 pointers, i.e. 2^17 - 2 words, or 2^23 - 128 bits.
StructDataBitCount structDataSize;
StructPointerCount structPointerCount;
// The struct properties to use when interpreting the elements as structs. All lists can be
// interpreted as struct lists, so these are always filled in.
ElementSize elementSize;
// The element size as an ElementSize. This is only really needed to disambiguate INLINE_COMPOSITE
// from other types when the overall size is exactly zero or one words.
int nestingLimit;
// Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
// Once this reaches zero, further pointers will be pruned.
inline ListReader(SegmentReader* segment, CapTableReader* capTable, const void* ptr,
ListElementCount elementCount, BitsPerElementN<23> step,
StructDataBitCount structDataSize, StructPointerCount structPointerCount,
ElementSize elementSize, int nestingLimit)
: segment(segment), capTable(capTable), ptr(reinterpret_cast<const byte*>(ptr)),
elementCount(elementCount), step(step), structDataSize(structDataSize),
structPointerCount(structPointerCount), elementSize(elementSize),
nestingLimit(nestingLimit) {}
friend class StructReader;
friend class ListBuilder;
friend struct WireHelpers;
friend class OrphanBuilder;
};
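// Illustrative sketch of element access (index is a hypothetical ElementCount already checked
// against size() by the caller):
//     uint32_t v      = listReader.getDataElement<uint32_t>(index);  // primitive list
//     StructReader s  = listReader.getStructElement(index);          // struct list
//     PointerReader p = listReader.getPointerElement(index);         // list of pointers
// In each case the byte position is index * step / BITS_PER_BYTE, so a primitive list that was
// encoded on the wire as a struct list still reads correctly: step then spans the whole struct.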
// -------------------------------------------------------------------
class OrphanBuilder {
public:
inline OrphanBuilder(): segment(nullptr), capTable(nullptr), location(nullptr) {
memset(&tag, 0, sizeof(tag));
}
OrphanBuilder(const OrphanBuilder& other) = delete;
inline OrphanBuilder(OrphanBuilder&& other) noexcept;
inline ~OrphanBuilder() noexcept(false);
static OrphanBuilder initStruct(BuilderArena* arena, CapTableBuilder* capTable, StructSize size);
static OrphanBuilder initList(BuilderArena* arena, CapTableBuilder* capTable,
ElementCount elementCount, ElementSize elementSize);
static OrphanBuilder initStructList(BuilderArena* arena, CapTableBuilder* capTable,
ElementCount elementCount, StructSize elementSize);
static OrphanBuilder initText(BuilderArena* arena, CapTableBuilder* capTable, ByteCount size);
static OrphanBuilder initData(BuilderArena* arena, CapTableBuilder* capTable, ByteCount size);
static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, StructReader copyFrom);
static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, ListReader copyFrom);
static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, PointerReader copyFrom);
static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, Text::Reader copyFrom);
static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, Data::Reader copyFrom);
#if !CAPNP_LITE
static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable,
kj::Own<ClientHook> copyFrom);
#endif // !CAPNP_LITE
static OrphanBuilder concat(BuilderArena* arena, CapTableBuilder* capTable,
ElementSize expectedElementSize, StructSize expectedStructSize,
kj::ArrayPtr<const ListReader> lists);
static OrphanBuilder referenceExternalData(BuilderArena* arena, Data::Reader data);
OrphanBuilder& operator=(const OrphanBuilder& other) = delete;
inline OrphanBuilder& operator=(OrphanBuilder&& other);
inline bool operator==(decltype(nullptr)) const { return location == nullptr; }
inline bool operator!=(decltype(nullptr)) const { return location != nullptr; }
StructBuilder asStruct(StructSize size);
// Interpret as a struct, or throw an exception if not a struct.
ListBuilder asList(ElementSize elementSize);
// Interpret as a list, or throw an exception if not a list. elementSize cannot be
// INLINE_COMPOSITE -- use asStructList() instead.
ListBuilder asStructList(StructSize elementSize);
// Interpret as a struct list, or throw an exception if not a list.
ListBuilder asListAnySize();
// For AnyList.
Text::Builder asText();
Data::Builder asData();
// Interpret as a blob, or throw an exception if not a blob.
StructReader asStructReader(StructSize size) const;
ListReader asListReader(ElementSize elementSize) const;
ListReader asListReaderAnySize() const;
#if !CAPNP_LITE
kj::Own<ClientHook> asCapability() const;
#endif // !CAPNP_LITE
Text::Reader asTextReader() const;
Data::Reader asDataReader() const;
bool truncate(ElementCount size, bool isText) KJ_WARN_UNUSED_RESULT;
// Resize the orphan list to the given size. Returns false if the list is currently empty but
// the requested size is non-zero, in which case the caller will need to allocate a new list.
void truncate(ElementCount size, ElementSize elementSize);
void truncate(ElementCount size, StructSize elementSize);
void truncateText(ElementCount size);
// Versions of truncate() that know how to allocate a new list if needed.
private:
static_assert(ONE * POINTERS * WORDS_PER_POINTER == ONE * WORDS,
"This struct assumes a pointer is one word.");
word tag;
// Contains an encoded WirePointer representing this object. WirePointer is defined in
// layout.c++, but fits in a word.
//
// This may be a FAR pointer. Even in that case, `location` points to the eventual destination
// of that far pointer. The reason we keep the far pointer around rather than just making `tag`
// represent the final destination is because if the eventual adopter of the pointer is not in
// the target's segment then it may be useful to reuse the far pointer landing pad.
//
// If `tag` is not a far pointer, its offset is garbage; only `location` points to the actual
// target.
SegmentBuilder* segment;
// Segment in which the object resides.
CapTableBuilder* capTable;
// Table of capability indexes.
word* location;
// Pointer to the object, or nullptr if the pointer is null. For capabilities, we make this
// 0x1 just so that it is non-null for operator==, but it is never used.
inline OrphanBuilder(const void* tagPtr, SegmentBuilder* segment,
CapTableBuilder* capTable, word* location)
: segment(segment), capTable(capTable), location(location) {
memcpy(&tag, tagPtr, sizeof(tag));
}
inline WirePointer* tagAsPtr() { return reinterpret_cast<WirePointer*>(&tag); }
inline const WirePointer* tagAsPtr() const { return reinterpret_cast<const WirePointer*>(&tag); }
void euthanize();
// Erase the target object, zeroing it out and possibly reclaiming the memory. Called when
// the OrphanBuilder is being destroyed or overwritten and it is non-null.
friend struct WireHelpers;
};
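// Illustrative sketch of the orphan lifecycle (FOO_SIZE is a hypothetical StructSize; arena and
// capTable come from the enclosing message):
//     OrphanBuilder orphan = OrphanBuilder::initStruct(arena, capTable, FOO_SIZE);
//     StructBuilder s = orphan.asStruct(FOO_SIZE);   // fill in the detached object
//     somePointer.adopt(kj::mv(orphan));             // splice it into the message tree
// The reverse direction is OrphanBuilder o = somePointer.disown(); if an OrphanBuilder is
// destroyed while still owning an object, euthanize() zeroes it out and possibly reclaims the
// memory.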
// =======================================================================================
// Internal implementation details...
// These are defined in the source file.
template <> typename Text::Builder PointerBuilder::initBlob<Text>(ByteCount size);
template <> void PointerBuilder::setBlob<Text>(typename Text::Reader value);
template <> typename Text::Builder PointerBuilder::getBlob<Text>(
const void* defaultValue, ByteCount defaultSize);
template <> typename Text::Reader PointerReader::getBlob<Text>(
const void* defaultValue, ByteCount defaultSize) const;
template <> typename Data::Builder PointerBuilder::initBlob<Data>(ByteCount size);
template <> void PointerBuilder::setBlob<Data>(typename Data::Reader value);
template <> typename Data::Builder PointerBuilder::getBlob<Data>(
const void* defaultValue, ByteCount defaultSize);
template <> typename Data::Reader PointerReader::getBlob<Data>(
const void* defaultValue, ByteCount defaultSize) const;
inline PointerBuilder PointerBuilder::getRoot(
SegmentBuilder* segment, CapTableBuilder* capTable, word* location) {
return PointerBuilder(segment, capTable, reinterpret_cast<WirePointer*>(location));
}
inline PointerReader PointerReader::getRootUnchecked(const word* location) {
return PointerReader(nullptr, nullptr,
reinterpret_cast<const WirePointer*>(location), 0x7fffffff);
}
// -------------------------------------------------------------------
inline kj::ArrayPtr<byte> StructBuilder::getDataSectionAsBlob() {
return kj::ArrayPtr<byte>(reinterpret_cast<byte*>(data),
unbound(dataSize / BITS_PER_BYTE / BYTES));
}
inline _::ListBuilder StructBuilder::getPointerSectionAsList() {
return _::ListBuilder(segment, capTable, pointers, ONE * POINTERS * BITS_PER_POINTER / ELEMENTS,
pointerCount * (ONE * ELEMENTS / POINTERS),
ZERO * BITS, ONE * POINTERS, ElementSize::POINTER);
}
template <typename T>
inline bool StructBuilder::hasDataField(StructDataOffset offset) {
return getDataField<Mask<T>>(offset) != 0;
}
template <>
inline bool StructBuilder::hasDataField<Void>(StructDataOffset offset) {
return false;
}
template <typename T>
inline T StructBuilder::getDataField(StructDataOffset offset) {
return reinterpret_cast<WireValue<T>*>(data)[unbound(offset / ELEMENTS)].get();
}
template <>
inline bool StructBuilder::getDataField<bool>(StructDataOffset offset) {
BitCount32 boffset = offset * (ONE * BITS / ELEMENTS);
byte* b = reinterpret_cast<byte*>(data) + boffset / BITS_PER_BYTE;
return (*reinterpret_cast<uint8_t*>(b) &
unbound(ONE << (boffset % BITS_PER_BYTE / BITS))) != 0;
}
template <>
inline Void StructBuilder::getDataField<Void>(StructDataOffset offset) {
return VOID;
}
template <typename T>
inline T StructBuilder::getDataField(StructDataOffset offset, Mask<T> mask) {
return unmask<T>(getDataField<Mask<T> >(offset), mask);
}
template <typename T>
inline void StructBuilder::setDataField(StructDataOffset offset, kj::NoInfer<T> value) {
reinterpret_cast<WireValue<T>*>(data)[unbound(offset / ELEMENTS)].set(value);
}
#if CAPNP_CANONICALIZE_NAN
// Use mask() on floats and doubles to make sure we canonicalize NaNs.
template <>
inline void StructBuilder::setDataField<float>(StructDataOffset offset, float value) {
setDataField<uint32_t>(offset, mask<float>(value, 0));
}
template <>
inline void StructBuilder::setDataField<double>(StructDataOffset offset, double value) {
setDataField<uint64_t>(offset, mask<double>(value, 0));
}
#endif
template <>
inline void StructBuilder::setDataField<bool>(StructDataOffset offset, bool value) {
auto boffset = offset * (ONE * BITS / ELEMENTS);
byte* b = reinterpret_cast<byte*>(data) + boffset / BITS_PER_BYTE;
uint bitnum = unboundMaxBits<3>(boffset % BITS_PER_BYTE / BITS);
*reinterpret_cast<uint8_t*>(b) = (*reinterpret_cast<uint8_t*>(b) & ~(1 << bitnum))
| (static_cast<uint8_t>(value) << bitnum);
}
template <>
inline void StructBuilder::setDataField<Void>(StructDataOffset offset, Void value) {}
template <typename T>
inline void StructBuilder::setDataField(StructDataOffset offset,
kj::NoInfer<T> value, Mask<T> m) {
setDataField<Mask<T> >(offset, mask<T>(value, m));
}
inline PointerBuilder StructBuilder::getPointerField(StructPointerOffset ptrIndex) {
// Hacky because WirePointer is defined in the .c++ file (so is incomplete here).
return PointerBuilder(segment, capTable, reinterpret_cast<WirePointer*>(
reinterpret_cast<word*>(pointers) + ptrIndex * WORDS_PER_POINTER));
}
// -------------------------------------------------------------------
inline kj::ArrayPtr<const byte> StructReader::getDataSectionAsBlob() {
return kj::ArrayPtr<const byte>(reinterpret_cast<const byte*>(data),
unbound(dataSize / BITS_PER_BYTE / BYTES));
}
inline _::ListReader StructReader::getPointerSectionAsList() {
return _::ListReader(segment, capTable, pointers, pointerCount * (ONE * ELEMENTS / POINTERS),
ONE * POINTERS * BITS_PER_POINTER / ELEMENTS, ZERO * BITS, ONE * POINTERS,
ElementSize::POINTER, nestingLimit);
}
template <typename T>
inline bool StructReader::hasDataField(StructDataOffset offset) const {
return getDataField<Mask<T>>(offset) != 0;
}
template <>
inline bool StructReader::hasDataField<Void>(StructDataOffset offset) const {
return false;
}
template <typename T>
inline T StructReader::getDataField(StructDataOffset offset) const {
if ((offset + ONE * ELEMENTS) * capnp::bitsPerElement<T>() <= dataSize) {
return reinterpret_cast<const WireValue<T>*>(data)[unbound(offset / ELEMENTS)].get();
} else {
return static_cast<T>(0);
}
}
template <>
inline bool StructReader::getDataField<bool>(StructDataOffset offset) const {
auto boffset = offset * (ONE * BITS / ELEMENTS);
if (boffset < dataSize) {
const byte* b = reinterpret_cast<const byte*>(data) + boffset / BITS_PER_BYTE;
return (*reinterpret_cast<const uint8_t*>(b) &
unbound(ONE << (boffset % BITS_PER_BYTE / BITS))) != 0;
} else {
return false;
}
}
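// Worked example of the bit addressing above: a bool at StructDataOffset 12 maps to bit offset 12
// within the data section, i.e. byte 1, bit 4; offsets for BIT-sized fields are measured in bits,
// not bytes or words.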
template <>
inline Void StructReader::getDataField<Void>(StructDataOffset offset) const {
return VOID;
}
template <typename T>
T StructReader::getDataField(StructDataOffset offset, Mask<T> mask) const {
return unmask<T>(getDataField<Mask<T> >(offset), mask);
}
inline PointerReader StructReader::getPointerField(StructPointerOffset ptrIndex) const {
if (ptrIndex < pointerCount) {
// Hacky because WirePointer is defined in the .c++ file (so is incomplete here).
return PointerReader(segment, capTable, reinterpret_cast<const WirePointer*>(
reinterpret_cast<const word*>(pointers) + ptrIndex * WORDS_PER_POINTER), nestingLimit);
} else {
return PointerReader();
}
}
// -------------------------------------------------------------------
inline ListElementCount ListBuilder::size() const { return elementCount; }
template <typename T>
inline T ListBuilder::getDataElement(ElementCount index) {
return reinterpret_cast<WireValue<T>*>(
ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE)->get();
// TODO(perf): Benchmark this alternate implementation, which I suspect may make better use of
// the x86 SIB byte. Also use it for all the other getData/setData implementations below, and
// the various non-inline methods that look up pointers.
// Also if using this, consider changing ptr back to void* instead of byte*.
// return reinterpret_cast<WireValue<T>*>(ptr)[
// index / ELEMENTS * (step / capnp::bitsPerElement<T>())].get();
}
template <>
inline bool ListBuilder::getDataElement<bool>(ElementCount index) {
// Ignore step for bit lists because bit lists cannot be upgraded to struct lists.
auto bindex = index * (ONE * BITS / ELEMENTS);
byte* b = ptr + bindex / BITS_PER_BYTE;
return (*reinterpret_cast<uint8_t*>(b) &
unbound(ONE << (bindex % BITS_PER_BYTE / BITS))) != 0;
}
template <>
inline Void ListBuilder::getDataElement<Void>(ElementCount index) {
return VOID;
}
template <typename T>
inline void ListBuilder::setDataElement(ElementCount index, kj::NoInfer<T> value) {
reinterpret_cast<WireValue<T>*>(
ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE)->set(value);
}
#if CAPNP_CANONICALIZE_NAN
// Use mask() on floats and doubles to make sure we canonicalize NaNs.
template <>
inline void ListBuilder::setDataElement<float>(ElementCount index, float value) {
setDataElement<uint32_t>(index, mask<float>(value, 0));
}
template <>
inline void ListBuilder::setDataElement<double>(ElementCount index, double value) {
setDataElement<uint64_t>(index, mask<double>(value, 0));
}
#endif
template <>
inline void ListBuilder::setDataElement<bool>(ElementCount index, bool value) {
// Ignore step for bit lists because bit lists cannot be upgraded to struct lists.
auto bindex = index * (ONE * BITS / ELEMENTS);
byte* b = ptr + bindex / BITS_PER_BYTE;
auto bitnum = bindex % BITS_PER_BYTE / BITS;
*reinterpret_cast<uint8_t*>(b) = (*reinterpret_cast<uint8_t*>(b) & ~(1 << unbound(bitnum)))
| (static_cast<uint8_t>(value) << unbound(bitnum));
}
template <>
inline void ListBuilder::setDataElement<Void>(ElementCount index, Void value) {}
inline PointerBuilder ListBuilder::getPointerElement(ElementCount index) {
return PointerBuilder(segment, capTable, reinterpret_cast<WirePointer*>(ptr +
upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE));
}
// -------------------------------------------------------------------
inline ListElementCount ListReader::size() const { return elementCount; }
template <typename T>
inline T ListReader::getDataElement(ElementCount index) const {
return reinterpret_cast<const WireValue<T>*>(
ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE)->get();
}
template <>
inline bool ListReader::getDataElement<bool>(ElementCount index) const {
// Ignore step for bit lists because bit lists cannot be upgraded to struct lists.
auto bindex = index * (ONE * BITS / ELEMENTS);
const byte* b = ptr + bindex / BITS_PER_BYTE;
return (*reinterpret_cast<const uint8_t*>(b) &
unbound(ONE << (bindex % BITS_PER_BYTE / BITS))) != 0;
}
template <>
inline Void ListReader::getDataElement<Void>(ElementCount index) const {
return VOID;
}
inline PointerReader ListReader::getPointerElement(ElementCount index) const {
return PointerReader(segment, capTable, reinterpret_cast<const WirePointer*>(
ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE), nestingLimit);
}
// -------------------------------------------------------------------
inline OrphanBuilder::OrphanBuilder(OrphanBuilder&& other) noexcept
: segment(other.segment), capTable(other.capTable), location(other.location) {
memcpy(&tag, &other.tag, sizeof(tag)); // Needs memcpy to comply with aliasing rules.
other.segment = nullptr;
other.location = nullptr;
}
inline OrphanBuilder::~OrphanBuilder() noexcept(false) {
if (segment != nullptr) euthanize();
}
inline OrphanBuilder& OrphanBuilder::operator=(OrphanBuilder&& other) {
// With normal smart pointers, it's important to handle the case where the incoming pointer
// is actually transitively owned by this one. In this case, euthanize() would destroy `other`
// before we copied it. This isn't possible in the case of `OrphanBuilder` because it only
// owns message objects, and `other` is not itself a message object, therefore cannot possibly
// be transitively owned by `this`.
if (segment != nullptr) euthanize();
segment = other.segment;
capTable = other.capTable;
location = other.location;
memcpy(&tag, &other.tag, sizeof(tag)); // Needs memcpy to comply with aliasing rules.
other.segment = nullptr;
other.location = nullptr;
return *this;
}
} // namespace _ (private)
} // namespace capnp
#endif // CAPNP_LAYOUT_H_