capnproto

FORK: Cap'n Proto serialization/RPC system - core tools and C++ library
git clone https://git.neptards.moe/neptards/capnproto.git
Log | Files | Refs | README | LICENSE

layout.c++ (153904B)


      1 // Copyright (c) 2013-2016 Sandstorm Development Group, Inc. and contributors
      2 // Licensed under the MIT License:
      3 //
      4 // Permission is hereby granted, free of charge, to any person obtaining a copy
      5 // of this software and associated documentation files (the "Software"), to deal
      6 // in the Software without restriction, including without limitation the rights
      7 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
      8 // copies of the Software, and to permit persons to whom the Software is
      9 // furnished to do so, subject to the following conditions:
     10 //
     11 // The above copyright notice and this permission notice shall be included in
     12 // all copies or substantial portions of the Software.
     13 //
     14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
     15 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
     16 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
     17 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
     18 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
     19 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
     20 // THE SOFTWARE.
     21 
     22 #define CAPNP_PRIVATE
     23 #include "layout.h"
     24 #include <kj/debug.h>
     25 #include "arena.h"
     26 #include <string.h>
     27 #include <stdlib.h>
     28 
     29 #if !CAPNP_LITE
     30 #include "capability.h"
     31 #endif  // !CAPNP_LITE
     32 
     33 namespace capnp {
     34 namespace _ {  // private
     35 
     36 #if !CAPNP_LITE
     37 static BrokenCapFactory* globalBrokenCapFactory = nullptr;
     38 // Horrible hack:  We need to be able to construct broken caps without any capability context,
     39 // but we can't have a link-time dependency on libcapnp-rpc.
        // Written by setGlobalBrokenCapFactoryForLayoutCpp() and read by
        // readGlobalBrokenCapFactoryForLayoutCpp() below; stays null until the capability API
        // registers a factory.
     40 
     41 void setGlobalBrokenCapFactoryForLayoutCpp(BrokenCapFactory& factory) {
     42   // Called from capability.c++ when the capability API is used, to make sure that layout.c++
     43   // is ready for it.  May be called multiple times but always with the same value.
     44 #if __GNUC__ || defined(__clang__)
        // Relaxed ordering suffices here: all callers store the same value, and the matching load
        // in readGlobalBrokenCapFactoryForLayoutCpp() is also relaxed.
     45   __atomic_store_n(&globalBrokenCapFactory, &factory, __ATOMIC_RELAXED);
     46 #elif _MSC_VER
        // NOTE(review): the MSVC path relies on a volatile pointer-sized store being a single
        // write -- presumably adequate on MSVC's supported targets; confirm if porting.
     47   *static_cast<BrokenCapFactory* volatile*>(&globalBrokenCapFactory) = &factory;
     48 #else
     49 #error "Platform not supported"
     50 #endif
     51 }
     52 
     53 static BrokenCapFactory* readGlobalBrokenCapFactoryForLayoutCpp() {
        // Returns the factory registered via setGlobalBrokenCapFactoryForLayoutCpp(), or null if
        // no capability context has been set up yet.
     54 #if __GNUC__ || defined(__clang__)
     55   // Thread-sanitizer doesn't have the right information to know this is safe without doing an
     56   // atomic read. https://groups.google.com/g/capnproto/c/634juhn5ap0/m/pyRiwWl1AAAJ
     57   return __atomic_load_n(&globalBrokenCapFactory, __ATOMIC_RELAXED);
     58 #else
        // Other compilers (e.g. MSVC) do a plain read, matching the volatile write in the setter.
     59   return globalBrokenCapFactory;
     60 #endif
     61 }
     62 
     63 }  // namespace _ (private)
     64 
     65 const uint ClientHook::NULL_CAPABILITY_BRAND = 0;
     66 const uint ClientHook::BROKEN_CAPABILITY_BRAND = 0;
     67 // Defined here rather than capability.c++ so that we can safely call isNull() in this file.
        // NOTE(review): both constants have the same value (0); presumably their *addresses*, not
        // their values, serve as the distinguishing brands -- confirm against capability.c++.
     68 
     69 namespace _ {  // private
     70 
     71 #endif  // !CAPNP_LITE
     72 
     73 #if CAPNP_DEBUG_TYPES
     74 #define G(n) bounded<n>()
     75 #else
     76 #define G(n) n
     77 #endif
     78 
     79 // =======================================================================================
     80 
     81 #if __GNUC__ >= 8 && !__clang__
     82 // GCC 8 introduced a warning which complains whenever we try to memset() or memcpy() a
     83 // WirePointer, because we deleted the regular copy constructor / assignment operator. Weirdly, if
     84 // I remove those deletions, GCC *still* complains that WirePointer is non-trivial. I don't
     85 // understand why -- maybe because WireValue has private members? We don't want to make WireValue's
     86 // member public, but memset() and memcpy() on it are certainly valid and desirable, so we'll just
     87 // have to disable the warning I guess.
     88 #pragma GCC diagnostic ignored "-Wclass-memaccess"
     89 #endif
     90 
     91 struct WirePointer {
     92   // A pointer, in exactly the format in which it appears on the wire.
     93 
     94   // Copying and moving is not allowed because the offset would become wrong.
     95   WirePointer(const WirePointer& other) = delete;
     96   WirePointer(WirePointer&& other) = delete;
     97   WirePointer& operator=(const WirePointer& other) = delete;
     98   WirePointer& operator=(WirePointer&& other) = delete;
     99 
    100   // -----------------------------------------------------------------
    101   // Common part of all pointers:  kind + offset
    102   //
    103   // Actually this is not terribly common.  The "offset" could actually be different things
    104   // depending on the context:
    105   // - For a regular (e.g. struct/list) pointer, a signed word offset from the word immediately
    106   //   following the pointer pointer.  (The off-by-one means the offset is more often zero, saving
    107   //   bytes on the wire when packed.)
    108   // - For an inline composite list tag (not really a pointer, but structured similarly), an
    109   //   element count.
    110   // - For a FAR pointer, an unsigned offset into the target segment.
    111   // - For a FAR landing pad, zero indicates that the target value immediately follows the pad while
    112   //   1 indicates that the pad is followed by another FAR pointer that actually points at the
    113   //   value.
    114 
    115   enum Kind {
    116     STRUCT = 0,
    117     // Reference points at / describes a struct.
    118 
    119     LIST = 1,
    120     // Reference points at / describes a list.
    121 
    122     FAR = 2,
    123     // Reference is a "far pointer", which points at data located in a different segment.  The
    124     // eventual target is one of the other kinds.
    125 
    126     OTHER = 3
    127     // Reference has type "other".  If the next 30 bits are all zero (i.e. the lower 32 bits contain
    128     // only the kind OTHER) then the pointer is a capability.  All other values are reserved.
    129   };
    130 
          // Lower 2 bits hold the Kind; the upper 30 bits hold the offset / element count / word
          // offset described above.
    131   WireValue<uint32_t> offsetAndKind;
    132 
          // Returns the Kind stored in the low two bits.
    133   KJ_ALWAYS_INLINE(Kind kind() const) {
    134     return static_cast<Kind>(offsetAndKind.get() & 3);
    135   }
    136   KJ_ALWAYS_INLINE(bool isPositional() const) {
    137     return (offsetAndKind.get() & 2) == 0;  // match STRUCT and LIST but not FAR or OTHER
    138   }
          // A capability pointer is kind OTHER with all remaining lower bits zero (see Kind::OTHER),
          // hence comparing the whole 32-bit field against the enum value.
    139   KJ_ALWAYS_INLINE(bool isCapability() const) {
    140     return offsetAndKind.get() == OTHER;
    141   }
    142 
          // Target of a positional (STRUCT/LIST) pointer: the word immediately after this pointer
          // plus the signed offset stored in the upper 30 bits.
    143   KJ_ALWAYS_INLINE(word* target()) {
    144     return reinterpret_cast<word*>(this) + 1 + (static_cast<int32_t>(offsetAndKind.get()) >> 2);
    145   }
          // Reader-side target computation.  A null segment means this is an unchecked message, so
          // the offset is applied without bounds checking; otherwise the segment validates it.
    146   KJ_ALWAYS_INLINE(const word* target(SegmentReader* segment) const) {
    147     if (segment == nullptr) {
    148       return reinterpret_cast<const word*>(this + 1) +
    149           (static_cast<int32_t>(offsetAndKind.get()) >> 2);
    150     } else {
    151       return segment->checkOffset(reinterpret_cast<const word*>(this + 1),
    152                                   static_cast<int32_t>(offsetAndKind.get()) >> 2);
    153     }
    154   }
    155   KJ_ALWAYS_INLINE(void setKindAndTarget(Kind kind, word* target, SegmentBuilder* segment)) {
    156     // Check that the target is really in the same segment, otherwise subtracting pointers is
    157     // undefined behavior.  As it turns out, it's undefined behavior that actually produces
    158     // unexpected results in a real-world situation that actually happened:  At one time,
    159     // OrphanBuilder's "tag" (a WirePointer) was allowed to be initialized as if it lived in
    160     // a particular segment when in fact it does not.  On 32-bit systems, where words might
    161     // only be 32-bit aligned, it's possible that the difference between `this` and `target` is
    162     // not a whole number of words.  But clang optimizes:
    163     //     (target - (word*)this - 1) << 2
    164     // to:
    165     //     (((ptrdiff_t)target - (ptrdiff_t)this - 8) >> 1)
    166     // So now when the pointers are not aligned the same, we can end up corrupting the bottom
    167     // two bits, where `kind` is stored.  For example, this turns a struct into a far pointer.
    168     // Ouch!
    169     KJ_DREQUIRE(reinterpret_cast<uintptr_t>(this) >=
    170                 reinterpret_cast<uintptr_t>(segment->getStartPtr()));
    171     KJ_DREQUIRE(reinterpret_cast<uintptr_t>(this) <
    172                 reinterpret_cast<uintptr_t>(segment->getStartPtr() + segment->getSize()));
    173     KJ_DREQUIRE(reinterpret_cast<uintptr_t>(target) >=
    174                 reinterpret_cast<uintptr_t>(segment->getStartPtr()));
    175     KJ_DREQUIRE(reinterpret_cast<uintptr_t>(target) <=
    176                 reinterpret_cast<uintptr_t>(segment->getStartPtr() + segment->getSize()));
    177     offsetAndKind.set((static_cast<uint32_t>(target - reinterpret_cast<word*>(this) - 1) << 2) | kind);
    178   }
          // Sets the low bits to `kind` with an offset of zero (target = the word immediately
          // following this pointer).
    179   KJ_ALWAYS_INLINE(void setKindWithZeroOffset(Kind kind)) {
    180     offsetAndKind.set(kind);
    181   }
    182   KJ_ALWAYS_INLINE(void setKindAndTargetForEmptyStruct()) {
    183     // This pointer points at an empty struct.  Assuming the WirePointer itself is in-bounds, we
    184     // can set the target to point either at the WirePointer itself or immediately after it.  The
    185     // latter would cause the WirePointer to be "null" (since for an empty struct the upper 32
    186     // bits are going to be zero).  So we set an offset of -1, as if the struct were allocated
    187     // immediately before this pointer, to distinguish it from null.
    188     offsetAndKind.set(0xfffffffc);
    189   }
    190   KJ_ALWAYS_INLINE(void setKindForOrphan(Kind kind)) {
    191     // OrphanBuilder contains a WirePointer, but since it isn't located in a segment, it should
    192     // not have a valid offset (unless it is a FAR or OTHER pointer).  We set its offset to -1
    193     // because setting it to zero would mean a pointer to an empty struct would appear to be a null
    194     // pointer.
    195     KJ_DREQUIRE(isPositional());
    196     offsetAndKind.set(kind | 0xfffffffc);
    197   }
    198 
          // For an inline-composite list tag: the element count stored in the upper 30 bits,
          // masked to LIST_ELEMENT_COUNT_BITS.
    199   KJ_ALWAYS_INLINE(ListElementCount inlineCompositeListElementCount() const) {
    200     return ((bounded(offsetAndKind.get()) >> G(2))
    201             & G(kj::maxValueForBits<LIST_ELEMENT_COUNT_BITS>())) * ELEMENTS;
    202   }
    203   KJ_ALWAYS_INLINE(void setKindAndInlineCompositeListElementCount(
    204       Kind kind, ListElementCount elementCount)) {
    205     offsetAndKind.set(unboundAs<uint32_t>((elementCount / ELEMENTS) << G(2)) | kind);
    206   }
    207 
          // For FAR pointers: the unsigned word offset (bits 3+) resolved within the given segment.
    208   KJ_ALWAYS_INLINE(const word* farTarget(SegmentReader* segment) const) {
    209     KJ_DREQUIRE(kind() == FAR,
    210         "farTarget() should only be called on FAR pointers.");
    211     return segment->checkOffset(segment->getStartPtr(), offsetAndKind.get() >> 3);
    212   }
    213   KJ_ALWAYS_INLINE(word* farTarget(SegmentBuilder* segment) const) {
    214     KJ_DREQUIRE(kind() == FAR,
    215         "farTarget() should only be called on FAR pointers.");
    216     return segment->getPtrUnchecked((bounded(offsetAndKind.get()) >> G(3)) * WORDS);
    217   }
          // Bit 2 distinguishes a one-word landing pad (0) from a double-far pad (1).
    218   KJ_ALWAYS_INLINE(bool isDoubleFar() const) {
    219     KJ_DREQUIRE(kind() == FAR,
    220         "isDoubleFar() should only be called on FAR pointers.");
    221     return (offsetAndKind.get() >> 2) & 1;
    222   }
          // Encodes a FAR pointer: word offset in bits 3+, double-far flag in bit 2, kind in bits 0-1.
    223   KJ_ALWAYS_INLINE(void setFar(bool isDoubleFar, WordCountN<29> pos)) {
    224     offsetAndKind.set(unboundAs<uint32_t>((pos / WORDS) << G(3)) |
    225                       (static_cast<uint32_t>(isDoubleFar) << 2) |
    226                       static_cast<uint32_t>(Kind::FAR));
    227   }
          // Encodes a capability pointer: kind OTHER with zero offset bits, plus the capability
          // table index in the upper 32 bits.
    228   KJ_ALWAYS_INLINE(void setCap(uint index)) {
    229     offsetAndKind.set(static_cast<uint32_t>(Kind::OTHER));
    230     capRef.index.set(index);
    231   }
    232 
    233   // -----------------------------------------------------------------
    234   // Part of pointer that depends on the kind.
    235 
    236   // Note:  Originally StructRef, ListRef, and FarRef were unnamed types, but this somehow
    237   //   tickled a bug in GCC:
    238   //     http://gcc.gnu.org/bugzilla/show_bug.cgi?id=58192
    239   struct StructRef {
            // Upper 32 bits of a STRUCT pointer: size of the data section (in words) and of the
            // pointer section (in pointers).
    240     WireValue<WordCount16> dataSize;
    241     WireValue<WirePointerCount16> ptrCount;
    242 
            // Total size of the struct in words: data section plus pointer section.
    243     inline WordCountN<17> wordSize() const {
    244       return upgradeBound<uint32_t>(dataSize.get()) + ptrCount.get() * WORDS_PER_POINTER;
    245     }
    246 
    247     KJ_ALWAYS_INLINE(void set(WordCount16 ds, WirePointerCount16 rc)) {
    248       dataSize.set(ds);
    249       ptrCount.set(rc);
    250     }
    251     KJ_ALWAYS_INLINE(void set(StructSize size)) {
    252       dataSize.set(size.data);
    253       ptrCount.set(size.pointers);
    254     }
    255   };
    256 
    257   struct ListRef {
            // Upper 32 bits of a LIST pointer: element size code in the low 3 bits, count in the
            // upper 29 bits (which setInlineComposite() stores as a word count instead).
    258     WireValue<uint32_t> elementSizeAndCount;
    259 
    260     KJ_ALWAYS_INLINE(ElementSize elementSize() const) {
    261       return static_cast<ElementSize>(elementSizeAndCount.get() & 7);
    262     }
    263     KJ_ALWAYS_INLINE(ElementCountN<29> elementCount() const) {
    264       return (bounded(elementSizeAndCount.get()) >> G(3)) * ELEMENTS;
    265     }
            // For INLINE_COMPOSITE lists the "count" field actually holds the total word count.
    266     KJ_ALWAYS_INLINE(WordCountN<29> inlineCompositeWordCount() const) {
    267       return elementCount() * (ONE * WORDS / ELEMENTS);
    268     }
    269 
    270     KJ_ALWAYS_INLINE(void set(ElementSize es, ElementCountN<29> ec)) {
    271       elementSizeAndCount.set(unboundAs<uint32_t>((ec / ELEMENTS) << G(3)) |
    272                               static_cast<int>(es));
    273     }
    274 
    275     KJ_ALWAYS_INLINE(void setInlineComposite(WordCountN<29> wc)) {
    276       elementSizeAndCount.set(unboundAs<uint32_t>((wc / WORDS) << G(3)) |
    277                               static_cast<int>(ElementSize::INLINE_COMPOSITE));
    278     }
    279   };
    280 
    281   struct FarRef {
            // Upper 32 bits of a FAR pointer: the id of the segment containing the landing pad.
    282     WireValue<SegmentId> segmentId;
    283 
    284     KJ_ALWAYS_INLINE(void set(SegmentId si)) {
    285       segmentId.set(si);
    286     }
    287   };
    288 
    289   struct CapRef {
    290     WireValue<uint32_t> index;
    291     // Index into the message's capability table.
    292   };
    293 
          // Upper 32 bits of the pointer, interpreted according to kind().
    294   union {
    295     uint32_t upper32Bits;
    296 
    297     StructRef structRef;
    298 
    299     ListRef listRef;
    300 
    301     FarRef farRef;
    302 
    303     CapRef capRef;
    304   };
    305 
    306   KJ_ALWAYS_INLINE(bool isNull() const) {
    307     // If the upper 32 bits are zero, this is a pointer to an empty struct.  We consider that to be
    308     // our "null" value.
    309     return (offsetAndKind.get() == 0) & (upper32Bits == 0);
    310   }
    311 
    312 };
    313 static_assert(sizeof(WirePointer) == sizeof(word),
    314     "capnp::WirePointer is not exactly one word.  This will probably break everything.");
        // The remaining checks verify that the pointer-related unit constants agree with the
        // actual in-memory size of WirePointer.
    315 static_assert(unboundAs<size_t>(POINTERS * WORDS_PER_POINTER * BYTES_PER_WORD / BYTES) ==
    316               sizeof(WirePointer),
    317     "WORDS_PER_POINTER is wrong.");
    318 static_assert(unboundAs<size_t>(POINTERS * BYTES_PER_POINTER / BYTES) == sizeof(WirePointer),
    319     "BYTES_PER_POINTER is wrong.");
    320 static_assert(unboundAs<size_t>(POINTERS * BITS_PER_POINTER / BITS_PER_BYTE / BYTES) ==
    321               sizeof(WirePointer),
    322     "BITS_PER_POINTER is wrong.");
    323 
    324 namespace {
    325 
    326 static const union {
          // A single zero'd word viewable either as raw aligned data or as a WirePointer.  With
          // all bits zero, the pointer view reads as "null" (see WirePointer::isNull()).
    327   AlignedData<unbound(POINTER_SIZE_IN_WORDS / WORDS)> word;
    328   WirePointer pointer;
    329 } zero = {{{0}}};
    330 
    331 }  // namespace
    332 
    333 // =======================================================================================
    334 
    335 namespace {
    336 
    337 template <typename T>
    338 struct SegmentAnd {
          // Helper for returning a value together with the segment associated with it.
    339   SegmentBuilder* segment;
    340   T value;
    341 };
    342 
    343 }  // namespace
    344 
    345 struct WireHelpers {
    346 #if CAPNP_DEBUG_TYPES
          // Bounded-quantity variants: the result's compile-time bound is derived from the input's
          // bound, so overflow safety is tracked in the type system.
    347   template <uint64_t maxN, typename T>
    348   static KJ_ALWAYS_INLINE(
    349       kj::Quantity<kj::Bounded<(maxN + 7) / 8, T>, word> roundBytesUpToWords(
    350           kj::Quantity<kj::Bounded<maxN, T>, byte> bytes)) {
    351     static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    352     return (bytes + G(7) * BYTES) / BYTES_PER_WORD;
    353   }
    354 
    355   template <uint64_t maxN, typename T>
    356   static KJ_ALWAYS_INLINE(
    357       kj::Quantity<kj::Bounded<(maxN + 7) / 8, T>, byte> roundBitsUpToBytes(
    358           kj::Quantity<kj::Bounded<maxN, T>, BitLabel> bits)) {
    359     return (bits + G(7) * BITS) / BITS_PER_BYTE;
    360   }
    361 
    362   template <uint64_t maxN, typename T>
    363   static KJ_ALWAYS_INLINE(
    364       kj::Quantity<kj::Bounded<(maxN + 63) / 64, T>, word> roundBitsUpToWords(
    365           kj::Quantity<kj::Bounded<maxN, T>, BitLabel> bits)) {
    366     static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    367     return (bits + G(63) * BITS) / BITS_PER_WORD;
    368   }
    369 #else
          // Plain-integer variants for non-debug-types builds.  All round up by adding
          // (divisor - 1) before the integer division.
    370   static KJ_ALWAYS_INLINE(WordCount roundBytesUpToWords(ByteCount bytes)) {
    371     static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    372     return (bytes + G(7) * BYTES) / BYTES_PER_WORD;
    373   }
    374 
    375   static KJ_ALWAYS_INLINE(ByteCount roundBitsUpToBytes(BitCount bits)) {
    376     return (bits + G(7) * BITS) / BITS_PER_BYTE;
    377   }
    378 
    379   static KJ_ALWAYS_INLINE(WordCount64 roundBitsUpToWords(BitCount64 bits)) {
    380     static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    381     return (bits + G(63) * BITS) / BITS_PER_WORD;
    382   }
    383 
    384   static KJ_ALWAYS_INLINE(ByteCount64 roundBitsUpToBytes(BitCount64 bits)) {
    385     return (bits + G(7) * BITS) / BITS_PER_BYTE;
    386   }
    387 #endif
    388 
    389   static KJ_ALWAYS_INLINE(void zeroMemory(byte* ptr, ByteCount32 count)) {
            // Typed memset-to-zero helpers.  Each checks for a zero count first so memset is never
            // called at all in that case (where ptr may legitimately be null).
    390     if (count != ZERO * BYTES) memset(ptr, 0, unbound(count / BYTES));
    391   }
    392 
    393   static KJ_ALWAYS_INLINE(void zeroMemory(word* ptr, WordCountN<29> count)) {
    394     if (count != ZERO * WORDS) memset(ptr, 0, unbound(count * BYTES_PER_WORD / BYTES));
    395   }
    396 
    397   static KJ_ALWAYS_INLINE(void zeroMemory(WirePointer* ptr, WirePointerCountN<29> count)) {
    398     if (count != ZERO * POINTERS) memset(ptr, 0, unbound(count * BYTES_PER_POINTER / BYTES));
    399   }
    400 
          // Zeroes exactly one WirePointer.
    401   static KJ_ALWAYS_INLINE(void zeroMemory(WirePointer* ptr)) {
    402     memset(ptr, 0, sizeof(*ptr));
    403   }
    404 
    405   template <typename T>
    406   static inline void zeroMemory(kj::ArrayPtr<T> array) {
    407     if (array.size() != 0u) memset(array.begin(), 0, array.size() * sizeof(array[0]));
    408   }
    409 
    410   static KJ_ALWAYS_INLINE(void copyMemory(byte* to, const byte* from, ByteCount32 count)) {
            // Typed memcpy helpers; like zeroMemory(), they skip the call entirely for a zero count.
    411     if (count != ZERO * BYTES) memcpy(to, from, unbound(count / BYTES));
    412   }
    413 
    414   static KJ_ALWAYS_INLINE(void copyMemory(word* to, const word* from, WordCountN<29> count)) {
    415     if (count != ZERO * WORDS) memcpy(to, from, unbound(count * BYTES_PER_WORD / BYTES));
    416   }
    417 
    418   static KJ_ALWAYS_INLINE(void copyMemory(WirePointer* to, const WirePointer* from,
    419                                           WirePointerCountN<29> count)) {
    420     if (count != ZERO * POINTERS) memcpy(to, from, unbound(count * BYTES_PER_POINTER  / BYTES));
    421   }
    422 
    423   template <typename T>
    424   static inline void copyMemory(T* to, const T* from) {
            // Copies exactly one T.
    425     memcpy(to, from, sizeof(*from));
    426   }
    427 
    428   // TODO(cleanup): Turn these into a .copyTo() method of ArrayPtr?
    429   template <typename T>
    430   static inline void copyMemory(T* to, kj::ArrayPtr<T> from) {
    431     if (from.size() != 0u) memcpy(to, from.begin(), from.size() * sizeof(from[0]));
    432   }
    433   template <typename T>
    434   static inline void copyMemory(T* to, kj::ArrayPtr<const T> from) {
    435     if (from.size() != 0u) memcpy(to, from.begin(), from.size() * sizeof(from[0]));
    436   }
    437   static KJ_ALWAYS_INLINE(void copyMemory(char* to, kj::StringPtr from)) {
            // Copies from.size() characters only.  NOTE(review): presumably StringPtr::size()
            // excludes the NUL terminator, so none is written -- confirm in kj/string.h.
    438     if (from.size() != 0u) memcpy(to, from.begin(), from.size() * sizeof(from[0]));
    439   }
    440 
    441   static KJ_ALWAYS_INLINE(bool boundsCheck(
    442       SegmentReader* segment, const word* start, WordCountN<31> size)) {
            // Returns true if [start, start + size) passes the segment's object check (or
            // unconditionally for unchecked messages).
    443     // If segment is null, this is an unchecked message, so we don't do bounds checks.
    444     return segment == nullptr || segment->checkObject(start, size);
    445   }
    446 
    447   static KJ_ALWAYS_INLINE(bool amplifiedRead(SegmentReader* segment, WordCount virtualAmount)) {
            // Charges `virtualAmount` words against the segment's read limiter; returns whether
            // the read should proceed.
    448     // If segment is null, this is an unchecked message, so we don't do read limiter checks.
    449     return segment == nullptr || segment->amplifiedRead(virtualAmount);
    450   }
    451 
    452   static KJ_ALWAYS_INLINE(word* allocate(
    453       WirePointer*& ref, SegmentBuilder*& segment, CapTableBuilder* capTable,
    454       SegmentWordCount amount, WirePointer::Kind kind, BuilderArena* orphanArena)) {
    455     // Allocate space in the message for a new object, creating far pointers if necessary. The
    456     // space is guaranteed to be zero'd (because MessageBuilder implementations are required to
    457     // return zero'd memory).
    458     //
    459     // * `ref` starts out being a reference to the pointer which shall be assigned to point at the
    460     //   new object.  On return, `ref` points to a pointer which needs to be initialized with
    461     //   the object's type information.  Normally this is the same pointer, but it can change if
    462     //   a far pointer was allocated -- in this case, `ref` will end up pointing to the far
    463     //   pointer's tag.  Either way, `allocate()` takes care of making sure that the original
    464     //   pointer ends up leading to the new object.  On return, only the upper 32 bit of `*ref`
    465     //   need to be filled in by the caller.
    466     // * `segment` starts out pointing to the segment containing `ref`.  On return, it points to
    467     //   the segment containing the allocated object, which is usually the same segment but could
    468     //   be a different one if the original segment was out of space.
    469     // * `amount` is the number of words to allocate.
    470     // * `kind` is the kind of object to allocate.  It is used to initialize the pointer.  It
    471     //   cannot be `FAR` -- far pointers are allocated automatically as needed.
    472     // * `orphanArena` is usually null.  If it is non-null, then we're allocating an orphan object.
    473     //   In this case, `segment` starts out null; the allocation takes place in an arbitrary
    474     //   segment belonging to the arena.  `ref` will be initialized as a non-far pointer, but its
    475     //   target offset will be set to zero.
    476 
    477     if (orphanArena == nullptr) {
            // Discard whatever the pointer previously referenced.
    478       if (!ref->isNull()) zeroObject(segment, capTable, ref);
    479 
    480       if (amount == ZERO * WORDS && kind == WirePointer::STRUCT) {
    481         // Note that the check for kind == WirePointer::STRUCT will hopefully cause this whole
    482         // branch to be optimized away from all the call sites that are allocating non-structs.
    483         ref->setKindAndTargetForEmptyStruct();
    484         return reinterpret_cast<word*>(ref);
    485       }
    486 
            // First try to allocate within the segment that already contains the pointer.
    487       word* ptr = segment->allocate(amount);
    488 
    489       if (ptr == nullptr) {
    490 
    491         // Need to allocate in a new segment.  We'll need to allocate an extra pointer worth of
    492         // space to act as the landing pad for a far pointer.
    493 
    494         WordCount amountPlusRef = amount + POINTER_SIZE_IN_WORDS;
    495         auto allocation = segment->getArena()->allocate(
    496             assertMaxBits<SEGMENT_WORD_COUNT_BITS>(amountPlusRef, []() {
    497               KJ_FAIL_REQUIRE("requested object size exceeds maximum segment size");
    498             }));
    499         segment = allocation.segment;
    500         ptr = allocation.words;
    501 
    502         // Set up the original pointer to be a far pointer to the new segment.
    503         ref->setFar(false, segment->getOffsetTo(ptr));
    504         ref->farRef.set(segment->getSegmentId());
    505 
    506         // Initialize the landing pad to indicate that the data immediately follows the pad.
    507         ref = reinterpret_cast<WirePointer*>(ptr);
    508         ref->setKindAndTarget(kind, ptr + POINTER_SIZE_IN_WORDS, segment);
    509 
    510         // Allocated space follows new pointer.
    511         return ptr + POINTER_SIZE_IN_WORDS;
    512       } else {
    513         ref->setKindAndTarget(kind, ptr, segment);
    514         return ptr;
    515       }
    516     } else {
    517       // orphanArena is non-null.  Allocate an orphan.
    518       KJ_DASSERT(ref->isNull());
    519       auto allocation = orphanArena->allocate(amount);
    520       segment = allocation.segment;
    521       ref->setKindForOrphan(kind);
    522       return allocation.words;
    523     }
    524   }
    525 
    526   static KJ_ALWAYS_INLINE(word* followFarsNoWritableCheck(
    527       WirePointer*& ref, word* refTarget, SegmentBuilder*& segment)) {
    528     // If `ref` is a far pointer, follow it.  On return, `ref` will have been updated to point at
    529     // a WirePointer that contains the type information about the target object, and a pointer to
    530     // the object contents is returned.  The caller must NOT use `ref->target()` as this may or may
    531     // not actually return a valid pointer.  `segment` is also updated to point at the segment which
    532     // actually contains the object.
    533     //
    534     // If `ref` is not a far pointer, this simply returns `refTarget`.  Usually, `refTarget` should
    535     // be the same as `ref->target()`, but may not be in cases where `ref` is only a tag.
    536 
    537     if (ref->kind() == WirePointer::FAR) {
            // Hop to the segment containing the landing pad.
    538       segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
    539       WirePointer* pad = reinterpret_cast<WirePointer*>(ref->farTarget(segment));
    540       if (!ref->isDoubleFar()) {
    541         ref = pad;
    542         return pad->target();
    543       }
    544 
    545       // Landing pad is another far pointer.  It is followed by a tag describing the pointed-to
    546       // object.
    547       ref = pad + 1;
    548 
    549       segment = segment->getArena()->getSegment(pad->farRef.segmentId.get());
    550       return pad->farTarget(segment);
    551     } else {
    552       return refTarget;
    553     }
    554   }
    555 
    556   static KJ_ALWAYS_INLINE(word* followFars(
    557       WirePointer*& ref, word* refTarget, SegmentBuilder*& segment)) {
            // Same as followFarsNoWritableCheck(), but additionally asserts writability of the
            // resulting segment via checkWritable().
    558     auto result = followFarsNoWritableCheck(ref, refTarget, segment);
    559     segment->checkWritable();
    560     return result;
    561   }
    562 
    563   static KJ_ALWAYS_INLINE(kj::Maybe<const word&> followFars(
    564       const WirePointer*& ref, const word* refTarget, SegmentReader*& segment))
    565       KJ_WARN_UNUSED_RESULT {
    566     // Like the other followFars() but operates on readers.
            //
            // Returns null when the message is corrupt (far pointer to an unknown segment,
            // out-of-bounds landing pad, or malformed double-far pad); each KJ_REQUIRE below
            // recovers by returning nullptr.
    567 
    568     // If the segment is null, this is an unchecked message, so there are no FAR pointers.
    569     if (segment != nullptr && ref->kind() == WirePointer::FAR) {
    570       // Look up the segment containing the landing pad.
    571       segment = segment->getArena()->tryGetSegment(ref->farRef.segmentId.get());
    572       KJ_REQUIRE(segment != nullptr, "Message contains far pointer to unknown segment.") {
    573         return nullptr;
    574       }
    575 
    576       // Find the landing pad and check that it is within bounds.
    577       const word* ptr = ref->farTarget(segment);
            // A double-far pad is two words (far pointer + tag); a single pad is one word.
    578       auto padWords = (ONE + bounded(ref->isDoubleFar())) * POINTER_SIZE_IN_WORDS;
    579       KJ_REQUIRE(boundsCheck(segment, ptr, padWords),
    580                  "Message contains out-of-bounds far pointer.") {
    581         return nullptr;
    582       }
    583 
    584       const WirePointer* pad = reinterpret_cast<const WirePointer*>(ptr);
    585 
    586       // If this is not a double-far then the landing pad is our final pointer.
    587       if (!ref->isDoubleFar()) {
    588         ref = pad;
    589         return pad->target(segment);
    590       }
    591 
    592       // Landing pad is another far pointer.  It is followed by a tag describing the pointed-to
    593       // object.
    594       ref = pad + 1;
    595 
    596       SegmentReader* newSegment = segment->getArena()->tryGetSegment(pad->farRef.segmentId.get());
    597       KJ_REQUIRE(newSegment != nullptr,
    598           "Message contains double-far pointer to unknown segment.") {
    599         return nullptr;
    600       }
    601       KJ_REQUIRE(pad->kind() == WirePointer::FAR,
    602           "Second word of double-far pad must be far pointer.") {
    603         return nullptr;
    604       }
    605 
    606       segment = newSegment;
    607       return pad->farTarget(segment);
    608     } else {
    609       KJ_DASSERT(refTarget != nullptr);
    610       return refTarget;
    611     }
    612   }
    613 
    614   // -----------------------------------------------------------------
    615 
    616   static void zeroObject(SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref) {
    617     // Zero out the pointed-to object.  Use when the pointer is about to be overwritten making the
    618     // target object no longer reachable.
    619 
    620     // We shouldn't zero out external data linked into the message.
    621     if (!segment->isWritable()) return;
    622 
    623     switch (ref->kind()) {
    624       case WirePointer::STRUCT:
    625       case WirePointer::LIST:
    626         zeroObject(segment, capTable, ref, ref->target());
    627         break;
    628       case WirePointer::FAR: {
            // Follow the far pointer, zero the target object, then zero the landing pad itself.
    629         segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
    630         if (segment->isWritable()) {  // Don't zero external data.
    631           WirePointer* pad = reinterpret_cast<WirePointer*>(ref->farTarget(segment));
    632 
    633           if (ref->isDoubleFar()) {
    634             segment = segment->getArena()->getSegment(pad->farRef.segmentId.get());
    635             if (segment->isWritable()) {
    636               zeroObject(segment, capTable, pad + 1, pad->farTarget(segment));
    637             }
                  // The double-far landing pad is two words: the far pointer plus the tag.
    638             zeroMemory(pad, G(2) * POINTERS);
    639           } else {
    640             zeroObject(segment, capTable, pad);
    641             zeroMemory(pad);
    642           }
    643         }
    644         break;
    645       }
    646       case WirePointer::OTHER:
    647         if (ref->isCapability()) {
    648 #if CAPNP_LITE
    649           KJ_FAIL_ASSERT("Capability encountered in builder in lite mode?") { break; }
    650 #else  // CAPNP_LITE
    651           capTable->dropCap(ref->capRef.index.get());
    652 #endif  // CAPNP_LITE, else
    653         } else {
    654           KJ_FAIL_REQUIRE("Unknown pointer type.") { break; }
    655         }
    656         break;
    657     }
    658   }
    659 
  static void zeroObject(SegmentBuilder* segment, CapTableBuilder* capTable,
                         WirePointer* tag, word* ptr) {
    // Zero out the object at `ptr` whose type and size are described by `tag`.  `tag` must be a
    // positional (STRUCT or LIST) pointer; far pointers are resolved by the other overload.

    // We shouldn't zero out external data linked into the message.
    if (!segment->isWritable()) return;

    switch (tag->kind()) {
      case WirePointer::STRUCT: {
        // Recursively zero everything reachable from the struct's pointer section, then wipe
        // the struct body (data section + pointer section).
        WirePointer* pointerSection =
            reinterpret_cast<WirePointer*>(ptr + tag->structRef.dataSize.get());
        for (auto i: kj::zeroTo(tag->structRef.ptrCount.get())) {
          zeroObject(segment, capTable, pointerSection + i);
        }
        zeroMemory(ptr, tag->structRef.wordSize());
        break;
      }
      case WirePointer::LIST: {
        switch (tag->listRef.elementSize()) {
          case ElementSize::VOID:
            // Nothing.
            break;
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            // Primitive list: no sub-objects, just wipe the packed element data.
            zeroMemory(ptr, roundBitsUpToWords(
                upgradeBound<uint64_t>(tag->listRef.elementCount()) *
                dataBitsPerElement(tag->listRef.elementSize())));
            break;
          }
          case ElementSize::POINTER: {
            // Pointer list: zero each pointed-to object, then the pointers themselves.
            WirePointer* typedPtr = reinterpret_cast<WirePointer*>(ptr);
            auto count = tag->listRef.elementCount() * (ONE * POINTERS / ELEMENTS);
            for (auto i: kj::zeroTo(count)) {
              zeroObject(segment, capTable, typedPtr + i);
            }
            zeroMemory(typedPtr, count);
            break;
          }
          case ElementSize::INLINE_COMPOSITE: {
            // Struct list: the first word is a tag describing each element's struct layout.
            WirePointer* elementTag = reinterpret_cast<WirePointer*>(ptr);

            KJ_ASSERT(elementTag->kind() == WirePointer::STRUCT,
                  "Don't know how to handle non-STRUCT inline composite.");
            WordCount dataSize = elementTag->structRef.dataSize.get();
            WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();

            auto count = elementTag->inlineCompositeListElementCount();
            if (pointerCount > ZERO * POINTERS) {
              // Walk each element, zeroing the objects referenced by its pointer section.
              word* pos = ptr + POINTER_SIZE_IN_WORDS;
              for (auto i KJ_UNUSED: kj::zeroTo(count)) {
                // Skip over the element's data section to reach its pointers.
                pos += dataSize;

                for (auto j KJ_UNUSED: kj::zeroTo(pointerCount)) {
                  zeroObject(segment, capTable, reinterpret_cast<WirePointer*>(pos));
                  pos += POINTER_SIZE_IN_WORDS;
                }
              }
            }

            // Wipe the whole list body, including the leading tag word.
            auto wordsPerElement = elementTag->structRef.wordSize() / ELEMENTS;
            zeroMemory(ptr, assertMaxBits<SEGMENT_WORD_COUNT_BITS>(POINTER_SIZE_IN_WORDS +
                upgradeBound<uint64_t>(count) * wordsPerElement, []() {
                  KJ_FAIL_ASSERT("encountered list pointer in builder which is too large to "
                      "possibly fit in a segment. Bug in builder code?");
                }));
            break;
          }
        }
        break;
      }
      case WirePointer::FAR:
        // The caller (the other overload) already resolved far pointers.
        KJ_FAIL_ASSERT("Unexpected FAR pointer.") {
          break;
        }
        break;
      case WirePointer::OTHER:
        // Capabilities are handled by the other overload before reaching here.
        KJ_FAIL_ASSERT("Unexpected OTHER pointer.") {
          break;
        }
        break;
    }
  }
    743 
  static KJ_ALWAYS_INLINE(
      void zeroPointerAndFars(SegmentBuilder* segment, WirePointer* ref)) {
    // Zero out the pointer itself and, if it is a far pointer, zero the landing pad as well, but
    // do not zero the object body.  Used when upgrading.

    if (ref->kind() == WirePointer::FAR) {
      SegmentBuilder* padSegment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
      if (padSegment->isWritable()) {  // Don't zero external data.
        WirePointer* pad = reinterpret_cast<WirePointer*>(ref->farTarget(padSegment));
        if (ref->isDoubleFar()) {
          // A double-far landing pad occupies two words (far pointer + tag).
          zeroMemory(pad, G(2) * POINTERS);
        } else {
          // A single-far landing pad is one word.
          zeroMemory(pad);
        }
      }
    }

    zeroMemory(ref);
  }
    763 
    764 
    765   // -----------------------------------------------------------------
    766 
  static MessageSizeCounts totalSize(
      SegmentReader* segment, const WirePointer* ref, int nestingLimit) {
    // Compute the total size of the object pointed to, not counting far pointer overhead.
    // Every access is bounds-checked because the message may be malicious; on any validation
    // failure the KJ_REQUIRE recovery block returns the size accumulated so far.

    MessageSizeCounts result = { ZERO * WORDS, 0 };

    if (ref->isNull()) {
      return result;
    }

    // Guard against stack overflow on maliciously deep messages.
    KJ_REQUIRE(nestingLimit > 0, "Message is too deeply-nested.") {
      return result;
    }
    --nestingLimit;

    // Resolve any far pointer to find the actual object location (may switch `segment`).
    const word* ptr;
    KJ_IF_MAYBE(p, followFars(ref, ref->target(segment), segment)) {
      ptr = p;
    } else {
      return result;
    }

    switch (ref->kind()) {
      case WirePointer::STRUCT: {
        KJ_REQUIRE(boundsCheck(segment, ptr, ref->structRef.wordSize()),
                   "Message contained out-of-bounds struct pointer.") {
          return result;
        }
        result.addWords(ref->structRef.wordSize());

        // Recurse into everything reachable from the struct's pointer section.
        const WirePointer* pointerSection =
            reinterpret_cast<const WirePointer*>(ptr + ref->structRef.dataSize.get());
        for (auto i: kj::zeroTo(ref->structRef.ptrCount.get())) {
          result += totalSize(segment, pointerSection + i, nestingLimit);
        }
        break;
      }
      case WirePointer::LIST: {
        switch (ref->listRef.elementSize()) {
          case ElementSize::VOID:
            // Nothing.
            break;
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            // Primitive list: size is the packed element data, rounded up to whole words.
            auto totalWords = roundBitsUpToWords(
                upgradeBound<uint64_t>(ref->listRef.elementCount()) *
                dataBitsPerElement(ref->listRef.elementSize()));
            KJ_REQUIRE(boundsCheck(segment, ptr, totalWords),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }
            result.addWords(totalWords);
            break;
          }
          case ElementSize::POINTER: {
            auto count = ref->listRef.elementCount() * (POINTERS / ELEMENTS);

            KJ_REQUIRE(boundsCheck(segment, ptr, count * WORDS_PER_POINTER),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }

            result.addWords(count * WORDS_PER_POINTER);

            // Recurse into each element's target object.
            for (auto i: kj::zeroTo(count)) {
              result += totalSize(segment, reinterpret_cast<const WirePointer*>(ptr) + i,
                                  nestingLimit);
            }
            break;
          }
          case ElementSize::INLINE_COMPOSITE: {
            // Struct list: claimed body size plus one word for the leading element tag.
            auto wordCount = ref->listRef.inlineCompositeWordCount();
            KJ_REQUIRE(boundsCheck(segment, ptr, wordCount + POINTER_SIZE_IN_WORDS),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }

            const WirePointer* elementTag = reinterpret_cast<const WirePointer*>(ptr);
            auto count = elementTag->inlineCompositeListElementCount();

            KJ_REQUIRE(elementTag->kind() == WirePointer::STRUCT,
                       "Don't know how to handle non-STRUCT inline composite.") {
              return result;
            }

            // Verify the elements actually fit within the claimed word count.
            auto actualSize = elementTag->structRef.wordSize() / ELEMENTS *
                              upgradeBound<uint64_t>(count);
            KJ_REQUIRE(actualSize <= wordCount,
                       "Struct list pointer's elements overran size.") {
              return result;
            }

            // We count the actual size rather than the claimed word count because that's what
            // we'll end up with if we make a copy.
            result.addWords(actualSize + POINTER_SIZE_IN_WORDS);

            WordCount dataSize = elementTag->structRef.dataSize.get();
            WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();

            if (pointerCount > ZERO * POINTERS) {
              // Walk each element's pointer section and recurse into its targets.
              const word* pos = ptr + POINTER_SIZE_IN_WORDS;
              for (auto i KJ_UNUSED: kj::zeroTo(count)) {
                pos += dataSize;

                for (auto j KJ_UNUSED: kj::zeroTo(pointerCount)) {
                  result += totalSize(segment, reinterpret_cast<const WirePointer*>(pos),
                                      nestingLimit);
                  pos += POINTER_SIZE_IN_WORDS;
                }
              }
            }
            break;
          }
        }
        break;
      }
      case WirePointer::FAR:
        // followFars() already resolved far pointers; seeing one here means a corrupt message.
        KJ_FAIL_REQUIRE("Unexpected FAR pointer.") {
          break;
        }
        break;
      case WirePointer::OTHER:
        if (ref->isCapability()) {
          // Capabilities contribute no words, only a capability count.
          result.capCount++;
        } else {
          KJ_FAIL_REQUIRE("Unknown pointer type.") { break; }
        }
        break;
    }

    return result;
  }
    902 
    903   // -----------------------------------------------------------------
    904   // Copy from an unchecked message.
    905 
    906   static KJ_ALWAYS_INLINE(
    907       void copyStruct(SegmentBuilder* segment, CapTableBuilder* capTable,
    908                       word* dst, const word* src,
    909                       StructDataWordCount dataSize, StructPointerCount pointerCount)) {
    910     copyMemory(dst, src, dataSize);
    911 
    912     const WirePointer* srcRefs = reinterpret_cast<const WirePointer*>(src + dataSize);
    913     WirePointer* dstRefs = reinterpret_cast<WirePointer*>(dst + dataSize);
    914 
    915     for (auto i: kj::zeroTo(pointerCount)) {
    916       SegmentBuilder* subSegment = segment;
    917       WirePointer* dstRef = dstRefs + i;
    918       copyMessage(subSegment, capTable, dstRef, srcRefs + i);
    919     }
    920   }
    921 
  static word* copyMessage(
      SegmentBuilder*& segment, CapTableBuilder* capTable,
      WirePointer*& dst, const WirePointer* src) {
    // Deep-copy the object `src` points to (from a flat, unchecked message) into builder space,
    // writing the resulting pointer at `dst`.  `segment` and `dst` are by-reference because
    // allocate() may redirect them when the allocation lands in a different segment.
    // Returns a pointer to the newly allocated object body (or nullptr for null/invalid input).
    // Not always-inline because it's recursive.

    switch (src->kind()) {
      case WirePointer::STRUCT: {
        if (src->isNull()) {
          zeroMemory(dst);
          return nullptr;
        } else {
          // Unchecked messages are flat, so target() needs no segment for bounds info.
          const word* srcPtr = src->target(nullptr);
          word* dstPtr = allocate(
              dst, segment, capTable, src->structRef.wordSize(), WirePointer::STRUCT, nullptr);

          copyStruct(segment, capTable, dstPtr, srcPtr, src->structRef.dataSize.get(),
                     src->structRef.ptrCount.get());

          dst->structRef.set(src->structRef.dataSize.get(), src->structRef.ptrCount.get());
          return dstPtr;
        }
      }
      case WirePointer::LIST: {
        switch (src->listRef.elementSize()) {
          case ElementSize::VOID:
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            // Primitive list: copy the packed element data as raw words.
            auto wordCount = roundBitsUpToWords(
                upgradeBound<uint64_t>(src->listRef.elementCount()) *
                dataBitsPerElement(src->listRef.elementSize()));
            const word* srcPtr = src->target(nullptr);
            word* dstPtr = allocate(dst, segment, capTable, wordCount, WirePointer::LIST, nullptr);
            copyMemory(dstPtr, srcPtr, wordCount);

            dst->listRef.set(src->listRef.elementSize(), src->listRef.elementCount());
            return dstPtr;
          }

          case ElementSize::POINTER: {
            // Pointer list: recursively copy each element's target.
            const WirePointer* srcRefs = reinterpret_cast<const WirePointer*>(src->target(nullptr));
            WirePointer* dstRefs = reinterpret_cast<WirePointer*>(
                allocate(dst, segment, capTable, src->listRef.elementCount() *
                    (ONE * POINTERS / ELEMENTS) * WORDS_PER_POINTER,
                    WirePointer::LIST, nullptr));

            for (auto i: kj::zeroTo(src->listRef.elementCount() * (ONE * POINTERS / ELEMENTS))) {
              // copyMessage() may modify its by-reference args; pass scratch copies.
              SegmentBuilder* subSegment = segment;
              WirePointer* dstRef = dstRefs + i;
              copyMessage(subSegment, capTable, dstRef, srcRefs + i);
            }

            dst->listRef.set(ElementSize::POINTER, src->listRef.elementCount());
            return reinterpret_cast<word*>(dstRefs);
          }

          case ElementSize::INLINE_COMPOSITE: {
            // Struct list: allocate body plus one extra word for the leading element tag.
            const word* srcPtr = src->target(nullptr);
            word* dstPtr = allocate(dst, segment, capTable,
                assertMaxBits<SEGMENT_WORD_COUNT_BITS>(
                    src->listRef.inlineCompositeWordCount() + POINTER_SIZE_IN_WORDS,
                    []() { KJ_FAIL_ASSERT("list too big to fit in a segment"); }),
                WirePointer::LIST, nullptr);

            dst->listRef.setInlineComposite(src->listRef.inlineCompositeWordCount());

            // Copy the tag word, then each element as a struct.
            const WirePointer* srcTag = reinterpret_cast<const WirePointer*>(srcPtr);
            copyMemory(reinterpret_cast<WirePointer*>(dstPtr), srcTag);

            const word* srcElement = srcPtr + POINTER_SIZE_IN_WORDS;
            word* dstElement = dstPtr + POINTER_SIZE_IN_WORDS;

            KJ_ASSERT(srcTag->kind() == WirePointer::STRUCT,
                "INLINE_COMPOSITE of lists is not yet supported.");

            for (auto i KJ_UNUSED: kj::zeroTo(srcTag->inlineCompositeListElementCount())) {
              copyStruct(segment, capTable, dstElement, srcElement,
                  srcTag->structRef.dataSize.get(), srcTag->structRef.ptrCount.get());
              srcElement += srcTag->structRef.wordSize();
              dstElement += srcTag->structRef.wordSize();
            }
            return dstPtr;
          }
        }
        break;
      }
      case WirePointer::OTHER:
        KJ_FAIL_REQUIRE("Unchecked messages cannot contain OTHER pointers (e.g. capabilities).");
        break;
      case WirePointer::FAR:
        KJ_FAIL_REQUIRE("Unchecked messages cannot contain far pointers.");
        break;
    }

    return nullptr;
  }
   1020 
   1021   static void transferPointer(SegmentBuilder* dstSegment, WirePointer* dst,
   1022                               SegmentBuilder* srcSegment, WirePointer* src) {
   1023     // Make *dst point to the same object as *src.  Both must reside in the same message, but can
   1024     // be in different segments.  Not always-inline because this is rarely used.
   1025     //
   1026     // Caller MUST zero out the source pointer after calling this, to make sure no later code
   1027     // mistakenly thinks the source location still owns the object.  transferPointer() doesn't do
   1028     // this zeroing itself because many callers transfer several pointers in a loop then zero out
   1029     // the whole section.
   1030 
   1031     KJ_DASSERT(dst->isNull());
   1032     // We expect the caller to ensure the target is already null so won't leak.
   1033 
   1034     if (src->isNull()) {
   1035       zeroMemory(dst);
   1036     } else if (src->isPositional()) {
   1037       transferPointer(dstSegment, dst, srcSegment, src, src->target());
   1038     } else {
   1039       // Far and other pointers are position-independent, so we can just copy.
   1040       copyMemory(dst, src);
   1041     }
   1042   }
   1043 
  static void transferPointer(SegmentBuilder* dstSegment, WirePointer* dst,
                              SegmentBuilder* srcSegment, const WirePointer* srcTag,
                              word* srcPtr) {
    // Like the other overload, but splits src into a tag and a target.  Particularly useful for
    // OrphanBuilder.

    if (dstSegment == srcSegment) {
      // Same segment, so create a direct pointer.

      if (srcTag->kind() == WirePointer::STRUCT && srcTag->structRef.wordSize() == ZERO * WORDS) {
        // Zero-sized structs get a dedicated encoding; see setKindAndTargetForEmptyStruct().
        dst->setKindAndTargetForEmptyStruct();
      } else {
        dst->setKindAndTarget(srcTag->kind(), srcPtr, dstSegment);
      }

      // We can just copy the upper 32 bits.  (Use memcpy() to comply with aliasing rules.)
      copyMemory(&dst->upper32Bits, &srcTag->upper32Bits);
    } else {
      // Need to create a far pointer.  Try to allocate it in the same segment as the source, so
      // that it doesn't need to be a double-far.

      WirePointer* landingPad =
          reinterpret_cast<WirePointer*>(srcSegment->allocate(G(1) * WORDS));
      if (landingPad == nullptr) {
        // Darn, need a double-far.  The two-word pad lives in whatever segment has room:
        // word 0 is a (non-double) far pointer to the object, word 1 holds the object's tag.
        auto allocation = srcSegment->getArena()->allocate(G(2) * WORDS);
        SegmentBuilder* farSegment = allocation.segment;
        landingPad = reinterpret_cast<WirePointer*>(allocation.words);

        landingPad[0].setFar(false, srcSegment->getOffsetTo(srcPtr));
        landingPad[0].farRef.segmentId.set(srcSegment->getSegmentId());

        // The tag carries the object's kind and size info with a zero offset.
        landingPad[1].setKindWithZeroOffset(srcTag->kind());
        copyMemory(&landingPad[1].upper32Bits, &srcTag->upper32Bits);

        // Point dst at the pad with the double-far bit set.
        dst->setFar(true, farSegment->getOffsetTo(reinterpret_cast<word*>(landingPad)));
        dst->farRef.set(farSegment->getSegmentId());
      } else {
        // Simple landing pad is just a pointer.
        landingPad->setKindAndTarget(srcTag->kind(), srcPtr, srcSegment);
        copyMemory(&landingPad->upper32Bits, &srcTag->upper32Bits);

        dst->setFar(false, srcSegment->getOffsetTo(reinterpret_cast<word*>(landingPad)));
        dst->farRef.set(srcSegment->getSegmentId());
      }
    }
  }
   1091 
   1092   // -----------------------------------------------------------------
   1093 
   1094   static KJ_ALWAYS_INLINE(StructBuilder initStructPointer(
   1095       WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, StructSize size,
   1096       BuilderArena* orphanArena = nullptr)) {
   1097     // Allocate space for the new struct.  Newly-allocated space is automatically zeroed.
   1098     word* ptr = allocate(ref, segment, capTable, size.total(), WirePointer::STRUCT, orphanArena);
   1099 
   1100     // Initialize the pointer.
   1101     ref->structRef.set(size);
   1102 
   1103     // Build the StructBuilder.
   1104     return StructBuilder(segment, capTable, ptr, reinterpret_cast<WirePointer*>(ptr + size.data),
   1105                          size.data * BITS_PER_WORD, size.pointers);
   1106   }
   1107 
  static KJ_ALWAYS_INLINE(StructBuilder getWritableStructPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, StructSize size,
      const word* defaultValue)) {
    // Convenience overload: computes the pointer's target and delegates to the full
    // implementation below.
    return getWritableStructPointer(ref, ref->target(), segment, capTable, size, defaultValue);
  }
   1113 
  static KJ_ALWAYS_INLINE(StructBuilder getWritableStructPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      StructSize size, const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    // Return a builder for the struct that `ref` points at, initializing it from `defaultValue`
    // (or as empty) when null, and transparently growing/moving the struct if the existing
    // allocation is smaller than `size` (i.e. was written by an older schema version).
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return initStructPointer(ref, segment, capTable, size, orphanArena);
      }
      // Copy the default value into the message so it becomes writable.
      refTarget = copyMessage(segment, capTable, ref,
          reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    // Resolve far pointers; oldRef/oldSegment/oldPtr describe the actual object location.
    WirePointer* oldRef = ref;
    SegmentBuilder* oldSegment = segment;
    word* oldPtr = followFars(oldRef, refTarget, oldSegment);

    KJ_REQUIRE(oldRef->kind() == WirePointer::STRUCT,
        "Message contains non-struct pointer where struct pointer was expected.") {
      goto useDefault;
    }

    auto oldDataSize = oldRef->structRef.dataSize.get();
    auto oldPointerCount = oldRef->structRef.ptrCount.get();
    WirePointer* oldPointerSection =
        reinterpret_cast<WirePointer*>(oldPtr + oldDataSize);

    if (oldDataSize < size.data || oldPointerCount < size.pointers) {
      // The space allocated for this struct is too small.  Unlike with readers, we can't just
      // run with it and do bounds checks at access time, because how would we handle writes?
      // Instead, we have to copy the struct to a new space now.

      auto newDataSize = kj::max(oldDataSize, size.data);
      auto newPointerCount = kj::max(oldPointerCount, size.pointers);
      auto totalSize = newDataSize + newPointerCount * WORDS_PER_POINTER;

      // Don't let allocate() zero out the object just yet.
      zeroPointerAndFars(segment, ref);

      word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::STRUCT, orphanArena);
      ref->structRef.set(newDataSize, newPointerCount);

      // Copy data section.
      copyMemory(ptr, oldPtr, oldDataSize);

      // Copy pointer section.
      WirePointer* newPointerSection = reinterpret_cast<WirePointer*>(ptr + newDataSize);
      for (auto i: kj::zeroTo(oldPointerCount)) {
        transferPointer(segment, newPointerSection + i, oldSegment, oldPointerSection + i);
      }

      // Zero out old location.  This has two purposes:
      // 1) We don't want to leak the original contents of the struct when the message is written
      //    out as it may contain secrets that the caller intends to remove from the new copy.
      // 2) Zeros will be deflated by packing, making this dead memory almost-free if it ever
      //    hits the wire.
      zeroMemory(oldPtr, oldDataSize + oldPointerCount * WORDS_PER_POINTER);

      return StructBuilder(segment, capTable, ptr, newPointerSection, newDataSize * BITS_PER_WORD,
                           newPointerCount);
    } else {
      // Existing allocation is already big enough; build in place.
      return StructBuilder(oldSegment, capTable, oldPtr, oldPointerSection,
                           oldDataSize * BITS_PER_WORD, oldPointerCount);
    }
  }
   1180 
   1181   static KJ_ALWAYS_INLINE(ListBuilder initListPointer(
   1182       WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
   1183       ElementCount elementCount, ElementSize elementSize, BuilderArena* orphanArena = nullptr)) {
   1184     KJ_DREQUIRE(elementSize != ElementSize::INLINE_COMPOSITE,
   1185         "Should have called initStructListPointer() instead.");
   1186 
   1187     auto checkedElementCount = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(elementCount,
   1188         []() { KJ_FAIL_REQUIRE("tried to allocate list with too many elements"); });
   1189 
   1190     auto dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
   1191     auto pointerCount = pointersPerElement(elementSize) * ELEMENTS;
   1192     auto step = bitsPerElementIncludingPointers(elementSize);
   1193     KJ_DASSERT(step * ELEMENTS == (dataSize + pointerCount * BITS_PER_POINTER));
   1194 
   1195     // Calculate size of the list.
   1196     auto wordCount = roundBitsUpToWords(upgradeBound<uint64_t>(checkedElementCount) * step);
   1197 
   1198     // Allocate the list.
   1199     word* ptr = allocate(ref, segment, capTable, wordCount, WirePointer::LIST, orphanArena);
   1200 
   1201     // Initialize the pointer.
   1202     ref->listRef.set(elementSize, checkedElementCount);
   1203 
   1204     // Build the ListBuilder.
   1205     return ListBuilder(segment, capTable, ptr, step, checkedElementCount,
   1206                        dataSize, pointerCount, elementSize);
   1207   }
   1208 
  static KJ_ALWAYS_INLINE(ListBuilder initStructListPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      ElementCount elementCount, StructSize elementSize, BuilderArena* orphanArena = nullptr)) {
    // Initialize `ref` as a new INLINE_COMPOSITE (struct) list of `elementCount` elements,
    // each laid out per `elementSize`.

    // Reject counts that don't fit in a list pointer's element-count field.
    auto checkedElementCount = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(elementCount,
        []() { KJ_FAIL_REQUIRE("tried to allocate list with too many elements"); });

    WordsPerElementN<17> wordsPerElement = elementSize.total() / ELEMENTS;

    // Allocate the list, prefixed by a single WirePointer.
    auto wordCount = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
        upgradeBound<uint64_t>(checkedElementCount) * wordsPerElement,
        []() { KJ_FAIL_REQUIRE("total size of struct list is larger than max segment size"); });
    word* ptr = allocate(ref, segment, capTable, POINTER_SIZE_IN_WORDS + wordCount,
                         WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    // INLINE_COMPOSITE lists replace the element count with the word count.
    ref->listRef.setInlineComposite(wordCount);

    // Initialize the list tag.  The tag (the prefix word) carries the element count and the
    // per-element struct layout.
    reinterpret_cast<WirePointer*>(ptr)->setKindAndInlineCompositeListElementCount(
        WirePointer::STRUCT, checkedElementCount);
    reinterpret_cast<WirePointer*>(ptr)->structRef.set(elementSize);
    ptr += POINTER_SIZE_IN_WORDS;

    // Build the ListBuilder pointing past the tag, at the first element.
    return ListBuilder(segment, capTable, ptr, wordsPerElement * BITS_PER_WORD, checkedElementCount,
                       elementSize.data * BITS_PER_WORD, elementSize.pointers,
                       ElementSize::INLINE_COMPOSITE);
  }
   1239 
  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      ElementSize elementSize, const word* defaultValue)) {
    // Convenience overload: computes the pointer's target and delegates to the full
    // implementation below.
    return getWritableListPointer(origRef, origRef->target(), origSegment, capTable, elementSize,
                                  defaultValue);
  }
   1246 
   1247   static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
   1248       WirePointer* origRef, word* origRefTarget,
   1249       SegmentBuilder* origSegment, CapTableBuilder* capTable, ElementSize elementSize,
   1250       const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
   1251     KJ_DREQUIRE(elementSize != ElementSize::INLINE_COMPOSITE,
   1252              "Use getWritableStructListPointer() for struct lists.");
   1253 
   1254     if (origRef->isNull()) {
   1255     useDefault:
   1256       if (defaultValue == nullptr ||
   1257           reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
   1258         return ListBuilder(elementSize);
   1259       }
   1260       origRefTarget = copyMessage(
   1261           origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
   1262       defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
   1263     }
   1264 
   1265     // We must verify that the pointer has the right size.  Unlike in
   1266     // getWritableStructListPointer(), we never need to "upgrade" the data, because this
   1267     // method is called only for non-struct lists, and there is no allowed upgrade path *to*
   1268     // a non-struct list, only *from* them.
   1269 
   1270     WirePointer* ref = origRef;
   1271     SegmentBuilder* segment = origSegment;
   1272     word* ptr = followFars(ref, origRefTarget, segment);
   1273 
   1274     KJ_REQUIRE(ref->kind() == WirePointer::LIST,
   1275         "Called getWritableListPointer() but existing pointer is not a list.") {
   1276       goto useDefault;
   1277     }
   1278 
   1279     ElementSize oldSize = ref->listRef.elementSize();
   1280 
   1281     if (oldSize == ElementSize::INLINE_COMPOSITE) {
   1282       // The existing element size is INLINE_COMPOSITE, though we expected a list of primitives.
   1283       // The existing data must have been written with a newer version of the protocol.  We
   1284       // therefore never need to upgrade the data in this case, but we do need to validate that it
   1285       // is a valid upgrade from what we expected.
   1286 
   1287       // Read the tag to get the actual element count.
   1288       WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
   1289       KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
   1290           "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
   1291       ptr += POINTER_SIZE_IN_WORDS;
   1292 
   1293       auto dataSize = tag->structRef.dataSize.get();
   1294       auto pointerCount = tag->structRef.ptrCount.get();
   1295 
   1296       switch (elementSize) {
   1297         case ElementSize::VOID:
   1298           // Anything is a valid upgrade from Void.
   1299           break;
   1300 
   1301         case ElementSize::BIT:
   1302           KJ_FAIL_REQUIRE(
   1303               "Found struct list where bit list was expected; upgrading boolean lists to structs "
   1304               "is no longer supported.") {
   1305             goto useDefault;
   1306           }
   1307           break;
   1308 
   1309         case ElementSize::BYTE:
   1310         case ElementSize::TWO_BYTES:
   1311         case ElementSize::FOUR_BYTES:
   1312         case ElementSize::EIGHT_BYTES:
   1313           KJ_REQUIRE(dataSize >= ONE * WORDS,
   1314                      "Existing list value is incompatible with expected type.") {
   1315             goto useDefault;
   1316           }
   1317           break;
   1318 
   1319         case ElementSize::POINTER:
   1320           KJ_REQUIRE(pointerCount >= ONE * POINTERS,
   1321                      "Existing list value is incompatible with expected type.") {
   1322             goto useDefault;
   1323           }
   1324           // Adjust the pointer to point at the reference segment.
   1325           ptr += dataSize;
   1326           break;
   1327 
   1328         case ElementSize::INLINE_COMPOSITE:
   1329           KJ_UNREACHABLE;
   1330       }
   1331 
   1332       // OK, looks valid.
   1333 
   1334       return ListBuilder(segment, capTable, ptr,
   1335                          tag->structRef.wordSize() * BITS_PER_WORD / ELEMENTS,
   1336                          tag->inlineCompositeListElementCount(),
   1337                          dataSize * BITS_PER_WORD, pointerCount, ElementSize::INLINE_COMPOSITE);
   1338     } else {
   1339       auto dataSize = dataBitsPerElement(oldSize) * ELEMENTS;
   1340       auto pointerCount = pointersPerElement(oldSize) * ELEMENTS;
   1341 
   1342       if (elementSize == ElementSize::BIT) {
   1343         KJ_REQUIRE(oldSize == ElementSize::BIT,
   1344             "Found non-bit list where bit list was expected.") {
   1345           goto useDefault;
   1346         }
   1347       } else {
   1348         KJ_REQUIRE(oldSize != ElementSize::BIT,
   1349             "Found bit list where non-bit list was expected.") {
   1350           goto useDefault;
   1351         }
   1352         KJ_REQUIRE(dataSize >= dataBitsPerElement(elementSize) * ELEMENTS,
   1353                    "Existing list value is incompatible with expected type.") {
   1354           goto useDefault;
   1355         }
   1356         KJ_REQUIRE(pointerCount >= pointersPerElement(elementSize) * ELEMENTS,
   1357                    "Existing list value is incompatible with expected type.") {
   1358           goto useDefault;
   1359         }
   1360       }
   1361 
   1362       auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
   1363       return ListBuilder(segment, capTable, ptr, step, ref->listRef.elementCount(),
   1364                          dataSize, pointerCount, oldSize);
   1365     }
   1366   }
   1367 
   1368   static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointerAnySize(
   1369       WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
   1370       const word* defaultValue)) {
   1371     return getWritableListPointerAnySize(origRef, origRef->target(), origSegment,
   1372                                          capTable, defaultValue);
   1373   }
   1374 
  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointerAnySize(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable,
      const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    // Get a writable ListBuilder for an existing list pointer without enforcing any particular
    // element size -- the returned builder reflects whatever element size the list was actually
    // written with.  `defaultValue`, if non-null, points at a default-value message that is
    // copied in when `origRef` is null or found to be invalid.
    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        // No usable default; return an empty (void-element) list.
        return ListBuilder(ElementSize::VOID);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    // Follow any far pointers to find the actual list content.
    WirePointer* ref = origRef;
    SegmentBuilder* segment = origSegment;
    word* ptr = followFars(ref, origRefTarget, segment);

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
        "Called getWritableListPointerAnySize() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize elementSize = ref->listRef.elementSize();

    if (elementSize == ElementSize::INLINE_COMPOSITE) {
      // Read the tag to get the actual element count.
      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
      ptr += POINTER_SIZE_IN_WORDS;

      return ListBuilder(segment, capTable, ptr,
                         tag->structRef.wordSize() * BITS_PER_WORD / ELEMENTS,
                         tag->inlineCompositeListElementCount(),
                         tag->structRef.dataSize.get() * BITS_PER_WORD,
                         tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE);
    } else {
      // Primitive or pointer list: the step size is derived directly from the element size.
      auto dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
      auto pointerCount = pointersPerElement(elementSize) * ELEMENTS;

      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      return ListBuilder(segment, capTable, ptr, step, ref->listRef.elementCount(),
                         dataSize, pointerCount, elementSize);
    }
  }
   1422 
   1423   static KJ_ALWAYS_INLINE(ListBuilder getWritableStructListPointer(
   1424       WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
   1425       StructSize elementSize, const word* defaultValue)) {
   1426     return getWritableStructListPointer(origRef, origRef->target(), origSegment, capTable,
   1427                                         elementSize, defaultValue);
   1428   }
  static KJ_ALWAYS_INLINE(ListBuilder getWritableStructListPointer(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable,
      StructSize elementSize, const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    // Get a writable ListBuilder for a struct list whose elements the caller expects to be at
    // least `elementSize` large.  If the existing list was written with smaller elements (e.g.
    // by an older version of the schema), the list is copied into a new, larger allocation and
    // the old location is zeroed out.  `defaultValue`, if non-null, points at a default-value
    // message that is copied in when `origRef` is null or found to be invalid.
    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(ElementSize::INLINE_COMPOSITE);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    // We must verify that the pointer has the right size and potentially upgrade it if not.

    WirePointer* oldRef = origRef;
    SegmentBuilder* oldSegment = origSegment;
    word* oldPtr = followFars(oldRef, origRefTarget, oldSegment);

    KJ_REQUIRE(oldRef->kind() == WirePointer::LIST,
               "Called getList{Field,Element}() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize oldSize = oldRef->listRef.elementSize();

    if (oldSize == ElementSize::INLINE_COMPOSITE) {
      // Existing list is INLINE_COMPOSITE, but we need to verify that the sizes match.

      WirePointer* oldTag = reinterpret_cast<WirePointer*>(oldPtr);
      oldPtr += POINTER_SIZE_IN_WORDS;
      KJ_REQUIRE(oldTag->kind() == WirePointer::STRUCT,
                 "INLINE_COMPOSITE list with non-STRUCT elements not supported.") {
        goto useDefault;
      }

      auto oldDataSize = oldTag->structRef.dataSize.get();
      auto oldPointerCount = oldTag->structRef.ptrCount.get();
      auto oldStep = (oldDataSize + oldPointerCount * WORDS_PER_POINTER) / ELEMENTS;

      auto elementCount = oldTag->inlineCompositeListElementCount();

      if (oldDataSize >= elementSize.data && oldPointerCount >= elementSize.pointers) {
        // Old size is at least as large as we need.  Ship it.
        return ListBuilder(oldSegment, capTable, oldPtr, oldStep * BITS_PER_WORD, elementCount,
                           oldDataSize * BITS_PER_WORD, oldPointerCount,
                           ElementSize::INLINE_COMPOSITE);
      }

      // The structs in this list are smaller than expected, probably written using an older
      // version of the protocol.  We need to make a copy and expand them.

      auto newDataSize = kj::max(oldDataSize, elementSize.data);
      auto newPointerCount = kj::max(oldPointerCount, elementSize.pointers);
      auto newStep = (newDataSize + newPointerCount * WORDS_PER_POINTER) / ELEMENTS;

      // Guard against the expanded list overflowing the maximum segment size.
      auto totalSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
            newStep * upgradeBound<uint64_t>(elementCount),
            []() { KJ_FAIL_REQUIRE("total size of struct list is larger than max segment size"); });

      // Don't let allocate() zero out the object just yet.
      zeroPointerAndFars(origSegment, origRef);

      word* newPtr = allocate(origRef, origSegment, capTable, totalSize + POINTER_SIZE_IN_WORDS,
                              WirePointer::LIST, orphanArena);
      origRef->listRef.setInlineComposite(totalSize);

      // Write the new tag word describing the (larger) element size.
      WirePointer* newTag = reinterpret_cast<WirePointer*>(newPtr);
      newTag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, elementCount);
      newTag->structRef.set(newDataSize, newPointerCount);
      newPtr += POINTER_SIZE_IN_WORDS;

      // Copy each element into its new, wider slot: data section verbatim, pointer section via
      // transferPointer() so that intra-message pointers remain valid after the move.
      word* src = oldPtr;
      word* dst = newPtr;
      for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
        // Copy data section.
        copyMemory(dst, src, oldDataSize);

        // Copy pointer section.
        WirePointer* newPointerSection = reinterpret_cast<WirePointer*>(dst + newDataSize);
        WirePointer* oldPointerSection = reinterpret_cast<WirePointer*>(src + oldDataSize);
        for (auto j: kj::zeroTo(oldPointerCount)) {
          transferPointer(origSegment, newPointerSection + j, oldSegment, oldPointerSection + j);
        }

        dst += newStep * (ONE * ELEMENTS);
        src += oldStep * (ONE * ELEMENTS);
      }

      auto oldSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
            oldStep * upgradeBound<uint64_t>(elementCount),
            []() { KJ_FAIL_ASSERT("old size overflows but new size doesn't?"); });

      // Zero out old location.  See explanation in getWritableStructPointer().
      // Make sure to include the tag word.
      zeroMemory(oldPtr - POINTER_SIZE_IN_WORDS, oldSize + POINTER_SIZE_IN_WORDS);

      return ListBuilder(origSegment, capTable, newPtr, newStep * BITS_PER_WORD, elementCount,
                         newDataSize * BITS_PER_WORD, newPointerCount,
                         ElementSize::INLINE_COMPOSITE);
    } else {
      // We're upgrading from a non-struct list.

      auto oldDataSize = dataBitsPerElement(oldSize) * ELEMENTS;
      auto oldPointerCount = pointersPerElement(oldSize) * ELEMENTS;
      auto oldStep = (oldDataSize + oldPointerCount * BITS_PER_POINTER) / ELEMENTS;
      auto elementCount = oldRef->listRef.elementCount();

      if (oldSize == ElementSize::VOID) {
        // Nothing to copy, just allocate a new list.
        return initStructListPointer(origRef, origSegment, capTable, elementCount, elementSize);
      } else {
        // Upgrading to an inline composite list.

        KJ_REQUIRE(oldSize != ElementSize::BIT,
            "Found bit list where struct list was expected; upgrading boolean lists to structs "
            "is no longer supported.") {
          goto useDefault;
        }

        auto newDataSize = elementSize.data;
        auto newPointerCount = elementSize.pointers;

        if (oldSize == ElementSize::POINTER) {
          newPointerCount = kj::max(newPointerCount, ONE * POINTERS);
        } else {
          // Old list contains data elements, so we need at least 1 word of data.
          newDataSize = kj::max(newDataSize, ONE * WORDS);
        }

        auto newStep = (newDataSize + newPointerCount * WORDS_PER_POINTER) / ELEMENTS;
        auto totalWords = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
              newStep * upgradeBound<uint64_t>(elementCount),
              []() {KJ_FAIL_REQUIRE("total size of struct list is larger than max segment size");});

        // Don't let allocate() zero out the object just yet.
        zeroPointerAndFars(origSegment, origRef);

        word* newPtr = allocate(origRef, origSegment, capTable, totalWords + POINTER_SIZE_IN_WORDS,
                                WirePointer::LIST, orphanArena);
        origRef->listRef.setInlineComposite(totalWords);

        WirePointer* tag = reinterpret_cast<WirePointer*>(newPtr);
        tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, elementCount);
        tag->structRef.set(newDataSize, newPointerCount);
        newPtr += POINTER_SIZE_IN_WORDS;

        if (oldSize == ElementSize::POINTER) {
          // Each old element was a single pointer; move it into the new element's pointer
          // section.
          WirePointer* dst = reinterpret_cast<WirePointer*>(newPtr + newDataSize);
          WirePointer* src = reinterpret_cast<WirePointer*>(oldPtr);
          for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
            transferPointer(origSegment, dst, oldSegment, src);
            dst += newStep / WORDS_PER_POINTER * (ONE * ELEMENTS);
            ++src;
          }
        } else {
          // Each old element was raw data; copy it to the start of the new element's data
          // section.
          byte* dst = reinterpret_cast<byte*>(newPtr);
          byte* src = reinterpret_cast<byte*>(oldPtr);
          auto newByteStep = newStep * (ONE * ELEMENTS) * BYTES_PER_WORD;
          auto oldByteStep = oldDataSize / BITS_PER_BYTE;
          for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
            copyMemory(dst, src, oldByteStep);
            src += oldByteStep;
            dst += newByteStep;
          }
        }

        auto oldSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
              roundBitsUpToWords(oldStep * upgradeBound<uint64_t>(elementCount)),
              []() { KJ_FAIL_ASSERT("old size overflows but new size doesn't?"); });

        // Zero out old location.  See explanation in getWritableStructPointer().
        zeroMemory(oldPtr, oldSize);

        return ListBuilder(origSegment, capTable, newPtr, newStep * BITS_PER_WORD, elementCount,
                           newDataSize * BITS_PER_WORD, newPointerCount,
                           ElementSize::INLINE_COMPOSITE);
      }
    }
  }
   1611 
   1612   static KJ_ALWAYS_INLINE(SegmentAnd<Text::Builder> initTextPointer(
   1613       WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, TextSize size,
   1614       BuilderArena* orphanArena = nullptr)) {
   1615     // The byte list must include a NUL terminator.
   1616     auto byteSize = size + ONE * BYTES;
   1617 
   1618     // Allocate the space.
   1619     word* ptr = allocate(
   1620         ref, segment, capTable, roundBytesUpToWords(byteSize), WirePointer::LIST, orphanArena);
   1621 
   1622     // Initialize the pointer.
   1623     ref->listRef.set(ElementSize::BYTE, byteSize * (ONE * ELEMENTS / BYTES));
   1624 
   1625     // Build the Text::Builder. Note that since allocate()ed memory is pre-zero'd, we don't need
   1626     // to initialize the NUL terminator.
   1627     return { segment, Text::Builder(reinterpret_cast<char*>(ptr), unbound(size / BYTES)) };
   1628   }
   1629 
   1630   static KJ_ALWAYS_INLINE(SegmentAnd<Text::Builder> setTextPointer(
   1631       WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, Text::Reader value,
   1632       BuilderArena* orphanArena = nullptr)) {
   1633     TextSize size = assertMax<MAX_TEXT_SIZE>(bounded(value.size()),
   1634         []() { KJ_FAIL_REQUIRE("text blob too big"); }) * BYTES;
   1635 
   1636     auto allocation = initTextPointer(ref, segment, capTable, size, orphanArena);
   1637     copyMemory(allocation.value.begin(), value);
   1638     return allocation;
   1639   }
   1640 
   1641   static KJ_ALWAYS_INLINE(Text::Builder getWritableTextPointer(
   1642       WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
   1643       const void* defaultValue, TextSize defaultSize)) {
   1644     return getWritableTextPointer(ref, ref->target(), segment,capTable,  defaultValue, defaultSize);
   1645   }
   1646 
  static KJ_ALWAYS_INLINE(Text::Builder getWritableTextPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, TextSize defaultSize)) {
    // Get a writable view of an existing text blob.  If `ref` is null or invalid, the blob is
    // initialized from `defaultValue` (`defaultSize` bytes, not counting the NUL terminator);
    // a zero default size yields a null builder.
    if (ref->isNull()) {
    useDefault:
      if (defaultSize == ZERO * BYTES) {
        return nullptr;
      } else {
        Text::Builder builder = initTextPointer(ref, segment, capTable, defaultSize).value;
        copyMemory(builder.asBytes().begin(), reinterpret_cast<const byte*>(defaultValue),
                   defaultSize);
        return builder;
      }
    } else {
      word* ptr = followFars(ref, refTarget, segment);
      byte* bptr = reinterpret_cast<byte*>(ptr);

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
          "Called getText{Field,Element}() but existing pointer is not a list.") {
        goto useDefault;
      }
      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
          "Called getText{Field,Element}() but existing list pointer is not byte-sized.") {
        goto useDefault;
      }

      // The wire size includes the NUL terminator; subtract one byte to get the logical text
      // size.  trySubtract() fails (returns nullptr) when the list is empty.
      auto maybeSize = trySubtract(ref->listRef.elementCount() * (ONE * BYTES / ELEMENTS),
                                   ONE * BYTES);
      KJ_IF_MAYBE(size, maybeSize) {
        KJ_REQUIRE(*(bptr + *size) == '\0', "Text blob missing NUL terminator.") {
          goto useDefault;
        }

        return Text::Builder(reinterpret_cast<char*>(bptr), unbound(*size / BYTES));
      } else {
        // Zero-length byte list: there is no room for even the NUL terminator.
        KJ_FAIL_REQUIRE("zero-size blob can't be text (need NUL terminator)") {
          goto useDefault;
        };
      }
    }
  }
   1688 
   1689   static KJ_ALWAYS_INLINE(SegmentAnd<Data::Builder> initDataPointer(
   1690       WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, BlobSize size,
   1691       BuilderArena* orphanArena = nullptr)) {
   1692     // Allocate the space.
   1693     word* ptr = allocate(ref, segment, capTable, roundBytesUpToWords(size),
   1694                          WirePointer::LIST, orphanArena);
   1695 
   1696     // Initialize the pointer.
   1697     ref->listRef.set(ElementSize::BYTE, size * (ONE * ELEMENTS / BYTES));
   1698 
   1699     // Build the Data::Builder.
   1700     return { segment, Data::Builder(reinterpret_cast<byte*>(ptr), unbound(size / BYTES)) };
   1701   }
   1702 
   1703   static KJ_ALWAYS_INLINE(SegmentAnd<Data::Builder> setDataPointer(
   1704       WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, Data::Reader value,
   1705       BuilderArena* orphanArena = nullptr)) {
   1706     BlobSize size = assertMaxBits<BLOB_SIZE_BITS>(bounded(value.size()),
   1707         []() { KJ_FAIL_REQUIRE("text blob too big"); }) * BYTES;
   1708 
   1709     auto allocation = initDataPointer(ref, segment, capTable, size, orphanArena);
   1710     copyMemory(allocation.value.begin(), value);
   1711     return allocation;
   1712   }
   1713 
   1714   static KJ_ALWAYS_INLINE(Data::Builder getWritableDataPointer(
   1715       WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
   1716       const void* defaultValue, BlobSize defaultSize)) {
   1717     return getWritableDataPointer(ref, ref->target(), segment, capTable, defaultValue, defaultSize);
   1718   }
   1719 
  static KJ_ALWAYS_INLINE(Data::Builder getWritableDataPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, BlobSize defaultSize)) {
    // Get a writable view of an existing data blob.  If `ref` is null or invalid, the blob is
    // initialized from `defaultValue` (`defaultSize` bytes); a zero default size yields a null
    // builder.
    if (ref->isNull()) {
    useDefault:
      if (defaultSize == ZERO * BYTES) {
        return nullptr;
      } else {
        Data::Builder builder = initDataPointer(ref, segment, capTable, defaultSize).value;
        copyMemory(builder.begin(), reinterpret_cast<const byte*>(defaultValue), defaultSize);
        return builder;
      }
    } else {
      word* ptr = followFars(ref, refTarget, segment);

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
          "Called getData{Field,Element}() but existing pointer is not a list.") {
        goto useDefault;
      }
      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
          "Called getData{Field,Element}() but existing list pointer is not byte-sized.") {
        goto useDefault;
      }

      // Unlike text, data has no NUL terminator, so the element count is the byte count.
      return Data::Builder(reinterpret_cast<byte*>(ptr),
          unbound(ref->listRef.elementCount() / ELEMENTS));
    }
  }
   1748 
  static SegmentAnd<word*> setStructPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref, StructReader value,
      BuilderArena* orphanArena = nullptr, bool canonical = false) {
    // Deep-copy the struct read by `value` into a fresh allocation pointed at by `ref`.  When
    // `canonical` is true, trailing zero-valued data bytes and trailing null pointers are
    // truncated so the copy is in canonical form.
    auto dataSize = roundBitsUpToBytes(value.dataSize);
    auto ptrCount = value.pointerCount;

    if (canonical) {
      // StructReaders should not have bitwidths other than 1, but let's be safe
      KJ_REQUIRE((value.dataSize == ONE * BITS)
                 || (value.dataSize % BITS_PER_BYTE == ZERO * BITS));

      if (value.dataSize == ONE * BITS) {
        // Handle the truncation case where it's a false in a 1-bit struct
        if (!value.getDataField<bool>(ZERO * ELEMENTS)) {
          dataSize = ZERO * BYTES;
        }
      } else {
        // Truncate the data section
        auto data = value.getDataSectionAsBlob();
        auto end = data.end();
        while (end > data.begin() && end[-1] == 0) --end;
        dataSize = intervalLength(data.begin(), end, MAX_STUCT_DATA_WORDS * BYTES_PER_WORD);
      }

      // Truncate pointer section
      const WirePointer* ptr = value.pointers + ptrCount;
      while (ptr > value.pointers && ptr[-1].isNull()) --ptr;
      ptrCount = intervalLength(value.pointers, ptr, MAX_STRUCT_POINTER_COUNT);
    }

    auto dataWords = roundBytesUpToWords(dataSize);

    auto totalSize = dataWords + ptrCount * WORDS_PER_POINTER;

    word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::STRUCT, orphanArena);
    ref->structRef.set(dataWords, ptrCount);

    if (value.dataSize == ONE * BITS) {
      // Data size could be made 0 by truncation
      if (dataSize != ZERO * BYTES) {
        *reinterpret_cast<char*>(ptr) = value.getDataField<bool>(ZERO * ELEMENTS);
      }
    } else {
      copyMemory(reinterpret_cast<byte*>(ptr),
                 reinterpret_cast<const byte*>(value.data),
                 dataSize);
    }

    // Deep-copy every pointer in the (possibly truncated) pointer section.
    WirePointer* pointerSection = reinterpret_cast<WirePointer*>(ptr + dataWords);
    for (auto i: kj::zeroTo(ptrCount)) {
      copyPointer(segment, capTable, pointerSection + i,
                  value.segment, value.capTable, value.pointers + i,
                  value.nestingLimit, nullptr, canonical);
    }

    return { segment, ptr };
  }
   1806 
   1807 #if !CAPNP_LITE
   1808   static void setCapabilityPointer(
   1809       SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref,
   1810       kj::Own<ClientHook>&& cap) {
   1811     if (!ref->isNull()) {
   1812       zeroObject(segment, capTable, ref);
   1813     }
   1814     if (cap->isNull()) {
   1815       zeroMemory(ref);
   1816     } else {
   1817       ref->setCap(capTable->injectCap(kj::mv(cap)));
   1818     }
   1819   }
   1820 #endif  // !CAPNP_LITE
   1821 
  static SegmentAnd<word*> setListPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref, ListReader value,
      BuilderArena* orphanArena = nullptr, bool canonical = false) {
    // Deep-copy the list read by `value` into a fresh allocation pointed at by `ref`.  When
    // `canonical` is true, struct elements are truncated to the smallest data/pointer sections
    // that still cover every element's non-zero content, producing canonical form.
    auto totalSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
        roundBitsUpToWords(upgradeBound<uint64_t>(value.elementCount) * value.step),
        []() { KJ_FAIL_ASSERT("encountered impossibly long struct list ListReader"); });

    if (value.elementSize != ElementSize::INLINE_COMPOSITE) {
      // List of non-structs.
      word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::LIST, orphanArena);

      if (value.elementSize == ElementSize::POINTER) {
        // List of pointers.
        ref->listRef.set(ElementSize::POINTER, value.elementCount);
        for (auto i: kj::zeroTo(value.elementCount * (ONE * POINTERS / ELEMENTS))) {
          copyPointer(segment, capTable, reinterpret_cast<WirePointer*>(ptr) + i,
                      value.segment, value.capTable,
                      reinterpret_cast<const WirePointer*>(value.ptr) + i,
                      value.nestingLimit, nullptr, canonical);
        }
      } else {
        // List of data.
        ref->listRef.set(value.elementSize, value.elementCount);

        // Copy the whole bytes first...
        auto wholeByteSize =
          assertMax(MAX_SEGMENT_WORDS * BYTES_PER_WORD,
            upgradeBound<uint64_t>(value.elementCount) * value.step / BITS_PER_BYTE,
            []() { KJ_FAIL_ASSERT("encountered impossibly long data ListReader"); });
        copyMemory(reinterpret_cast<byte*>(ptr), value.ptr, wholeByteSize);
        auto leftoverBits =
          (upgradeBound<uint64_t>(value.elementCount) * value.step) % BITS_PER_BYTE;
        if (leftoverBits > ZERO * BITS) {
          // We need to copy a partial byte.  Mask off bits past the list's end (e.g. a bit
          // list whose length is not a multiple of 8) so no stray data leaks into the copy.
          uint8_t mask = (1 << unbound(leftoverBits / BITS)) - 1;
          *((reinterpret_cast<byte*>(ptr)) + wholeByteSize) = mask & *(value.ptr + wholeByteSize);
        }
      }

      return { segment, ptr };
    } else {
      // List of structs.
      StructDataWordCount declDataSize = value.structDataSize / BITS_PER_WORD;
      StructPointerCount declPointerCount = value.structPointerCount;

      StructDataWordCount dataSize = ZERO * WORDS;
      StructPointerCount ptrCount = ZERO * POINTERS;

      if (canonical) {
        // Scan all elements to find the smallest data/pointer section sizes that still cover
        // every element's non-zero content.
        for (auto i: kj::zeroTo(value.elementCount)) {
          auto element = value.getStructElement(i);

          // Truncate the data section
          auto data = element.getDataSectionAsBlob();
          auto end = data.end();
          while (end > data.begin() && end[-1] == 0) --end;
          dataSize = kj::max(dataSize, roundBytesUpToWords(
              intervalLength(data.begin(), end, MAX_STUCT_DATA_WORDS * BYTES_PER_WORD)));

          // Truncate pointer section
          const WirePointer* ptr = element.pointers + element.pointerCount;
          while (ptr > element.pointers && ptr[-1].isNull()) --ptr;
          ptrCount = kj::max(ptrCount,
              intervalLength(element.pointers, ptr, MAX_STRUCT_POINTER_COUNT));
        }
        auto newTotalSize = (dataSize + upgradeBound<uint64_t>(ptrCount) * WORDS_PER_POINTER)
            / ELEMENTS * value.elementCount;
        KJ_ASSERT(newTotalSize <= totalSize);  // we've only removed data!
        totalSize = assumeMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(newTotalSize);
      } else {
        dataSize = declDataSize;
        ptrCount = declPointerCount;
      }

      KJ_DASSERT(value.structDataSize % BITS_PER_WORD == ZERO * BITS);
      word* ptr = allocate(ref, segment, capTable, totalSize + POINTER_SIZE_IN_WORDS,
                           WirePointer::LIST, orphanArena);
      ref->listRef.setInlineComposite(totalSize);

      // Write the tag word describing the (possibly truncated) element size.
      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, value.elementCount);
      tag->structRef.set(dataSize, ptrCount);
      word* dst = ptr + POINTER_SIZE_IN_WORDS;

      // Copy each element; source strides use the declared sizes, destination strides the
      // (possibly smaller) output sizes.
      const word* src = reinterpret_cast<const word*>(value.ptr);
      for (auto i KJ_UNUSED: kj::zeroTo(value.elementCount)) {
        copyMemory(dst, src, dataSize);
        dst += dataSize;
        src += declDataSize;

        for (auto j: kj::zeroTo(ptrCount)) {
          copyPointer(segment, capTable, reinterpret_cast<WirePointer*>(dst) + j,
              value.segment, value.capTable, reinterpret_cast<const WirePointer*>(src) + j,
              value.nestingLimit, nullptr, canonical);
        }
        dst += ptrCount * WORDS_PER_POINTER;
        src += declPointerCount * WORDS_PER_POINTER;
      }

      return { segment, ptr };
    }
  }
   1923 
   1924   static KJ_ALWAYS_INLINE(SegmentAnd<word*> copyPointer(
   1925       SegmentBuilder* dstSegment, CapTableBuilder* dstCapTable, WirePointer* dst,
   1926       SegmentReader* srcSegment, CapTableReader* srcCapTable, const WirePointer* src,
   1927       int nestingLimit, BuilderArena* orphanArena = nullptr,
   1928       bool canonical = false)) {
   1929     return copyPointer(dstSegment, dstCapTable, dst,
   1930                        srcSegment, srcCapTable, src, src->target(srcSegment),
   1931                        nestingLimit, orphanArena, canonical);
   1932   }
   1933 
  static SegmentAnd<word*> copyPointer(
      SegmentBuilder* dstSegment, CapTableBuilder* dstCapTable, WirePointer* dst,
      SegmentReader* srcSegment, CapTableReader* srcCapTable, const WirePointer* src,
      const word* srcTarget, int nestingLimit,
      BuilderArena* orphanArena = nullptr, bool canonical = false) {
    // Deep-copy the object pointed to by src into dst.  It turns out we can't reuse
    // readStructPointer(), etc. because they do type checking whereas here we want to accept any
    // valid pointer.
    //
    // Any validation failure jumps to useDefault, which clears `dst` and returns a null result.

    if (src->isNull()) {
    useDefault:
      // Copying a null pointer means clearing the destination (discarding whatever object it
      // previously pointed at).
      if (!dst->isNull()) {
        zeroObject(dstSegment, dstCapTable, dst);
        zeroMemory(dst);
      }
      return { dstSegment, nullptr };
    }

    const word* ptr;
    KJ_IF_MAYBE(p, WireHelpers::followFars(src, srcTarget, srcSegment)) {
      ptr = p;
    } else {
      // Broken far pointer (error already reported by followFars); treat as null.
      goto useDefault;
    }

    switch (src->kind()) {
      case WirePointer::STRUCT:
        KJ_REQUIRE(nestingLimit > 0,
              "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
          goto useDefault;
        }

        KJ_REQUIRE(boundsCheck(srcSegment, ptr, src->structRef.wordSize()),
                   "Message contained out-of-bounds struct pointer.") {
          goto useDefault;
        }
        // The struct's pointer section immediately follows its data section.
        return setStructPointer(dstSegment, dstCapTable, dst,
            StructReader(srcSegment, srcCapTable, ptr,
                         reinterpret_cast<const WirePointer*>(ptr + src->structRef.dataSize.get()),
                         src->structRef.dataSize.get() * BITS_PER_WORD,
                         src->structRef.ptrCount.get(),
                         nestingLimit - 1),
            orphanArena, canonical);

      case WirePointer::LIST: {
        ElementSize elementSize = src->listRef.elementSize();

        KJ_REQUIRE(nestingLimit > 0,
              "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
          goto useDefault;
        }

        if (elementSize == ElementSize::INLINE_COMPOSITE) {
          // An INLINE_COMPOSITE list is prefixed by a tag word, formatted like a struct pointer,
          // giving the element count and per-element struct size.
          auto wordCount = src->listRef.inlineCompositeWordCount();
          const WirePointer* tag = reinterpret_cast<const WirePointer*>(ptr);

          KJ_REQUIRE(boundsCheck(srcSegment, ptr, wordCount + POINTER_SIZE_IN_WORDS),
                     "Message contains out-of-bounds list pointer.") {
            goto useDefault;
          }

          // Skip the tag word; elements start immediately after it.
          ptr += POINTER_SIZE_IN_WORDS;

          KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
                     "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
            goto useDefault;
          }

          auto elementCount = tag->inlineCompositeListElementCount();
          auto wordsPerElement = tag->structRef.wordSize() / ELEMENTS;

          KJ_REQUIRE(wordsPerElement * upgradeBound<uint64_t>(elementCount) <= wordCount,
                     "INLINE_COMPOSITE list's elements overrun its word count.") {
            goto useDefault;
          }

          if (wordsPerElement * (ONE * ELEMENTS) == ZERO * WORDS) {
            // Watch out for lists of zero-sized structs, which can claim to be arbitrarily large
            // without having sent actual data.
            KJ_REQUIRE(amplifiedRead(srcSegment, elementCount * (ONE * WORDS / ELEMENTS)),
                       "Message contains amplified list pointer.") {
              goto useDefault;
            }
          }

          return setListPointer(dstSegment, dstCapTable, dst,
              ListReader(srcSegment, srcCapTable, ptr,
                         elementCount, wordsPerElement * BITS_PER_WORD,
                         tag->structRef.dataSize.get() * BITS_PER_WORD,
                         tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE,
                         nestingLimit - 1),
              orphanArena, canonical);
        } else {
          // Primitive or pointer list: element layout is implied entirely by the element size.
          auto dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
          auto pointerCount = pointersPerElement(elementSize) * ELEMENTS;
          auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
          auto elementCount = src->listRef.elementCount();
          auto wordCount = roundBitsUpToWords(upgradeBound<uint64_t>(elementCount) * step);

          KJ_REQUIRE(boundsCheck(srcSegment, ptr, wordCount),
                     "Message contains out-of-bounds list pointer.") {
            goto useDefault;
          }

          if (elementSize == ElementSize::VOID) {
            // Watch out for lists of void, which can claim to be arbitrarily large without having
            // sent actual data.
            KJ_REQUIRE(amplifiedRead(srcSegment, elementCount * (ONE * WORDS / ELEMENTS)),
                       "Message contains amplified list pointer.") {
              goto useDefault;
            }
          }

          return setListPointer(dstSegment, dstCapTable, dst,
              ListReader(srcSegment, srcCapTable, ptr, elementCount, step, dataSize, pointerCount,
                         elementSize, nestingLimit - 1),
              orphanArena, canonical);
        }
      }

      case WirePointer::FAR:
        // followFars() already resolved any far pointer, so a FAR kind here is malformed input.
        KJ_FAIL_REQUIRE("Unexpected FAR pointer.") {
          goto useDefault;
        }

      case WirePointer::OTHER: {
        KJ_REQUIRE(src->isCapability(), "Unknown pointer type.") {
          goto useDefault;
        }

        if (canonical) {
          // Canonical form is defined only for plain data; capabilities cannot be canonicalized.
          KJ_FAIL_REQUIRE("Cannot create a canonical message with a capability") {
            break;
          }
        }
#if !CAPNP_LITE
        KJ_IF_MAYBE(cap, srcCapTable->extractCap(src->capRef.index.get())) {
          setCapabilityPointer(dstSegment, dstCapTable, dst, kj::mv(*cap));
          // Return dummy non-null pointer so OrphanBuilder doesn't end up null.
          return { dstSegment, reinterpret_cast<word*>(1) };
        } else {
#endif  // !CAPNP_LITE
          KJ_FAIL_REQUIRE("Message contained invalid capability pointer.") {
            goto useDefault;
          }
#if !CAPNP_LITE
        }
#endif  // !CAPNP_LITE
      }
    }

    KJ_UNREACHABLE;
  }
   2087 
  static void adopt(SegmentBuilder* segment, CapTableBuilder* capTable,
                    WirePointer* ref, OrphanBuilder&& value) {
    // Install the orphaned object `value` into the pointer slot `ref`, discarding whatever `ref`
    // previously pointed at.  On return, `value` is null and no longer owns the object.
    KJ_REQUIRE(value.segment == nullptr || value.segment->getArena() == segment->getArena(),
               "Adopted object must live in the same message.");

    if (!ref->isNull()) {
      // Discard (zero) the object this pointer previously referenced.
      zeroObject(segment, capTable, ref);
    }

    if (value == nullptr) {
      // Set null.
      zeroMemory(ref);
    } else if (value.tagAsPtr()->isPositional()) {
      // Positional (struct/list) pointers encode a relative offset, so the pointer must be
      // recomputed for its new location; transferPointer() handles that.
      WireHelpers::transferPointer(segment, ref, value.segment, value.tagAsPtr(), value.location);
    } else {
      // FAR and OTHER pointers are position-independent, so we can just copy.
      copyMemory(ref, value.tagAsPtr());
    }

    // Take ownership away from the OrphanBuilder.
    zeroMemory(value.tagAsPtr());
    value.location = nullptr;
    value.segment = nullptr;
  }
   2112 
  static OrphanBuilder disown(SegmentBuilder* segment, CapTableBuilder* capTable,
                              WirePointer* ref) {
    // Detach the object `ref` points at from the message, returning an OrphanBuilder that owns
    // it.  `ref` itself is zeroed.
    word* location;

    if (ref->isNull()) {
      location = nullptr;
    } else if (ref->kind() == WirePointer::OTHER) {
      // Capabilities have no object content in the segment; only the cap-table index matters.
      KJ_REQUIRE(ref->isCapability(), "Unknown pointer type.") { break; }
      location = reinterpret_cast<word*>(1);  // dummy so that it is non-null
    } else {
      WirePointer* refCopy = ref;
      location = followFarsNoWritableCheck(refCopy, ref->target(), segment);
    }

    OrphanBuilder result(ref, segment, capTable, location);

    if (!ref->isNull() && ref->isPositional()) {
      // Record the object's kind in the orphan's tag; the positional offset is meaningless once
      // the object is detached.
      result.tagAsPtr()->setKindForOrphan(ref->kind());
    }

    // Zero out the pointer that was disowned.
    zeroMemory(ref);

    return result;
  }
   2138 
   2139   // -----------------------------------------------------------------
   2140 
   2141   static KJ_ALWAYS_INLINE(StructReader readStructPointer(
   2142       SegmentReader* segment, CapTableReader* capTable,
   2143       const WirePointer* ref, const word* defaultValue,
   2144       int nestingLimit)) {
   2145     return readStructPointer(segment, capTable, ref, ref->target(segment),
   2146                              defaultValue, nestingLimit);
   2147   }
   2148 
  static KJ_ALWAYS_INLINE(StructReader readStructPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* refTarget,
      const word* defaultValue, int nestingLimit)) {
    // Read and validate a struct pointer, returning a StructReader for its target.  Any
    // validation failure falls back to `defaultValue`, or to an empty StructReader if there is
    // no usable default.
    if (ref->isNull()) {
    useDefault:
      // All validation failures land here.
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return StructReader();
      }
      // Restart, reading the default value instead.  The default lives outside any message
      // segment, hence segment is cleared (bounds checks presumably accept a null segment --
      // see boundsCheck()).
      segment = nullptr;
      ref = reinterpret_cast<const WirePointer*>(defaultValue);
      refTarget = ref->target(segment);
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    KJ_REQUIRE(nestingLimit > 0,
               "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
      goto useDefault;
    }

    const word* ptr;
    KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
      ptr = p;
    } else {
      // Broken far pointer (error already reported); treat as null.
      goto useDefault;
    }

    KJ_REQUIRE(ref->kind() == WirePointer::STRUCT,
               "Message contains non-struct pointer where struct pointer was expected.") {
      goto useDefault;
    }

    KJ_REQUIRE(boundsCheck(segment, ptr, ref->structRef.wordSize()),
               "Message contained out-of-bounds struct pointer.") {
      goto useDefault;
    }

    // The struct's pointer section immediately follows its data section.
    return StructReader(
        segment, capTable,
        ptr, reinterpret_cast<const WirePointer*>(ptr + ref->structRef.dataSize.get()),
        ref->structRef.dataSize.get() * BITS_PER_WORD,
        ref->structRef.ptrCount.get(),
        nestingLimit - 1);
  }
   2194 
   2195 #if !CAPNP_LITE
  static KJ_ALWAYS_INLINE(kj::Own<ClientHook> readCapabilityPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, int nestingLimit)) {
    // Resolve a capability pointer to a ClientHook via the message's capability table.  Invalid
    // or mistyped pointers produce a "broken" capability (one that reports an error when called)
    // if the error callback recovers instead of throwing.
    kj::Maybe<kj::Own<ClientHook>> maybeCap;
    // TODO(cleanup): `maybeCap` appears unused in this function.

    auto brokenCapFactory = readGlobalBrokenCapFactoryForLayoutCpp();

    KJ_REQUIRE(brokenCapFactory != nullptr,
               "Trying to read capabilities without ever having created a capability context.  "
               "To read capabilities from a message, you must imbue it with CapReaderContext, or "
               "use the Cap'n Proto RPC system.");

    if (ref->isNull()) {
      return brokenCapFactory->newNullCap();
    } else if (!ref->isCapability()) {
      // Wrong pointer kind.  If the error callback recovers (break) rather than throws, return a
      // broken cap.
      KJ_FAIL_REQUIRE(
          "Message contains non-capability pointer where capability pointer was expected.") {
        break;
      }
      return brokenCapFactory->newBrokenCap(
          "Calling capability extracted from a non-capability pointer.");
    } else KJ_IF_MAYBE(cap, capTable->extractCap(ref->capRef.index.get())) {
      return kj::mv(*cap);
    } else {
      // The index is not present in the cap table.
      KJ_FAIL_REQUIRE("Message contains invalid capability pointer.") {
        break;
      }
      return brokenCapFactory->newBrokenCap("Calling invalid capability pointer.");
    }
  }
   2226 #endif  // !CAPNP_LITE
   2227 
   2228   static KJ_ALWAYS_INLINE(ListReader readListPointer(
   2229       SegmentReader* segment, CapTableReader* capTable,
   2230       const WirePointer* ref, const word* defaultValue,
   2231       ElementSize expectedElementSize, int nestingLimit, bool checkElementSize = true)) {
   2232     return readListPointer(segment, capTable, ref, ref->target(segment), defaultValue,
   2233                            expectedElementSize, nestingLimit, checkElementSize);
   2234   }
   2235 
  static KJ_ALWAYS_INLINE(ListReader readListPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* refTarget,
      const word* defaultValue, ElementSize expectedElementSize, int nestingLimit,
      bool checkElementSize = true)) {
    // Read and validate a list pointer, returning a ListReader for its target.  Any validation
    // failure falls back to `defaultValue` (or an empty list if there is no usable default).
    // When `checkElementSize` is false, the compatibility checks against `expectedElementSize`
    // are skipped.
    if (ref->isNull()) {
    useDefault:
      // All validation failures land here.
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListReader(expectedElementSize);
      }
      // Restart, reading the default value, which lives outside any message segment (hence
      // segment is cleared).
      segment = nullptr;
      ref = reinterpret_cast<const WirePointer*>(defaultValue);
      refTarget = ref->target(segment);
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    KJ_REQUIRE(nestingLimit > 0,
               "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
      goto useDefault;
    }

    const word* ptr;
    KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
      ptr = p;
    } else {
      // Broken far pointer (error already reported); treat as null.
      goto useDefault;
    }

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
               "Message contains non-list pointer where list pointer was expected.") {
      goto useDefault;
    }

    ElementSize elementSize = ref->listRef.elementSize();
    if (elementSize == ElementSize::INLINE_COMPOSITE) {
      auto wordCount = ref->listRef.inlineCompositeWordCount();

      // An INLINE_COMPOSITE list points to a tag, which is formatted like a pointer.
      const WirePointer* tag = reinterpret_cast<const WirePointer*>(ptr);

      KJ_REQUIRE(boundsCheck(segment, ptr, wordCount + POINTER_SIZE_IN_WORDS),
                 "Message contains out-of-bounds list pointer.") {
        goto useDefault;
      }

      // Skip the tag word; elements start immediately after it.
      ptr += POINTER_SIZE_IN_WORDS;

      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
                 "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
        goto useDefault;
      }

      auto size = tag->inlineCompositeListElementCount();
      auto wordsPerElement = tag->structRef.wordSize() / ELEMENTS;

      KJ_REQUIRE(upgradeBound<uint64_t>(size) * wordsPerElement <= wordCount,
                 "INLINE_COMPOSITE list's elements overrun its word count.") {
        goto useDefault;
      }

      if (wordsPerElement * (ONE * ELEMENTS) == ZERO * WORDS) {
        // Watch out for lists of zero-sized structs, which can claim to be arbitrarily large
        // without having sent actual data.
        KJ_REQUIRE(amplifiedRead(segment, size * (ONE * WORDS / ELEMENTS)),
                   "Message contains amplified list pointer.") {
          goto useDefault;
        }
      }

      if (checkElementSize) {
        // If a struct list was not expected, then presumably a non-struct list was upgraded to a
        // struct list. We need to manipulate the pointer to point at the first field of the
        // struct. Together with the `step` field, this will allow the struct list to be accessed
        // as if it were a primitive list without branching.

        // Check whether the size is compatible.
        switch (expectedElementSize) {
          case ElementSize::VOID:
            // Anything can be read as a list of void.
            break;

          case ElementSize::BIT:
            KJ_FAIL_REQUIRE(
                "Found struct list where bit list was expected; upgrading boolean lists to structs "
                "is no longer supported.") {
              goto useDefault;
            }
            break;

          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES:
            // A primitive read requires each struct to have at least one data word to read from.
            KJ_REQUIRE(tag->structRef.dataSize.get() > ZERO * WORDS,
                       "Expected a primitive list, but got a list of pointer-only structs.") {
              goto useDefault;
            }
            break;

          case ElementSize::POINTER:
            // We expected a list of pointers but got a list of structs.  Assuming the first field
            // in the struct is the pointer we were looking for, we want to munge the pointer to
            // point at the first element's pointer section.
            ptr += tag->structRef.dataSize.get();
            KJ_REQUIRE(tag->structRef.ptrCount.get() > ZERO * POINTERS,
                       "Expected a pointer list, but got a list of data-only structs.") {
              goto useDefault;
            }
            break;

          case ElementSize::INLINE_COMPOSITE:
            break;
        }
      }

      return ListReader(
          segment, capTable, ptr, size, wordsPerElement * BITS_PER_WORD,
          tag->structRef.dataSize.get() * BITS_PER_WORD,
          tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE,
          nestingLimit - 1);

    } else {
      // This is a primitive or pointer list, but all such lists can also be interpreted as struct
      // lists.  We need to compute the data size and pointer count for such structs.
      auto dataSize = dataBitsPerElement(ref->listRef.elementSize()) * ELEMENTS;
      auto pointerCount = pointersPerElement(ref->listRef.elementSize()) * ELEMENTS;
      auto elementCount = ref->listRef.elementCount();
      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;

      auto wordCount = roundBitsUpToWords(upgradeBound<uint64_t>(elementCount) * step);
      KJ_REQUIRE(boundsCheck(segment, ptr, wordCount),
            "Message contains out-of-bounds list pointer.") {
        goto useDefault;
      }

      if (elementSize == ElementSize::VOID) {
        // Watch out for lists of void, which can claim to be arbitrarily large without having sent
        // actual data.
        KJ_REQUIRE(amplifiedRead(segment, elementCount * (ONE * WORDS / ELEMENTS)),
                   "Message contains amplified list pointer.") {
          goto useDefault;
        }
      }

      if (checkElementSize) {
        if (elementSize == ElementSize::BIT && expectedElementSize != ElementSize::BIT) {
          KJ_FAIL_REQUIRE(
              "Found bit list where struct list was expected; upgrading boolean lists to structs "
              "is no longer supported.") {
            goto useDefault;
          }
        }

        // Verify that the elements are at least as large as the expected type.  Note that if we
        // expected INLINE_COMPOSITE, the expected sizes here will be zero, because bounds checking
        // will be performed at field access time.  So this check here is for the case where we
        // expected a list of some primitive or pointer type.

        BitCount expectedDataBitsPerElement =
            dataBitsPerElement(expectedElementSize) * ELEMENTS;
        WirePointerCount expectedPointersPerElement =
            pointersPerElement(expectedElementSize) * ELEMENTS;

        KJ_REQUIRE(expectedDataBitsPerElement <= dataSize,
                   "Message contained list with incompatible element type.") {
          goto useDefault;
        }
        KJ_REQUIRE(expectedPointersPerElement <= pointerCount,
                   "Message contained list with incompatible element type.") {
          goto useDefault;
        }
      }

      return ListReader(segment, capTable, ptr, elementCount, step,
                        dataSize, pointerCount, elementSize, nestingLimit - 1);
    }
  }
   2413 
   2414   static KJ_ALWAYS_INLINE(Text::Reader readTextPointer(
   2415       SegmentReader* segment, const WirePointer* ref,
   2416       const void* defaultValue, ByteCount defaultSize)) {
   2417     return readTextPointer(segment, ref, ref->target(segment), defaultValue, defaultSize);
   2418   }
   2419 
  static KJ_ALWAYS_INLINE(Text::Reader readTextPointer(
      SegmentReader* segment, const WirePointer* ref, const word* refTarget,
      const void* defaultValue, ByteCount defaultSize)) {
    // Read and validate a text pointer (a byte list that must be NUL-terminated).  Any
    // validation failure falls back to `defaultValue`, or to the empty string if no default was
    // given.
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr) defaultValue = "";
      return Text::Reader(reinterpret_cast<const char*>(defaultValue),
          unbound(defaultSize / BYTES));
    } else {
      const word* ptr;
      KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
        ptr = p;
      } else {
        // Broken far pointer (error already reported); treat as null.
        goto useDefault;
      }

      // On the wire, the element count includes the NUL terminator.
      auto size = ref->listRef.elementCount() * (ONE * BYTES / ELEMENTS);

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
                 "Message contains non-list pointer where text was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
                 "Message contains list pointer of non-bytes where text was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(boundsCheck(segment, ptr, roundBytesUpToWords(size)),
                 "Message contained out-of-bounds text pointer.") {
        goto useDefault;
      }

      // A zero-length byte list cannot contain the required NUL terminator.
      KJ_REQUIRE(size > ZERO * BYTES, "Message contains text that is not NUL-terminated.") {
        goto useDefault;
      }

      const char* cptr = reinterpret_cast<const char*>(ptr);
      // The returned size excludes the NUL terminator, which must be present as the last byte.
      uint unboundedSize = unbound(size / BYTES) - 1;

      KJ_REQUIRE(cptr[unboundedSize] == '\0', "Message contains text that is not NUL-terminated.") {
        goto useDefault;
      }

      return Text::Reader(cptr, unboundedSize);
    }
  }
   2467 
   2468   static KJ_ALWAYS_INLINE(Data::Reader readDataPointer(
   2469       SegmentReader* segment, const WirePointer* ref,
   2470       const void* defaultValue, BlobSize defaultSize)) {
   2471     return readDataPointer(segment, ref, ref->target(segment), defaultValue, defaultSize);
   2472   }
   2473 
  static KJ_ALWAYS_INLINE(Data::Reader readDataPointer(
      SegmentReader* segment, const WirePointer* ref, const word* refTarget,
      const void* defaultValue, BlobSize defaultSize)) {
    // Read and validate a data (byte blob) pointer.  Any validation failure falls back to
    // `defaultValue`/`defaultSize`.
    if (ref->isNull()) {
    useDefault:
      return Data::Reader(reinterpret_cast<const byte*>(defaultValue),
          unbound(defaultSize / BYTES));
    } else {
      const word* ptr;
      KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
        ptr = p;
      } else {
        // Broken far pointer (error already reported); treat as null.
        goto useDefault;
      }

      // NOTE(review): followFars() just produced `ptr` from a non-null Maybe, so this extra
      // null check looks redundant (readTextPointer above has no equivalent) -- presumably
      // defensive/historical.
      if (KJ_UNLIKELY(ptr == nullptr)) {
        // Already reported error.
        goto useDefault;
      }

      auto size = ref->listRef.elementCount() * (ONE * BYTES / ELEMENTS);

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
                 "Message contains non-list pointer where data was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
                 "Message contains list pointer of non-bytes where data was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(boundsCheck(segment, ptr, roundBytesUpToWords(size)),
                 "Message contained out-of-bounds data pointer.") {
        goto useDefault;
      }

      return Data::Reader(reinterpret_cast<const byte*>(ptr), unbound(size / BYTES));
    }
  }
   2514 };
   2515 
   2516 // =======================================================================================
   2517 // PointerBuilder
   2518 
StructBuilder PointerBuilder::initStruct(StructSize size) {
  // Overwrite this pointer with a newly-allocated struct of the given size.
  return WireHelpers::initStructPointer(pointer, segment, capTable, size);
}
   2522 
StructBuilder PointerBuilder::getStruct(StructSize size, const word* defaultValue) {
  // Get a writable view of the struct this pointer targets; delegates to
  // WireHelpers::getWritableStructPointer (which handles null pointers via `defaultValue`).
  return WireHelpers::getWritableStructPointer(pointer, segment, capTable, size, defaultValue);
}
   2526 
ListBuilder PointerBuilder::initList(ElementSize elementSize, ElementCount elementCount) {
  // Overwrite this pointer with a newly-allocated primitive/pointer list.
  return WireHelpers::initListPointer(pointer, segment, capTable, elementCount, elementSize);
}
   2530 
ListBuilder PointerBuilder::initStructList(ElementCount elementCount, StructSize elementSize) {
  // Overwrite this pointer with a newly-allocated struct (inline-composite) list.
  return WireHelpers::initStructListPointer(pointer, segment, capTable, elementCount, elementSize);
}
   2534 
ListBuilder PointerBuilder::getList(ElementSize elementSize, const word* defaultValue) {
  // Get a writable view of the list this pointer targets, expecting `elementSize` elements;
  // delegates to WireHelpers::getWritableListPointer.
  return WireHelpers::getWritableListPointer(pointer, segment, capTable, elementSize, defaultValue);
}
   2538 
ListBuilder PointerBuilder::getStructList(StructSize elementSize, const word* defaultValue) {
  // Get a writable view of a struct list; delegates to WireHelpers::getWritableStructListPointer.
  return WireHelpers::getWritableStructListPointer(
      pointer, segment, capTable, elementSize, defaultValue);
}
   2543 
ListBuilder PointerBuilder::getListAnySize(const word* defaultValue) {
  // Get a writable view of the target list regardless of its element size.
  return WireHelpers::getWritableListPointerAnySize(pointer, segment, capTable, defaultValue);
}
   2547 
template <>
Text::Builder PointerBuilder::initBlob<Text>(ByteCount size) {
  // Allocate a new text blob of `size` bytes, enforcing MAX_TEXT_SIZE via
  // assertMax(..., ThrowOverflow()).
  return WireHelpers::initTextPointer(pointer, segment, capTable,
      assertMax<MAX_TEXT_SIZE>(size, ThrowOverflow())).value;
}
template <>
void PointerBuilder::setBlob<Text>(Text::Reader value) {
  // Copy the given text into this pointer slot.
  WireHelpers::setTextPointer(pointer, segment, capTable, value);
}
template <>
Text::Builder PointerBuilder::getBlob<Text>(const void* defaultValue, ByteCount defaultSize) {
  // Get a writable view of the text blob; the default size is validated against MAX_TEXT_SIZE.
  return WireHelpers::getWritableTextPointer(pointer, segment, capTable, defaultValue,
      assertMax<MAX_TEXT_SIZE>(defaultSize, ThrowOverflow()));
}
   2562 
template <>
Data::Builder PointerBuilder::initBlob<Data>(ByteCount size) {
  // Allocate a new data blob of `size` bytes, enforcing the BLOB_SIZE_BITS limit via
  // assertMaxBits(..., ThrowOverflow()).
  return WireHelpers::initDataPointer(pointer, segment, capTable,
      assertMaxBits<BLOB_SIZE_BITS>(size, ThrowOverflow())).value;
}
template <>
void PointerBuilder::setBlob<Data>(Data::Reader value) {
  // Copy the given data bytes into this pointer slot.
  WireHelpers::setDataPointer(pointer, segment, capTable, value);
}
template <>
Data::Builder PointerBuilder::getBlob<Data>(const void* defaultValue, ByteCount defaultSize) {
  // Get a writable view of the data blob; the default size is validated against BLOB_SIZE_BITS.
  return WireHelpers::getWritableDataPointer(pointer, segment, capTable, defaultValue,
      assertMaxBits<BLOB_SIZE_BITS>(defaultSize, ThrowOverflow()));
}
   2577 
void PointerBuilder::setStruct(const StructReader& value, bool canonical) {
  // Deep-copy `value` into this pointer slot (see WireHelpers::setStructPointer for the
  // `canonical` semantics).
  WireHelpers::setStructPointer(segment, capTable, pointer, value, nullptr, canonical);
}
   2581 
void PointerBuilder::setList(const ListReader& value, bool canonical) {
  // Deep-copy `value` into this pointer slot (see WireHelpers::setListPointer for the
  // `canonical` semantics).
  WireHelpers::setListPointer(segment, capTable, pointer, value, nullptr, canonical);
}
   2585 
   2586 #if !CAPNP_LITE
kj::Own<ClientHook> PointerBuilder::getCapability() {
  // Read this pointer as a capability; the nesting limit is effectively unlimited here.
  return WireHelpers::readCapabilityPointer(
      segment, capTable, pointer, kj::maxValue);
}
   2591 
void PointerBuilder::setCapability(kj::Own<ClientHook>&& cap) {
  // Store the given capability into this pointer slot.
  WireHelpers::setCapabilityPointer(segment, capTable, pointer, kj::mv(cap));
}
   2595 #endif  // !CAPNP_LITE
   2596 
void PointerBuilder::adopt(OrphanBuilder&& value) {
  // Take ownership of the orphaned object, installing it at this pointer location.
  WireHelpers::adopt(segment, capTable, pointer, kj::mv(value));
}
   2600 
OrphanBuilder PointerBuilder::disown() {
  // Detach the pointed-to object from the message, returning an orphan that owns it.
  return WireHelpers::disown(segment, capTable, pointer);
}
   2604 
void PointerBuilder::clear() {
  // Discard (zero) the pointed-to object, then null out the pointer itself.
  WireHelpers::zeroObject(segment, capTable, pointer);
  WireHelpers::zeroMemory(pointer);
}
   2609 
   2610 PointerType PointerBuilder::getPointerType() const {
   2611   if(pointer->isNull()) {
   2612     return PointerType::NULL_;
   2613   } else {
   2614     WirePointer* ptr = pointer;
   2615     SegmentBuilder* sgmt = segment;
   2616     WireHelpers::followFars(ptr, ptr->target(), sgmt);
   2617     switch(ptr->kind()) {
   2618       case WirePointer::FAR:
   2619         KJ_FAIL_ASSERT("far pointer not followed?");
   2620       case WirePointer::STRUCT:
   2621         return PointerType::STRUCT;
   2622       case WirePointer::LIST:
   2623         return PointerType::LIST;
   2624       case WirePointer::OTHER:
   2625         KJ_REQUIRE(ptr->isCapability(), "unknown pointer type");
   2626         return PointerType::CAPABILITY;
   2627     }
   2628     KJ_UNREACHABLE;
   2629   }
   2630 }
   2631 
void PointerBuilder::transferFrom(PointerBuilder other) {
  // Moves the object referenced by `other` into this pointer, destroying any
  // object this pointer previously referenced. `other`'s pointer is zeroed
  // afterwards since it no longer owns the target.
  if (!pointer->isNull()) {
    WireHelpers::zeroObject(segment, capTable, pointer);
    WireHelpers::zeroMemory(pointer);
  }
  WireHelpers::transferPointer(segment, pointer, other.segment, other.pointer);
  WireHelpers::zeroMemory(other.pointer);
}
   2640 
void PointerBuilder::copyFrom(PointerReader other, bool canonical) {
  // Deep-copies the object referenced by `other` over this pointer. A reader
  // with no pointer at all simply clears this pointer. `canonical` requests
  // canonical-form encoding of the copy.
  if (other.pointer == nullptr) {
    if (!pointer->isNull()) {
      // Destroy the existing target before nulling the pointer.
      WireHelpers::zeroObject(segment, capTable, pointer);
      WireHelpers::zeroMemory(pointer);
    }
  } else {
    WireHelpers::copyPointer(segment, capTable, pointer,
                             other.segment, other.capTable, other.pointer, other.nestingLimit,
                             nullptr,
                             canonical);
  }
}
   2654 
PointerReader PointerBuilder::asReader() const {
  // Creates a read-only view of this pointer with the nesting limit set to
  // the maximum value.
  return PointerReader(segment, capTable, pointer, kj::maxValue);
}
   2658 
BuilderArena* PointerBuilder::getArena() const {
  // Returns the arena owning this pointer's segment.
  return segment->getArena();
}
   2662 
CapTableBuilder* PointerBuilder::getCapTable() {
  // Returns the capability table through which this builder resolves caps.
  return capTable;
}
   2666 
   2667 PointerBuilder PointerBuilder::imbue(CapTableBuilder* capTable) {
   2668   auto result = *this;
   2669   result.capTable = capTable;
   2670   return result;
   2671 }
   2672 
   2673 // =======================================================================================
   2674 // PointerReader
   2675 
PointerReader PointerReader::getRoot(SegmentReader* segment, CapTableReader* capTable,
                                     const word* location, int nestingLimit) {
  // Constructs a reader for a message's root pointer, bounds-checking that
  // the pointer word itself lies within the segment. On failure the recovery
  // block nulls `location`, yielding a reader over a null root.
  KJ_REQUIRE(WireHelpers::boundsCheck(segment, location, POINTER_SIZE_IN_WORDS),
             "Root location out-of-bounds.") {
    location = nullptr;
  }

  return PointerReader(segment, capTable,
      reinterpret_cast<const WirePointer*>(location), nestingLimit);
}
   2686 
StructReader PointerReader::getStruct(const word* defaultValue) const {
  // Reads the struct this pointer targets, substituting the shared all-zero
  // pointer (and thus `defaultValue`) when this reader holds no pointer.
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readStructPointer(segment, capTable, ref, defaultValue, nestingLimit);
}
   2691 
ListReader PointerReader::getList(ElementSize expectedElementSize, const word* defaultValue) const {
  // Reads the list this pointer targets, validating it against
  // `expectedElementSize`. A missing pointer falls back to the shared
  // all-zero pointer and thus `defaultValue`.
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readListPointer(
      segment, capTable, ref, defaultValue, expectedElementSize, nestingLimit);
}
   2697 
   2698 ListReader PointerReader::getListAnySize(const word* defaultValue) const {
   2699   const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
   2700   return WireHelpers::readListPointer(
   2701       segment, capTable, ref, defaultValue, ElementSize::VOID /* dummy */, nestingLimit, false);
   2702 }
   2703 
template <>
Text::Reader PointerReader::getBlob<Text>(const void* defaultValue, ByteCount defaultSize) const {
  // Reads the text blob this pointer targets; a missing pointer falls back to
  // the shared all-zero pointer and thus (defaultValue, defaultSize).
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readTextPointer(segment, ref, defaultValue, defaultSize);
}
   2709 
template <>
Data::Reader PointerReader::getBlob<Data>(const void* defaultValue, ByteCount defaultSize) const {
  // Reads the data blob this pointer targets; a missing pointer falls back to
  // the shared all-zero pointer and thus the default. The default size is
  // checked against the wire format's blob size limit.
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readDataPointer(segment, ref, defaultValue,
      assertMaxBits<BLOB_SIZE_BITS>(defaultSize, ThrowOverflow()));
}
   2716 
   2717 #if !CAPNP_LITE
kj::Own<ClientHook> PointerReader::getCapability() const {
  // Resolves this pointer as a capability via the cap table; a missing
  // pointer falls back to the shared all-zero pointer.
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readCapabilityPointer(segment, capTable, ref, nestingLimit);
}
   2722 #endif  // !CAPNP_LITE
   2723 
const word* PointerReader::getUnchecked() const {
  // Returns the raw pointer location. Only permitted on unchecked
  // (segment-less) messages, where the pointer is a plain memory address.
  KJ_REQUIRE(segment == nullptr, "getUncheckedPointer() only allowed on unchecked messages.");
  return reinterpret_cast<const word*>(pointer);
}
   2728 
MessageSizeCounts PointerReader::targetSize() const {
  // Computes the total message size reachable from this pointer. A missing
  // pointer contributes zero words and zero capabilities.
  return pointer == nullptr ? MessageSizeCounts { ZERO * WORDS, 0 }
                            : WireHelpers::totalSize(segment, pointer, nestingLimit);
}
   2733 
PointerType PointerReader::getPointerType() const {
  // Classifies this pointer as NULL_, STRUCT, LIST, or CAPABILITY, following
  // far pointers first. All failure paths recover by reporting NULL_ so that
  // malformed input never throws from here.
  if(pointer == nullptr || pointer->isNull()) {
    return PointerType::NULL_;
  } else {
    const WirePointer* ptr = pointer;
    const word* refTarget = ptr->target(segment);
    SegmentReader* sgmt = segment;
    // A null result means followFars() could not resolve the target; treat
    // the pointer as null.
    if (WireHelpers::followFars(ptr, refTarget, sgmt) == nullptr) return PointerType::NULL_;
    switch(ptr->kind()) {
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("far pointer not followed?") { return PointerType::NULL_; }
      case WirePointer::STRUCT:
        return PointerType::STRUCT;
      case WirePointer::LIST:
        return PointerType::LIST;
      case WirePointer::OTHER:
        KJ_REQUIRE(ptr->isCapability(), "unknown pointer type") { return PointerType::NULL_; }
        return PointerType::CAPABILITY;
    }
    KJ_UNREACHABLE;
  }
}
   2756 
kj::Maybe<Arena&> PointerReader::getArena() const {
  // Returns the arena owning this reader's segment, or nullptr for unchecked
  // (segment-less) readers.
  return segment == nullptr ? nullptr : segment->getArena();
}
   2760 
CapTableReader* PointerReader::getCapTable() {
  // Returns the capability table through which this reader resolves caps.
  return capTable;
}
   2764 
   2765 PointerReader PointerReader::imbue(CapTableReader* capTable) const {
   2766   auto result = *this;
   2767   result.capTable = capTable;
   2768   return result;
   2769 }
   2770 
bool PointerReader::isCanonical(const word **readHead) {
  // Returns true if the object this pointer references is encoded canonically
  // starting exactly at *readHead, advancing *readHead past the object on
  // success. Far and "other" pointers are never canonical; null pointers
  // always are.
  if (!this->pointer) {
    // The pointer is null, so we are canonical and do not read
    return true;
  }

  if (!this->pointer->isPositional()) {
    // The pointer is a FAR or OTHER pointer, and is non-canonical
    return false;
  }

  switch (this->getPointerType()) {
    case PointerType::NULL_:
      // The pointer is null, we are canonical and do not read
      return true;
    case PointerType::STRUCT: {
      bool dataTrunc = false, ptrTrunc = false;
      auto structReader = this->getStruct(nullptr);
      if (structReader.getDataSectionSize() == ZERO * BITS &&
          structReader.getPointerSectionSize() == ZERO * POINTERS) {
        // A zero-sized struct is canonical only if it points at its own
        // pointer word.
        return reinterpret_cast<const word*>(this->pointer) == structReader.getLocation();
      } else {
        // Fun fact: Once this call to isCanonical() returns, Clang may re-order the evaluation of
        //   the && operators. In theory this is wrong because && is short-circuiting, but Clang
        //   apparently sees that there are no side effects to the right of &&, so decides it is
        //   safe to skip short-circuiting. It turns out, though, this is observable under
        //   valgrind: if we don't initialize `dataTrunc` when declaring it above, then valgrind
        //   reports "Conditional jump or move depends on uninitialised value(s)". Specifically
        //   this happens in cases where structReader.isCanonical() returns false -- it is allowed
        //   to skip initializing `dataTrunc` in that case. The short-circuiting && should mean
        //   that we don't read `dataTrunc` in that case, except Clang's optimizations. Ultimately
        //   the uninitialized read is fine because eventually the whole expression evaluates false
        //   either way. But, to make valgrind happy, we initialize the bools above...
        return structReader.isCanonical(readHead, readHead, &dataTrunc, &ptrTrunc) && dataTrunc && ptrTrunc;
      }
    }
    case PointerType::LIST:
      return this->getListAnySize(nullptr).isCanonical(readHead, pointer);
    case PointerType::CAPABILITY:
      KJ_FAIL_ASSERT("Capabilities are not positional");
  }
  KJ_UNREACHABLE;
}
   2814 
   2815 // =======================================================================================
   2816 // StructBuilder
   2817 
   2818 void StructBuilder::clearAll() {
   2819   if (dataSize == ONE * BITS) {
   2820     setDataField<bool>(ONE * ELEMENTS, false);
   2821   } else {
   2822     WireHelpers::zeroMemory(reinterpret_cast<byte*>(data), dataSize / BITS_PER_BYTE);
   2823   }
   2824 
   2825   for (auto i: kj::zeroTo(pointerCount)) {
   2826     WireHelpers::zeroObject(segment, capTable, pointers + i);
   2827   }
   2828   WireHelpers::zeroMemory(pointers, pointerCount);
   2829 }
   2830 
void StructBuilder::transferContentFrom(StructBuilder other) {
  // Moves `other`'s content into this struct, zero-extending or truncating to
  // this struct's own section sizes. Pointers are transferred rather than
  // copied, so ownership of the pointed-to objects moves to this struct.

  // Determine the amount of data the builders have in common.
  auto sharedDataSize = kj::min(dataSize, other.dataSize);

  if (dataSize > sharedDataSize) {
    // Since the target is larger than the source, make sure to zero out the extra bits that the
    // source doesn't have.
    if (dataSize == ONE * BITS) {
      // Sub-byte (1-bit) data section: clear via the bool field accessor.
      setDataField<bool>(ZERO * ELEMENTS, false);
    } else {
      byte* unshared = reinterpret_cast<byte*>(data) + sharedDataSize / BITS_PER_BYTE;
      // Note: this subtraction can't fail due to the if() above
      WireHelpers::zeroMemory(unshared,
          subtractChecked(dataSize, sharedDataSize, []() {}) / BITS_PER_BYTE);
    }
  }

  // Copy over the shared part.
  if (sharedDataSize == ONE * BITS) {
    setDataField<bool>(ZERO * ELEMENTS, other.getDataField<bool>(ZERO * ELEMENTS));
  } else {
    WireHelpers::copyMemory(reinterpret_cast<byte*>(data),
                            reinterpret_cast<byte*>(other.data),
                            sharedDataSize / BITS_PER_BYTE);
  }

  // Zero out all pointers in the target.
  for (auto i: kj::zeroTo(pointerCount)) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  WireHelpers::zeroMemory(pointers, pointerCount);

  // Transfer the pointers.
  auto sharedPointerCount = kj::min(pointerCount, other.pointerCount);
  for (auto i: kj::zeroTo(sharedPointerCount)) {
    WireHelpers::transferPointer(segment, pointers + i, other.segment, other.pointers + i);
  }

  // Zero out the pointers that were transferred in the source because it no longer has ownership.
  // If the source had any extra pointers that the destination didn't have space for, we
  // intentionally leave them be, so that they'll be cleaned up later.
  WireHelpers::zeroMemory(other.pointers, sharedPointerCount);
}
   2874 
void StructBuilder::copyContentFrom(StructReader other) {
  // Deep-copies `other`'s data and pointer sections into this struct,
  // zero-extending or truncating to this struct's own section sizes. Copying
  // a struct onto itself is detected and treated as a no-op.

  // Determine the amount of data the builders have in common.
  auto sharedDataSize = kj::min(dataSize, other.dataSize);
  auto sharedPointerCount = kj::min(pointerCount, other.pointerCount);

  if ((sharedDataSize > ZERO * BITS && other.data == data) ||
      (sharedPointerCount > ZERO * POINTERS && other.pointers == pointers)) {
    // At least one of the section pointers is pointing to ourself. Verify that the other is too
    // (but ignore empty sections).
    KJ_ASSERT((sharedDataSize == ZERO * BITS || other.data == data) &&
              (sharedPointerCount == ZERO * POINTERS || other.pointers == pointers));
    // So `other` appears to be a reader for this same struct. No copying is needed.
    return;
  }

  if (dataSize > sharedDataSize) {
    // Since the target is larger than the source, make sure to zero out the extra bits that the
    // source doesn't have.
    if (dataSize == ONE * BITS) {
      // Sub-byte (1-bit) data section: clear via the bool field accessor.
      setDataField<bool>(ZERO * ELEMENTS, false);
    } else {
      byte* unshared = reinterpret_cast<byte*>(data) + sharedDataSize / BITS_PER_BYTE;
      WireHelpers::zeroMemory(unshared,
          subtractChecked(dataSize, sharedDataSize, []() {}) / BITS_PER_BYTE);
    }
  }

  // Copy over the shared part.
  if (sharedDataSize == ONE * BITS) {
    setDataField<bool>(ZERO * ELEMENTS, other.getDataField<bool>(ZERO * ELEMENTS));
  } else {
    WireHelpers::copyMemory(reinterpret_cast<byte*>(data),
                            reinterpret_cast<const byte*>(other.data),
                            sharedDataSize / BITS_PER_BYTE);
  }

  // Zero out all pointers in the target.
  for (auto i: kj::zeroTo(pointerCount)) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  WireHelpers::zeroMemory(pointers, pointerCount);

  // Copy the pointers.
  for (auto i: kj::zeroTo(sharedPointerCount)) {
    WireHelpers::copyPointer(segment, capTable, pointers + i,
        other.segment, other.capTable, other.pointers + i, other.nestingLimit);
  }
}
   2923 
StructReader StructBuilder::asReader() const {
  // Creates a read-only view of this struct with the nesting limit set to
  // the maximum value.
  return StructReader(segment, capTable, data, pointers,
      dataSize, pointerCount, kj::maxValue);
}
   2928 
BuilderArena* StructBuilder::getArena() {
  // Returns the arena owning this struct's segment.
  return segment->getArena();
}
   2932 
CapTableBuilder* StructBuilder::getCapTable() {
  // Returns the capability table through which this builder resolves caps.
  return capTable;
}
   2936 
   2937 StructBuilder StructBuilder::imbue(CapTableBuilder* capTable) {
   2938   auto result = *this;
   2939   result.capTable = capTable;
   2940   return result;
   2941 }
   2942 
   2943 // =======================================================================================
   2944 // StructReader
   2945 
MessageSizeCounts StructReader::totalSize() const {
  // Computes the total message size reachable from this struct: its own data
  // and pointer sections plus everything its pointers target, recursively.
  MessageSizeCounts result = {
    WireHelpers::roundBitsUpToWords(dataSize) + pointerCount * WORDS_PER_POINTER, 0 };

  for (auto i: kj::zeroTo(pointerCount)) {
    result += WireHelpers::totalSize(segment, pointers + i, nestingLimit);
  }

  if (segment != nullptr) {
    // This traversal should not count against the read limit, because it's highly likely that
    // the caller is going to traverse the object again, e.g. to copy it.
    segment->unread(result.wordCount);
  }

  return result;
}
   2962 
kj::Array<word> StructReader::canonicalize() {
  // Serializes this struct into a freshly-allocated single-segment message in
  // canonical form, returning a right-sized copy of the segment.
  auto size = totalSize().wordCount + POINTER_SIZE_IN_WORDS;
  kj::Array<word> backing = kj::heapArray<word>(unbound(size / WORDS));
  WireHelpers::zeroMemory(backing.asPtr());
  FlatMessageBuilder builder(backing);
  // setStruct(..., canonical = true) writes the canonical encoding.
  _::PointerHelpers<AnyPointer>::getInternalBuilder(builder.initRoot<AnyPointer>()).setStruct(*this, true);
  KJ_ASSERT(builder.isCanonical());
  // Copy only the words actually used into a right-sized array.
  auto output = builder.getSegmentsForOutput()[0];
  kj::Array<word> trunc = kj::heapArray<word>(output.size());
  WireHelpers::copyMemory(trunc.begin(), output);
  return trunc;
}
   2975 
CapTableReader* StructReader::getCapTable() {
  // Returns the capability table through which this reader resolves caps.
  return capTable;
}
   2979 
   2980 StructReader StructReader::imbue(CapTableReader* capTable) const {
   2981   auto result = *this;
   2982   result.capTable = capTable;
   2983   return result;
   2984 }
   2985 
bool StructReader::isCanonical(const word **readHead,
                               const word **ptrHead,
                               bool *dataTrunc,
                               bool *ptrTrunc) {
  // Verifies canonical encoding of this struct: it must sit exactly at
  // *readHead (preorder), have a word-aligned data section, and have only
  // canonical pointer targets (checked against *ptrHead). *dataTrunc /
  // *ptrTrunc report whether the final data word / final pointer is non-zero,
  // i.e. whether the sections carry no removable trailing zeros; the caller
  // combines these flags (e.g. across list elements). *readHead is advanced
  // past this struct's body.
  if (this->getLocation() != *readHead) {
    // Our target area is not at the readHead, preorder fails
    return false;
  }

  if (this->getDataSectionSize() % BITS_PER_WORD != ZERO * BITS) {
    // Using legacy non-word-size structs, reject
    return false;
  }
  auto dataSize = this->getDataSectionSize() / BITS_PER_WORD;

  // Mark whether the struct is properly truncated
  KJ_IF_MAYBE(diff, trySubtract(dataSize, ONE * WORDS)) {
    // Non-empty data section: the last data word must be non-zero.
    *dataTrunc = this->getDataField<uint64_t>(*diff / WORDS * ELEMENTS) != 0;
  } else {
    // Data segment empty.
    *dataTrunc = true;
  }

  KJ_IF_MAYBE(diff, trySubtract(this->pointerCount, ONE * POINTERS)) {
    // Non-empty pointer section: the last pointer must be non-null.
    *ptrTrunc  = !this->getPointerField(*diff).isNull();
  } else {
    *ptrTrunc = true;
  }

  // Advance the read head
  *readHead += (dataSize + (this->pointerCount * WORDS_PER_POINTER));

  // Check each pointer field for canonicity
  for (auto ptrIndex: kj::zeroTo(this->pointerCount)) {
    if (!this->getPointerField(ptrIndex).isCanonical(ptrHead)) {
      return false;
    }
  }

  return true;
}
   3027 
   3028 // =======================================================================================
   3029 // ListBuilder
   3030 
Text::Builder ListBuilder::asText() {
  // Reinterprets this list as Text. Requires a flat byte list (8-bit data
  // elements, no pointers) whose final byte is a NUL terminator; any failed
  // check recovers by returning an empty Text::Builder.
  KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
             "Expected Text, got list of non-bytes.") {
    return Text::Builder();
  }

  size_t size = unbound(elementCount / ELEMENTS);

  KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
    return Text::Builder();
  }

  char* cptr = reinterpret_cast<char*>(ptr);
  --size;  // NUL terminator

  KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
    return Text::Builder();
  }

  return Text::Builder(cptr, size);
}
   3052 
   3053 Data::Builder ListBuilder::asData() {
   3054   KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
   3055              "Expected Text, got list of non-bytes.") {
   3056     return Data::Builder();
   3057   }
   3058 
   3059   return Data::Builder(reinterpret_cast<byte*>(ptr), unbound(elementCount / ELEMENTS));
   3060 }
   3061 
StructBuilder ListBuilder::getStructElement(ElementCount index) {
  // Builds a StructBuilder for element `index`. `step` is the element stride
  // in bits; each element's pointer section begins immediately after its data
  // section.
  auto indexBit = upgradeBound<uint64_t>(index) * step;
  byte* structData = ptr + indexBit / BITS_PER_BYTE;
  // Struct list elements are expected to be byte-aligned.
  KJ_DASSERT(indexBit % BITS_PER_BYTE == ZERO * BITS);
  return StructBuilder(segment, capTable, structData,
      reinterpret_cast<WirePointer*>(structData + structDataSize / BITS_PER_BYTE),
      structDataSize, structPointerCount);
}
   3070 
ListReader ListBuilder::asReader() const {
  // Creates a read-only view of this list with the nesting limit set to the
  // maximum value.
  return ListReader(segment, capTable, ptr, elementCount, step, structDataSize, structPointerCount,
                    elementSize, kj::maxValue);
}
   3075 
BuilderArena* ListBuilder::getArena() {
  // Returns the arena owning this list's segment.
  return segment->getArena();
}
   3079 
CapTableBuilder* ListBuilder::getCapTable() {
  // Returns the capability table through which this builder resolves caps.
  return capTable;
}
   3083 
   3084 ListBuilder ListBuilder::imbue(CapTableBuilder* capTable) {
   3085   auto result = *this;
   3086   result.capTable = capTable;
   3087   return result;
   3088 }
   3089 
   3090 // =======================================================================================
   3091 // ListReader
   3092 
Text::Reader ListReader::asText() {
  // Reinterprets this list as Text. Requires a flat byte list (8-bit data
  // elements, no pointers) whose final byte is a NUL terminator; any failed
  // check recovers by returning an empty Text::Reader.
  KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
             "Expected Text, got list of non-bytes.") {
    return Text::Reader();
  }

  size_t size = unbound(elementCount / ELEMENTS);

  KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
    return Text::Reader();
  }

  const char* cptr = reinterpret_cast<const char*>(ptr);
  --size;  // NUL terminator

  KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
    return Text::Reader();
  }

  return Text::Reader(cptr, size);
}
   3114 
   3115 Data::Reader ListReader::asData() {
   3116   KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
   3117              "Expected Text, got list of non-bytes.") {
   3118     return Data::Reader();
   3119   }
   3120 
   3121   return Data::Reader(reinterpret_cast<const byte*>(ptr), unbound(elementCount / ELEMENTS));
   3122 }
   3123 
kj::ArrayPtr<const byte> ListReader::asRawBytes() const {
  // Returns the list's data section as raw bytes (rounded up from bits).
  // Only valid for lists with no pointer section; recovery returns an empty
  // array.
  KJ_REQUIRE(structPointerCount == ZERO * POINTERS,
             "Expected data only, got pointers.") {
    return kj::ArrayPtr<const byte>();
  }

  return arrayPtr(reinterpret_cast<const byte*>(ptr),
      WireHelpers::roundBitsUpToBytes(
          upgradeBound<uint64_t>(elementCount) * (structDataSize / ELEMENTS)));
}
   3134 
StructReader ListReader::getStructElement(ElementCount index) const {
  // Reads element `index` as a struct, consuming one level of the nesting
  // limit. `step` is the element stride in bits; each element's pointer
  // section begins immediately after its data section.
  KJ_REQUIRE(nestingLimit > 0,
             "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
    return StructReader();
  }

  auto indexBit = upgradeBound<uint64_t>(index) * step;
  const byte* structData = ptr + indexBit / BITS_PER_BYTE;
  const WirePointer* structPointers =
      reinterpret_cast<const WirePointer*>(structData + structDataSize / BITS_PER_BYTE);

  // Struct list elements are expected to be byte-aligned.
  KJ_DASSERT(indexBit % BITS_PER_BYTE == ZERO * BITS);
  return StructReader(
      segment, capTable, structData, structPointers,
      structDataSize, structPointerCount,
      nestingLimit - 1);
}
   3152 
MessageSizeCounts ListReader::totalSize() const {
  // Computes the total message size reachable from this list, dispatching on
  // the element encoding.
  // TODO(cleanup): This is kind of a lot of logic duplicated from WireHelpers::totalSize(), but
  //   it's unclear how to share it effectively.

  MessageSizeCounts result = { ZERO * WORDS, 0 };

  switch (elementSize) {
    case ElementSize::VOID:
      // Nothing.
      break;
    case ElementSize::BIT:
    case ElementSize::BYTE:
    case ElementSize::TWO_BYTES:
    case ElementSize::FOUR_BYTES:
    case ElementSize::EIGHT_BYTES:
      // Flat data list: just the element bits rounded up to whole words.
      result.addWords(WireHelpers::roundBitsUpToWords(
          upgradeBound<uint64_t>(elementCount) * dataBitsPerElement(elementSize)));
      break;
    case ElementSize::POINTER: {
      // Pointer list: count the pointer words plus each pointer's target.
      auto count = elementCount * (POINTERS / ELEMENTS);
      result.addWords(count * WORDS_PER_POINTER);

      for (auto i: kj::zeroTo(count)) {
        result += WireHelpers::totalSize(segment, reinterpret_cast<const WirePointer*>(ptr) + i,
                                         nestingLimit);
      }
      break;
    }
    case ElementSize::INLINE_COMPOSITE: {
      // Don't forget to count the tag word.
      auto wordSize = upgradeBound<uint64_t>(elementCount) * step / BITS_PER_WORD;
      result.addWords(wordSize + POINTER_SIZE_IN_WORDS);

      if (structPointerCount > ZERO * POINTERS) {
        // Walk each element, skipping its data section and recursing into
        // each of its pointer fields.
        const word* pos = reinterpret_cast<const word*>(ptr);
        for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
          pos += structDataSize / BITS_PER_WORD;

          for (auto j KJ_UNUSED: kj::zeroTo(structPointerCount)) {
            result += WireHelpers::totalSize(segment, reinterpret_cast<const WirePointer*>(pos),
                                             nestingLimit);
            pos += POINTER_SIZE_IN_WORDS;
          }
        }
      }
      break;
    }
  }

  if (segment != nullptr) {
    // This traversal should not count against the read limit, because it's highly likely that
    // the caller is going to traverse the object again, e.g. to copy it.
    segment->unread(result.wordCount);
  }

  return result;
}
   3210 
CapTableReader* ListReader::getCapTable() {
  // Returns the capability table through which this reader resolves caps.
  return capTable;
}
   3214 
   3215 ListReader ListReader::imbue(CapTableReader* capTable) const {
   3216   auto result = *this;
   3217   result.capTable = capTable;
   3218   return result;
   3219 }
   3220 
bool ListReader::isCanonical(const word **readHead, const WirePointer *ref) {
  // Checks canonical encoding of this list: its content must start exactly at
  // *readHead, its declared sizes must be consistent with the pointer `ref`,
  // composite/pointer elements must themselves be canonical, and any padding
  // bits in the final word of a data list must be zero. Advances *readHead
  // past the list content on success.
  switch (this->getElementSize()) {
    case ElementSize::INLINE_COMPOSITE: {
      // Account for the tag word that precedes the element data.
      *readHead += 1;
      if (reinterpret_cast<const word*>(this->ptr) != *readHead) {
        // The next word to read is the tag word, but the pointer is in
        // front of it, so our check is slightly different
        return false;
      }
      if (this->structDataSize % BITS_PER_WORD != ZERO * BITS) {
        return false;
      }
      auto elementSize = StructSize(this->structDataSize / BITS_PER_WORD,
                                    this->structPointerCount).total() / ELEMENTS;
      auto totalSize = upgradeBound<uint64_t>(this->elementCount) * elementSize;
      if (totalSize != ref->listRef.inlineCompositeWordCount()) {
        // Declared word count must match the actual element layout.
        return false;
      }
      if (elementSize == ZERO * WORDS / ELEMENTS) {
        return true;
      }
      // Element pointer targets are laid out after the whole list body.
      auto listEnd = *readHead + totalSize;
      auto pointerHead = listEnd;
      bool listDataTrunc = false;
      bool listPtrTrunc = false;
      for (auto ec: kj::zeroTo(this->elementCount)) {
        bool dataTrunc, ptrTrunc;
        if (!this->getStructElement(ec).isCanonical(readHead,
                                                    &pointerHead,
                                                    &dataTrunc,
                                                    &ptrTrunc)) {
          return false;
        }
        // Truncation only needs to hold for at least one element of the list.
        listDataTrunc |= dataTrunc;
        listPtrTrunc  |= ptrTrunc;
      }
      KJ_REQUIRE(*readHead == listEnd, *readHead, listEnd);
      *readHead = pointerHead;
      return listDataTrunc && listPtrTrunc;
    }
    case ElementSize::POINTER: {
      if (reinterpret_cast<const word*>(this->ptr) != *readHead) {
        return false;
      }
      // Skip the pointer words, then verify each target.
      *readHead += this->elementCount * (POINTERS / ELEMENTS) * WORDS_PER_POINTER;
      for (auto ec: kj::zeroTo(this->elementCount)) {
        if (!this->getPointerElement(ec).isCanonical(readHead)) {
          return false;
        }
      }
      return true;
    }
    default: {
      // Flat data list (VOID/BIT/BYTE/.../EIGHT_BYTES).
      if (reinterpret_cast<const word*>(this->ptr) != *readHead) {
        return false;
      }

      auto bitSize = upgradeBound<uint64_t>(this->elementCount) *
                     dataBitsPerElement(this->elementSize);
      auto truncatedByteSize = bitSize / BITS_PER_BYTE;
      auto byteReadHead = reinterpret_cast<const uint8_t*>(*readHead) + truncatedByteSize;
      auto readHeadEnd = *readHead + WireHelpers::roundBitsUpToWords(bitSize);

      auto leftoverBits = bitSize % BITS_PER_BYTE;
      if (leftoverBits > ZERO * BITS) {
        // The unused high bits of a final partial byte must be zero.
        auto mask = ~((1 << unbound(leftoverBits / BITS)) - 1);

        if (mask & *byteReadHead) {
          return false;
        }
        byteReadHead += 1;
      }

      // All padding bytes up to the word boundary must be zero.
      while (byteReadHead != reinterpret_cast<const uint8_t*>(readHeadEnd)) {
        if (*byteReadHead != 0) {
          return false;
        }
        byteReadHead += 1;
      }

      *readHead = readHeadEnd;
      return true;
    }
  }
  KJ_UNREACHABLE;
}
   3307 
   3308 // =======================================================================================
   3309 // OrphanBuilder
   3310 
OrphanBuilder OrphanBuilder::initStruct(
    BuilderArena* arena, CapTableBuilder* capTable, StructSize size) {
  // Allocates a new orphaned struct of the given size in `arena`. The
  // orphan's tag holds the wire pointer; `location` records where the object
  // was allocated.
  OrphanBuilder result;
  StructBuilder builder = WireHelpers::initStructPointer(
      result.tagAsPtr(), nullptr, capTable, size, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}
   3321 
OrphanBuilder OrphanBuilder::initList(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementCount elementCount, ElementSize elementSize) {
  // Allocates a new orphaned list of primitive (non-struct) elements in
  // `arena`, recording its segment and location in the orphan.
  OrphanBuilder result;
  ListBuilder builder = WireHelpers::initListPointer(
      result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}
   3333 
OrphanBuilder OrphanBuilder::initStructList(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementCount elementCount, StructSize elementSize) {
  // Allocates a new orphaned list of structs (inline-composite layout) in
  // `arena`, recording its segment and location in the orphan.
  OrphanBuilder result;
  ListBuilder builder = WireHelpers::initStructListPointer(
      result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}
   3345 
OrphanBuilder OrphanBuilder::initText(
    BuilderArena* arena, CapTableBuilder* capTable, ByteCount size) {
  // Allocates a new orphaned text blob of `size` bytes in `arena`; `size` is
  // range-checked against the maximum text size (throwing on overflow).
  OrphanBuilder result;
  auto allocation = WireHelpers::initTextPointer(result.tagAsPtr(), nullptr, capTable,
      assertMax<MAX_TEXT_SIZE>(size, ThrowOverflow()), arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}
   3356 
   3357 OrphanBuilder OrphanBuilder::initData(
   3358     BuilderArena* arena, CapTableBuilder* capTable, ByteCount size) {
   3359   OrphanBuilder result;
   3360   auto allocation = WireHelpers::initDataPointer(result.tagAsPtr(), nullptr, capTable,
   3361       assertMaxBits<BLOB_SIZE_BITS>(size), arena);
   3362   result.segment = allocation.segment;
   3363   result.capTable = capTable;
   3364   result.location = reinterpret_cast<word*>(allocation.value.begin());
   3365   return result;
   3366 }
   3367 
   3368 OrphanBuilder OrphanBuilder::copy(
   3369     BuilderArena* arena, CapTableBuilder* capTable, StructReader copyFrom) {
   3370   OrphanBuilder result;
   3371   auto allocation = WireHelpers::setStructPointer(
   3372       nullptr, capTable, result.tagAsPtr(), copyFrom, arena);
   3373   result.segment = allocation.segment;
   3374   result.capTable = capTable;
   3375   result.location = reinterpret_cast<word*>(allocation.value);
   3376   return result;
   3377 }
   3378 
   3379 OrphanBuilder OrphanBuilder::copy(
   3380     BuilderArena* arena, CapTableBuilder* capTable, ListReader copyFrom) {
   3381   OrphanBuilder result;
   3382   auto allocation = WireHelpers::setListPointer(
   3383       nullptr, capTable, result.tagAsPtr(), copyFrom, arena);
   3384   result.segment = allocation.segment;
   3385   result.capTable = capTable;
   3386   result.location = reinterpret_cast<word*>(allocation.value);
   3387   return result;
   3388 }
   3389 
   3390 OrphanBuilder OrphanBuilder::copy(
   3391     BuilderArena* arena, CapTableBuilder* capTable, PointerReader copyFrom) {
   3392   OrphanBuilder result;
   3393   auto allocation = WireHelpers::copyPointer(
   3394       nullptr, capTable, result.tagAsPtr(),
   3395       copyFrom.segment, copyFrom.capTable, copyFrom.pointer, copyFrom.nestingLimit, arena);
   3396   result.segment = allocation.segment;
   3397   result.capTable = capTable;
   3398   result.location = reinterpret_cast<word*>(allocation.value);
   3399   return result;
   3400 }
   3401 
   3402 OrphanBuilder OrphanBuilder::copy(
   3403     BuilderArena* arena, CapTableBuilder* capTable, Text::Reader copyFrom) {
   3404   OrphanBuilder result;
   3405   auto allocation = WireHelpers::setTextPointer(
   3406       result.tagAsPtr(), nullptr, capTable, copyFrom, arena);
   3407   result.segment = allocation.segment;
   3408   result.capTable = capTable;
   3409   result.location = reinterpret_cast<word*>(allocation.value.begin());
   3410   return result;
   3411 }
   3412 
   3413 OrphanBuilder OrphanBuilder::copy(
   3414     BuilderArena* arena, CapTableBuilder* capTable, Data::Reader copyFrom) {
   3415   OrphanBuilder result;
   3416   auto allocation = WireHelpers::setDataPointer(
   3417       result.tagAsPtr(), nullptr, capTable, copyFrom, arena);
   3418   result.segment = allocation.segment;
   3419   result.capTable = capTable;
   3420   result.location = reinterpret_cast<word*>(allocation.value.begin());
   3421   return result;
   3422 }
   3423 
   3424 #if !CAPNP_LITE
   3425 OrphanBuilder OrphanBuilder::copy(
   3426     BuilderArena* arena, CapTableBuilder* capTable, kj::Own<ClientHook> copyFrom) {
   3427   OrphanBuilder result;
   3428   WireHelpers::setCapabilityPointer(nullptr, capTable, result.tagAsPtr(), kj::mv(copyFrom));
   3429   result.segment = arena->getSegment(SegmentId(0));
   3430   result.capTable = capTable;
   3431   result.location = &result.tag;  // dummy to make location non-null
   3432   return result;
   3433 }
   3434 #endif  // !CAPNP_LITE
   3435 
OrphanBuilder OrphanBuilder::concat(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementSize elementSize, StructSize structSize,
    kj::ArrayPtr<const ListReader> lists) {
  // Concatenate the given lists into a single newly-allocated orphaned list. If the inputs'
  // element sizes don't all match `elementSize`, the result is upgraded to a struct
  // (INLINE_COMPOSITE) list whose per-element size covers the largest data and pointer
  // sections seen among the inputs. Bit lists cannot be upgraded this way.
  KJ_REQUIRE(lists.size() > 0, "Can't concat empty list ");

  // Find the overall element count and size.
  ListElementCount elementCount = ZERO * ELEMENTS;
  for (auto& list: lists) {
    // Total element count must still fit in a list pointer's count field.
    elementCount = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(elementCount + list.elementCount,
        []() { KJ_FAIL_REQUIRE("concatenated list exceeds list size limit"); });
    if (list.elementSize != elementSize) {
      // If element sizes don't all match, upgrade to struct list.
      KJ_REQUIRE(list.elementSize != ElementSize::BIT && elementSize != ElementSize::BIT,
                 "can't upgrade bit lists to struct lists");
      elementSize = ElementSize::INLINE_COMPOSITE;
    }
    // Grow the element struct size to cover the widest input element.
    structSize.data = kj::max(structSize.data,
        WireHelpers::roundBitsUpToWords(list.structDataSize));
    structSize.pointers = kj::max(structSize.pointers, list.structPointerCount);
  }

  // Allocate the list.
  OrphanBuilder result;
  ListBuilder builder = (elementSize == ElementSize::INLINE_COMPOSITE)
      ? WireHelpers::initStructListPointer(
          result.tagAsPtr(), nullptr, capTable, elementCount, structSize, arena)
      : WireHelpers::initListPointer(
          result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);

  // Copy elements.
  switch (elementSize) {
    case ElementSize::INLINE_COMPOSITE: {
      // Struct elements: copy each element's content individually, since input elements may
      // be narrower than the output element size.
      ListElementCount pos = ZERO * ELEMENTS;
      for (auto& list: lists) {
        for (auto i: kj::zeroTo(list.size())) {
          builder.getStructElement(pos).copyContentFrom(list.getStructElement(i));
          // assumeBits() safe because we checked total size earlier.
          pos = assumeBits<LIST_ELEMENT_COUNT_BITS>(pos + ONE * ELEMENTS);
        }
      }
      break;
    }
    case ElementSize::POINTER: {
      // Pointer elements: deep-copy each pointer (may reference objects in other segments).
      ListElementCount pos = ZERO * ELEMENTS;
      for (auto& list: lists) {
        for (auto i: kj::zeroTo(list.size())) {
          builder.getPointerElement(pos).copyFrom(list.getPointerElement(i));
          // assumeBits() safe because we checked total size earlier.
          pos = assumeBits<LIST_ELEMENT_COUNT_BITS>(pos + ONE * ELEMENTS);
        }
      }
      break;
    }
    case ElementSize::BIT: {
      // It's difficult to memcpy() bits since a list could start or end mid-byte. For now we
      // do a slow, naive loop. Probably no one will ever care.
      ListElementCount pos = ZERO * ELEMENTS;
      for (auto& list: lists) {
        for (auto i: kj::zeroTo(list.size())) {
          builder.setDataElement<bool>(pos, list.getDataElement<bool>(i));
          // assumeBits() safe because we checked total size earlier.
          pos = assumeBits<LIST_ELEMENT_COUNT_BITS>(pos + ONE * ELEMENTS);
        }
      }
      break;
    }
    default: {
      // We know all the inputs are primitives with identical size because otherwise we would have
      // chosen INLINE_COMPOSITE. Therefore, we can safely use memcpy() here instead of copying
      // each element manually.
      byte* target = builder.ptr;
      auto step = builder.step / BITS_PER_BYTE;
      for (auto& list: lists) {
        auto count = step * upgradeBound<uint64_t>(list.size());
        WireHelpers::copyMemory(target, list.ptr, assumeBits<SEGMENT_WORD_COUNT_BITS>(count));
        target += count;
      }
      break;
    }
  }

  // Return orphan.
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}
   3524 
   3525 OrphanBuilder OrphanBuilder::referenceExternalData(BuilderArena* arena, Data::Reader data) {
   3526   KJ_REQUIRE(reinterpret_cast<uintptr_t>(data.begin()) % sizeof(void*) == 0,
   3527              "Cannot referenceExternalData() that is not aligned.");
   3528 
   3529   auto checkedSize = assertMaxBits<BLOB_SIZE_BITS>(bounded(data.size()));
   3530   auto wordCount = WireHelpers::roundBytesUpToWords(checkedSize * BYTES);
   3531   kj::ArrayPtr<const word> words(reinterpret_cast<const word*>(data.begin()),
   3532                                  unbound(wordCount / WORDS));
   3533 
   3534   OrphanBuilder result;
   3535   result.tagAsPtr()->setKindForOrphan(WirePointer::LIST);
   3536   result.tagAsPtr()->listRef.set(ElementSize::BYTE, checkedSize * ELEMENTS);
   3537   result.segment = arena->addExternalSegment(words);
   3538 
   3539   // External data cannot possibly contain capabilities.
   3540   result.capTable = nullptr;
   3541 
   3542   // const_cast OK here because we will check whether the segment is writable when we try to get
   3543   // a builder.
   3544   result.location = const_cast<word*>(words.begin());
   3545 
   3546   return result;
   3547 }
   3548 
   3549 StructBuilder OrphanBuilder::asStruct(StructSize size) {
   3550   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3551 
   3552   StructBuilder result = WireHelpers::getWritableStructPointer(
   3553       tagAsPtr(), location, segment, capTable, size, nullptr, segment->getArena());
   3554 
   3555   // Watch out, the pointer could have been updated if the object had to be relocated.
   3556   location = reinterpret_cast<word*>(result.data);
   3557 
   3558   return result;
   3559 }
   3560 
   3561 ListBuilder OrphanBuilder::asList(ElementSize elementSize) {
   3562   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3563 
   3564   ListBuilder result = WireHelpers::getWritableListPointer(
   3565       tagAsPtr(), location, segment, capTable, elementSize, nullptr, segment->getArena());
   3566 
   3567   // Watch out, the pointer could have been updated if the object had to be relocated.
   3568   // (Actually, currently this is not true for primitive lists, but let's not turn into a bug if
   3569   // it changes!)
   3570   location = result.getLocation();
   3571 
   3572   return result;
   3573 }
   3574 
   3575 ListBuilder OrphanBuilder::asStructList(StructSize elementSize) {
   3576   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3577 
   3578   ListBuilder result = WireHelpers::getWritableStructListPointer(
   3579       tagAsPtr(), location, segment, capTable, elementSize, nullptr, segment->getArena());
   3580 
   3581   // Watch out, the pointer could have been updated if the object had to be relocated.
   3582   location = result.getLocation();
   3583 
   3584   return result;
   3585 }
   3586 
   3587 ListBuilder OrphanBuilder::asListAnySize() {
   3588   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3589 
   3590   ListBuilder result = WireHelpers::getWritableListPointerAnySize(
   3591       tagAsPtr(), location, segment, capTable, nullptr, segment->getArena());
   3592 
   3593   // Watch out, the pointer could have been updated if the object had to be relocated.
   3594   location = result.getLocation();
   3595 
   3596   return result;
   3597 }
   3598 
   3599 Text::Builder OrphanBuilder::asText() {
   3600   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3601 
   3602   // Never relocates.
   3603   return WireHelpers::getWritableTextPointer(
   3604       tagAsPtr(), location, segment, capTable, nullptr, ZERO * BYTES);
   3605 }
   3606 
   3607 Data::Builder OrphanBuilder::asData() {
   3608   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3609 
   3610   // Never relocates.
   3611   return WireHelpers::getWritableDataPointer(
   3612       tagAsPtr(), location, segment, capTable, nullptr, ZERO * BYTES);
   3613 }
   3614 
   3615 StructReader OrphanBuilder::asStructReader(StructSize size) const {
   3616   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3617   return WireHelpers::readStructPointer(
   3618       segment, capTable, tagAsPtr(), location, nullptr, kj::maxValue);
   3619 }
   3620 
   3621 ListReader OrphanBuilder::asListReader(ElementSize elementSize) const {
   3622   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3623   return WireHelpers::readListPointer(
   3624       segment, capTable, tagAsPtr(), location, nullptr, elementSize, kj::maxValue);
   3625 }
   3626 
   3627 ListReader OrphanBuilder::asListReaderAnySize() const {
   3628   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3629   return WireHelpers::readListPointer(
   3630       segment, capTable, tagAsPtr(), location, nullptr, ElementSize::VOID /* dummy */,
   3631       kj::maxValue);
   3632 }
   3633 
   3634 #if !CAPNP_LITE
   3635 kj::Own<ClientHook> OrphanBuilder::asCapability() const {
   3636   return WireHelpers::readCapabilityPointer(segment, capTable, tagAsPtr(), kj::maxValue);
   3637 }
   3638 #endif  // !CAPNP_LITE
   3639 
   3640 Text::Reader OrphanBuilder::asTextReader() const {
   3641   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3642   return WireHelpers::readTextPointer(segment, tagAsPtr(), location, nullptr, ZERO * BYTES);
   3643 }
   3644 
   3645 Data::Reader OrphanBuilder::asDataReader() const {
   3646   KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
   3647   return WireHelpers::readDataPointer(segment, tagAsPtr(), location, nullptr, ZERO * BYTES);
   3648 }
   3649 
bool OrphanBuilder::truncate(ElementCount uncheckedSize, bool isText) {
  // Resize this orphaned list (or text/data blob) to `uncheckedSize` elements, preserving
  // existing content. Shrinking zeroes the removed tail (and, for text, the byte before the
  // new end so the NUL terminator is maintained); growing extends in place when the segment
  // allows, otherwise re-allocates and transfers the elements. Returns false when the orphan
  // cannot be resized -- it is a non-list, or a null pointer with a nonzero target size --
  // in which case the caller is expected to allocate a replacement.
  ListElementCount size = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(uncheckedSize,
      []() { KJ_FAIL_REQUIRE("requested list size is too large"); });

  WirePointer* ref = tagAsPtr();
  SegmentBuilder* segment = this->segment;

  // Resolve any far pointer so `target` addresses the actual list content; `ref` and
  // `segment` are updated to the final pointer and its segment.
  word* target = WireHelpers::followFars(ref, location, segment);

  if (ref->isNull()) {
    // We don't know the right element size, so we can't resize this list.
    return size == ZERO * ELEMENTS;
  }

  KJ_REQUIRE(ref->kind() == WirePointer::LIST, "Can't truncate non-list.") {
    return false;
  }

  if (isText) {
    // Add space for the NUL terminator.
    size = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(size + ONE * ELEMENTS,
        []() { KJ_FAIL_REQUIRE("requested list size is too large"); });
  }

  auto elementSize = ref->listRef.elementSize();

  if (elementSize == ElementSize::INLINE_COMPOSITE) {
    // Struct list: the element count and struct size live in the tag word preceding the
    // element data.
    auto oldWordCount = ref->listRef.inlineCompositeWordCount();

    WirePointer* tag = reinterpret_cast<WirePointer*>(target);
    ++target;
    KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
               "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
      return false;
    }
    StructSize structSize(tag->structRef.dataSize.get(), tag->structRef.ptrCount.get());
    auto elementStep = structSize.total() / ELEMENTS;

    auto oldSize = tag->inlineCompositeListElementCount();

    // Both the new and old total word counts must fit in a segment.
    SegmentWordCount sizeWords = assertMaxBits<SEGMENT_WORD_COUNT_BITS>(
        upgradeBound<uint64_t>(size) * elementStep,
        []() { KJ_FAIL_ASSERT("requested list size too large to fit in message segment"); });
    SegmentWordCount oldSizeWords = assertMaxBits<SEGMENT_WORD_COUNT_BITS>(
        upgradeBound<uint64_t>(oldSize) * elementStep,
        []() { KJ_FAIL_ASSERT("prior to truncate, list is larger than max segment size?"); });

    word* newEndWord = target + sizeWords;
    word* oldEndWord = target + oldWordCount;

    if (size <= oldSize) {
      // Shrinking (or same size): zero the removed elements, update both the list pointer
      // and the tag, and give the freed words back to the segment if possible.
      // Zero the trailing elements.
      for (auto i: kj::range(size, oldSize)) {
        // assumeBits() safe because we checked that both sizeWords and oldSizeWords are in-range
        // above.
        WireHelpers::zeroObject(segment, capTable, tag, target +
            assumeBits<SEGMENT_WORD_COUNT_BITS>(upgradeBound<uint64_t>(i) * elementStep));
      }
      ref->listRef.setInlineComposite(sizeWords);
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else if (newEndWord <= oldEndWord) {
      // Apparently the old list was over-allocated? The word count is more than needed to store
      // the elements. This is "valid" but shouldn't happen in practice unless someone is toying
      // with us.
      word* expectedEnd = target + oldSizeWords;
      KJ_ASSERT(newEndWord >= expectedEnd);
      WireHelpers::zeroMemory(expectedEnd,
          intervalLength(expectedEnd, newEndWord, MAX_SEGMENT_WORDS));
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
    } else {
      // Growing: try to extend the allocation in place first.
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the new memory is already zero'd.
        ref->listRef.setInlineComposite(sizeWords);
        tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initStructList(segment->getArena(), capTable, size, structSize);

        ListBuilder newList = replacement.asStructList(structSize);
        for (auto i: kj::zeroTo(oldSize)) {
          // assumeBits() safe because we checked that both sizeWords and oldSizeWords are in-range
          // above.
          word* element = target +
              assumeBits<SEGMENT_WORD_COUNT_BITS>(upgradeBound<uint64_t>(i) * elementStep);
          newList.getStructElement(i).transferContentFrom(
              StructBuilder(segment, capTable, element,
                            reinterpret_cast<WirePointer*>(element + structSize.data),
                            structSize.data * BITS_PER_WORD, structSize.pointers));
        }

        *this = kj::mv(replacement);
      }
    }
  } else if (elementSize == ElementSize::POINTER) {
    // Pointer list: each element is one pointer word.
    // TODO(cleanup): GCC won't let me declare this constexpr, claiming POINTERS is not constexpr,
    //   but it is?
    const auto POINTERS_PER_ELEMENT = ONE * POINTERS / ELEMENTS;

    auto oldSize = ref->listRef.elementCount();
    word* newEndWord = target + size * POINTERS_PER_ELEMENT * WORDS_PER_POINTER;
    word* oldEndWord = target + oldSize * POINTERS_PER_ELEMENT * WORDS_PER_POINTER;

    if (size <= oldSize) {
      // Zero the trailing elements.
      for (WirePointer* element = reinterpret_cast<WirePointer*>(newEndWord);
           element < reinterpret_cast<WirePointer*>(oldEndWord); ++element) {
        // zeroPointerAndFars() also releases any far-pointer landing pads the element used.
        WireHelpers::zeroPointerAndFars(segment, element);
      }
      ref->listRef.set(ElementSize::POINTER, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else {
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the new memory is already zero'd.
        ref->listRef.set(ElementSize::POINTER, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initList(
            segment->getArena(), capTable, size, ElementSize::POINTER);
        ListBuilder newList = replacement.asList(ElementSize::POINTER);
        WirePointer* oldPointers = reinterpret_cast<WirePointer*>(target);
        for (auto i: kj::zeroTo(oldSize)) {
          newList.getPointerElement(i).transferFrom(
              PointerBuilder(segment, capTable, oldPointers + i * POINTERS_PER_ELEMENT));
        }
        *this = kj::mv(replacement);
      }
    }
  } else {
    // Primitive (data) list: elements are fixed-width bit fields; content can be moved with
    // a flat memory copy.
    auto oldSize = ref->listRef.elementCount();
    auto step = dataBitsPerElement(elementSize);
    const auto MAX_STEP_BYTES = ONE * WORDS / ELEMENTS * BYTES_PER_WORD;
    word* newEndWord = target + WireHelpers::roundBitsUpToWords(
        upgradeBound<uint64_t>(size) * step);
    word* oldEndWord = target + WireHelpers::roundBitsUpToWords(
        upgradeBound<uint64_t>(oldSize) * step);

    if (size <= oldSize) {
      // When truncating text, we want to set the null terminator as well, so we'll do our zeroing
      // at the byte level.
      byte* begin = reinterpret_cast<byte*>(target);
      byte* newEndByte = begin + WireHelpers::roundBitsUpToBytes(
          upgradeBound<uint64_t>(size) * step) - isText;
      byte* oldEndByte = reinterpret_cast<byte*>(oldEndWord);

      WireHelpers::zeroMemory(newEndByte,
          intervalLength(newEndByte, oldEndByte, MAX_LIST_ELEMENTS * MAX_STEP_BYTES));
      ref->listRef.set(elementSize, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else {
      // We're trying to extend, not truncate.
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the memory is already zero'd.
        ref->listRef.set(elementSize, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initList(segment->getArena(), capTable, size, elementSize);
        ListBuilder newList = replacement.asList(elementSize);
        auto words = WireHelpers::roundBitsUpToWords(
            dataBitsPerElement(elementSize) * upgradeBound<uint64_t>(oldSize));
        WireHelpers::copyMemory(reinterpret_cast<word*>(newList.ptr), target, words);
        *this = kj::mv(replacement);
      }
    }
  }

  return true;
}
   3818 
   3819 void OrphanBuilder::truncate(ElementCount size, ElementSize elementSize) {
   3820   if (!truncate(size, false)) {
   3821     // assumeBits() safe since it's checked inside truncate()
   3822     *this = initList(segment->getArena(), capTable,
   3823         assumeBits<LIST_ELEMENT_COUNT_BITS>(size), elementSize);
   3824   }
   3825 }
   3826 
   3827 void OrphanBuilder::truncate(ElementCount size, StructSize elementSize) {
   3828   if (!truncate(size, false)) {
   3829     // assumeBits() safe since it's checked inside truncate()
   3830     *this = initStructList(segment->getArena(), capTable,
   3831         assumeBits<LIST_ELEMENT_COUNT_BITS>(size), elementSize);
   3832   }
   3833 }
   3834 
   3835 void OrphanBuilder::truncateText(ElementCount size) {
   3836   if (!truncate(size, true)) {
   3837     // assumeBits() safe since it's checked inside truncate()
   3838     *this = initText(segment->getArena(), capTable,
   3839         assumeBits<LIST_ELEMENT_COUNT_BITS>(size) * (ONE * BYTES / ELEMENTS));
   3840   }
   3841 }
   3842 
   3843 void OrphanBuilder::euthanize() {
   3844   // Carefully catch any exceptions and rethrow them as recoverable exceptions since we may be in
   3845   // a destructor.
   3846   auto exception = kj::runCatchingExceptions([&]() {
   3847     if (tagAsPtr()->isPositional()) {
   3848       WireHelpers::zeroObject(segment, capTable, tagAsPtr(), location);
   3849     } else {
   3850       WireHelpers::zeroObject(segment, capTable, tagAsPtr());
   3851     }
   3852 
   3853     WireHelpers::zeroMemory(&tag, ONE * WORDS);
   3854     segment = nullptr;
   3855     location = nullptr;
   3856   });
   3857 
   3858   KJ_IF_MAYBE(e, exception) {
   3859     kj::getExceptionCallback().onRecoverableException(kj::mv(*e));
   3860   }
   3861 }
   3862 
   3863 }  // namespace _ (private)
   3864 }  // namespace capnp