diff --git a/include/swift/ABI/KeyPath.h b/include/swift/ABI/KeyPath.h index dd48ae3cdedd7..199598da6340b 100644 --- a/include/swift/ABI/KeyPath.h +++ b/include/swift/ABI/KeyPath.h @@ -173,14 +173,13 @@ class KeyPathComponentHeader { enum ComputedPropertyIDKind { Pointer, - StoredPropertyOffset, + StoredPropertyIndex, VTableOffset, }; constexpr static uint32_t getResolutionStrategy(ComputedPropertyIDKind idKind) { return idKind == Pointer ? _SwiftKeyPathComponentHeader_ComputedIDUnresolvedIndirectPointer - : idKind == StoredPropertyOffset ? _SwiftKeyPathComponentHeader_ComputedIDUnresolvedFieldOffset : (assert("no resolution strategy implemented" && false), 0); } @@ -196,7 +195,7 @@ class KeyPathComponentHeader { ? _SwiftKeyPathComponentHeader_ComputedSettableFlag : 0) | (kind == SettableMutating ? _SwiftKeyPathComponentHeader_ComputedMutatingFlag : 0) - | (idKind == StoredPropertyOffset + | (idKind == StoredPropertyIndex ? _SwiftKeyPathComponentHeader_ComputedIDByStoredPropertyFlag : 0) | (idKind == VTableOffset ? _SwiftKeyPathComponentHeader_ComputedIDByVTableOffsetFlag : 0) diff --git a/lib/IRGen/ConstantBuilder.h b/lib/IRGen/ConstantBuilder.h index c0c975e05f165..71eaa00711d29 100644 --- a/lib/IRGen/ConstantBuilder.h +++ b/lib/IRGen/ConstantBuilder.h @@ -107,6 +107,14 @@ class ConstantAggregateBuilderBase Size getNextOffsetFromGlobal() const { return Size(super::getNextOffsetFromGlobal().getQuantity()); } + + void addAlignmentPadding(Alignment align) { + auto misalignment = getNextOffsetFromGlobal() % IGM().getPointerAlignment(); + if (misalignment != Size(0)) + add(llvm::ConstantAggregateZero::get( + llvm::ArrayType::get(IGM().Int8Ty, + align.getValue() - misalignment.getValue()))); + } }; class ConstantArrayBuilder diff --git a/lib/IRGen/GenClass.cpp b/lib/IRGen/GenClass.cpp index b1232004cb8c9..56e74029ef69e 100644 --- a/lib/IRGen/GenClass.cpp +++ b/lib/IRGen/GenClass.cpp @@ -506,6 +506,13 @@ irgen::tryEmitConstantClassFragilePhysicalMemberOffset(IRGenModule &IGM, } } +unsigned +irgen::getClassFieldIndex(IRGenModule &IGM, SILType baseType, VarDecl *field) { + auto &baseClassTI = IGM.getTypeInfo(baseType).as(); + auto &classLayout = baseClassTI.getClassLayout(IGM, baseType); + return classLayout.getFieldIndex(field); +} + FieldAccess irgen::getClassFieldAccess(IRGenModule &IGM, SILType baseType, VarDecl *field) { auto &baseClassTI = IGM.getTypeInfo(baseType).as(); diff --git a/lib/IRGen/GenClass.h b/lib/IRGen/GenClass.h index 55f664c3c9b5c..6656897c46058 100644 --- a/lib/IRGen/GenClass.h +++ b/lib/IRGen/GenClass.h @@ -140,6 +140,10 @@ namespace irgen { SILType baseType, VarDecl *field); + unsigned getClassFieldIndex(IRGenModule &IGM, + SILType baseType, + VarDecl *field); + FieldAccess getClassFieldAccess(IRGenModule &IGM, SILType baseType, VarDecl *field); diff --git a/lib/IRGen/GenKeyPath.cpp b/lib/IRGen/GenKeyPath.cpp index daf39d9b30a27..e7bf0732d4cf1 100644 --- a/lib/IRGen/GenKeyPath.cpp +++ b/lib/IRGen/GenKeyPath.cpp @@ -123,7 +123,7 @@ IRGenModule::getAddrOfKeyPathPattern(KeyPathPattern *pattern, fields.add(emitMetadataGenerator(rootTy)); fields.add(emitMetadataGenerator(valueTy)); - // TODO: 32-bit still has a padding word + // TODO: 32-bit heap object header still has an extra word if (SizeTy == Int32Ty) { fields.addInt32(0); } @@ -155,65 +155,31 @@ IRGenModule::getAddrOfKeyPathPattern(KeyPathPattern *pattern, // Leave a placeholder for the buffer header, since we need to know the full // buffer size to fill it in. 
auto headerPlaceholder = fields.addPlaceholderWithSize(Int32Ty); + fields.addAlignmentPadding(getPointerAlignment()); auto startOfKeyPathBuffer = fields.getNextOffsetFromGlobal(); // Build out the components. auto baseTy = rootTy; - auto getPropertyOffsetOrIndirectOffset - = [&](SILType loweredBaseTy, VarDecl *property) - -> std::pair { - llvm::Constant *offset; - bool isResolved; - bool isStruct; - if (loweredBaseTy.getStructOrBoundGenericStruct()) { - offset = emitPhysicalStructMemberFixedOffset(*this, - loweredBaseTy, - property); - isStruct = true; - } else if (loweredBaseTy.getClassOrBoundGenericClass()) { - offset = tryEmitConstantClassFragilePhysicalMemberOffset(*this, - loweredBaseTy, - property); - isStruct = false; - } else { - llvm_unreachable("property of non-struct, non-class?!"); - } - - // If the offset isn't fixed, try instead to get the field offset vector - // offset for the field to look it up dynamically. - isResolved = offset != nullptr; - if (!isResolved) { - if (isStruct) { - offset = emitPhysicalStructMemberOffsetOfFieldOffset( - *this, loweredBaseTy, property); - assert(offset && "field is neither fixed-offset nor in offset vector"); - } else { - auto offsetValue = getClassFieldOffset(*this, - loweredBaseTy.getClassOrBoundGenericClass(), - property); - offset = llvm::ConstantInt::get(Int32Ty, offsetValue.getValue()); - } - } - - return {offset, isResolved}; - }; + auto assertPointerAlignment = [&]{ + assert(fields.getNextOffsetFromGlobal() % getPointerAlignment() == Size(0) + && "must be pointer-aligned here"); + }; for (unsigned i : indices(pattern->getComponents())) { + assertPointerAlignment(); SILType loweredBaseTy; Lowering::GenericContextScope scope(getSILTypes(), pattern->getGenericSignature()); loweredBaseTy = getLoweredType(AbstractionPattern::getOpaque(), baseTy->getLValueOrInOutObjectType()); - auto &component = pattern->getComponents()[i]; switch (auto kind = component.getKind()) { case KeyPathPatternComponent::Kind::StoredProperty: { auto property = cast(component.getStoredPropertyDecl()); auto addFixedOffset = [&](bool isStruct, llvm::Constant *offset) { - offset = llvm::ConstantExpr::getTruncOrBitCast(offset, Int32Ty); if (auto offsetInt = dyn_cast_or_null(offset)) { auto offsetValue = offsetInt->getValue().getZExtValue(); if (KeyPathComponentHeader::offsetCanBeInline(offsetValue)) { @@ -228,7 +194,7 @@ IRGenModule::getAddrOfKeyPathPattern(KeyPathPattern *pattern, ? KeyPathComponentHeader::forStructComponentWithOutOfLineOffset() : KeyPathComponentHeader::forClassComponentWithOutOfLineOffset(); fields.addInt32(header.getData()); - fields.add(offset); + fields.add(llvm::ConstantExpr::getTruncOrBitCast(offset, Int32Ty)); }; // For a struct stored property, we may know the fixed offset of the field, @@ -247,11 +213,10 @@ IRGenModule::getAddrOfKeyPathPattern(KeyPathPattern *pattern, // of the type metadata at instantiation time. 
auto fieldOffset = emitPhysicalStructMemberOffsetOfFieldOffset( *this, loweredBaseTy, property); - fieldOffset = llvm::ConstantExpr::getTruncOrBitCast(fieldOffset, - Int32Ty); auto header = KeyPathComponentHeader::forStructComponentWithUnresolvedFieldOffset(); fields.addInt32(header.getData()); - fields.add(fieldOffset); + fields.add(llvm::ConstantExpr::getTruncOrBitCast(fieldOffset, + Int32Ty)); break; } @@ -276,6 +241,7 @@ IRGenModule::getAddrOfKeyPathPattern(KeyPathPattern *pattern, auto header = KeyPathComponentHeader::forClassComponentWithUnresolvedIndirectOffset(); fields.addInt32(header.getData()); + fields.addAlignmentPadding(getPointerAlignment()); auto offsetVar = getAddrOfFieldOffset(property, /*indirect*/ false, NotForDefinition); fields.add(cast(offsetVar.getAddress())); @@ -358,10 +324,36 @@ IRGenModule::getAddrOfKeyPathPattern(KeyPathPattern *pattern, break; } case KeyPathPatternComponent::ComputedPropertyId::Property: - idKind = KeyPathComponentHeader::StoredPropertyOffset; - std::tie(idValue, idResolved) = - getPropertyOffsetOrIndirectOffset(loweredBaseTy, id.getProperty()); - idValue = llvm::ConstantExpr::getZExtOrBitCast(idValue, SizeTy); + // Use the index of the stored property within the aggregate to key + // the property. + auto property = id.getProperty(); + idKind = KeyPathComponentHeader::StoredPropertyIndex; + if (baseTy->getStructOrBoundGenericStruct()) { + idResolved = true; + idValue = llvm::ConstantInt::get(SizeTy, + getPhysicalStructFieldIndex(*this, + SILType::getPrimitiveAddressType(baseTy), property)); + } else if (baseTy->getClassOrBoundGenericClass()) { + // TODO: This field index would require runtime resolution with Swift + // native class resilience. We never directly access ObjC-imported + // ivars so we can disregard ObjC ivar resilience for this computation + // and start counting at the Swift native root. + switch (getClassFieldAccess(*this, loweredBaseTy, property)) { + case FieldAccess::ConstantDirect: + case FieldAccess::ConstantIndirect: + case FieldAccess::NonConstantDirect: + idResolved = true; + idValue = llvm::ConstantInt::get(SizeTy, + getClassFieldIndex(*this, + SILType::getPrimitiveAddressType(baseTy), property)); + break; + case FieldAccess::NonConstantIndirect: + llvm_unreachable("not implemented"); + } + + } else { + llvm_unreachable("neither struct nor class"); + } break; } @@ -369,6 +361,7 @@ IRGenModule::getAddrOfKeyPathPattern(KeyPathPattern *pattern, idKind, !isInstantiableInPlace, idResolved); fields.addInt32(header.getData()); + fields.addAlignmentPadding(getPointerAlignment()); fields.add(idValue); if (isInstantiableInPlace) { @@ -392,6 +385,7 @@ IRGenModule::getAddrOfKeyPathPattern(KeyPathPattern *pattern, // For all but the last component, we pack in the type of the component. 
if (i + 1 != pattern->getComponents().size()) { + fields.addAlignmentPadding(getPointerAlignment()); fields.add(emitMetadataGenerator(component.getComponentType())); } baseTy = component.getComponentType(); diff --git a/stdlib/public/SwiftShims/KeyPath.h b/stdlib/public/SwiftShims/KeyPath.h index b8846e79fd94d..5f3b0218a8384 100644 --- a/stdlib/public/SwiftShims/KeyPath.h +++ b/stdlib/public/SwiftShims/KeyPath.h @@ -90,8 +90,6 @@ static const __swift_uint32_t _SwiftKeyPathComponentHeader_ComputedIDResolutionM = 0x0000000FU; static const __swift_uint32_t _SwiftKeyPathComponentHeader_ComputedIDResolved = 0x00000000U; -static const __swift_uint32_t _SwiftKeyPathComponentHeader_ComputedIDUnresolvedFieldOffset - = 0x00000001U; static const __swift_uint32_t _SwiftKeyPathComponentHeader_ComputedIDUnresolvedIndirectPointer = 0x00000002U; diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 632e9cac96578..7fcd866b2007b 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -350,58 +350,6 @@ public class ReferenceWritableKeyPath: WritableKeyPath // MARK: Implementation details -// Keypaths store word-sized values with 32-bit alignment for memory efficiency. -// Since RawPointer's APIs currently require alignment, this means we need -// to do some shuffling for the unaligned load/stores. - -extension UnsafeRawBufferPointer { - internal func _loadKeyPathWord(fromByteOffset offset: Int = 0, - as _: T.Type) -> T { - _sanityCheck(_isPOD(T.self) && - MemoryLayout.size == MemoryLayout.size, - "not a word-sized trivial type") - if MemoryLayout.size == 8 { - let words = load(fromByteOffset: offset, as: (Int32, Int32).self) - return unsafeBitCast(words, to: T.self) - } else if MemoryLayout.size == 4 { - return load(fromByteOffset: offset, as: T.self) - } else { - _sanityCheckFailure("unsupported architecture") - } - } -} - -extension UnsafeMutableRawBufferPointer { - internal func _loadKeyPathWord(fromByteOffset offset: Int = 0, - as _: T.Type) -> T { - _sanityCheck(_isPOD(T.self) && - MemoryLayout.size == MemoryLayout.size, - "not a word-sized trivial type") - if MemoryLayout.size == 8 { - let words = load(fromByteOffset: offset, as: (Int32, Int32).self) - return unsafeBitCast(words, to: T.self) - } else if MemoryLayout.size == 4 { - return load(fromByteOffset: offset, as: T.self) - } else { - _sanityCheckFailure("unsupported architecture") - } - } - internal func _storeKeyPathWord(of value: T, - toByteOffset offset: Int = 0) { - _sanityCheck(_isPOD(T.self) && - MemoryLayout.size == MemoryLayout.size, - "not a word-sized trivial type") - if MemoryLayout.size == 8 { - let words = unsafeBitCast(value, to: (Int32, Int32).self) - storeBytes(of: words, toByteOffset: offset, as: (Int32,Int32).self) - } else if MemoryLayout.size == 4 { - storeBytes(of: value, toByteOffset: offset, as: T.self) - } else { - _sanityCheckFailure("unsupported architecture") - } - } -} - internal enum KeyPathComponentKind { /// The keypath projects within the storage of the outer value, like a /// stored property in a struct. 
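Note on the new alignment padding: `ConstantAggregateBuilderBase::addAlignmentPadding` above inserts a zero-filled `[N x i8]` array whenever the next offset in the global being built is not pointer-aligned, which is where the `[4 x i8] zeroinitializer` entries in the updated `keypaths.sil` checks come from. A minimal sketch of the arithmetic follows; the helper name `alignmentPadding` is illustrative and not part of the patch, and the sketch assumes a single alignment value, which matches the call sites since `addAlignmentPadding` is only ever invoked with the pointer alignment here.

// Number of zero bytes emitted before a pointer-aligned field, given the next
// offset in the global being built.
func alignmentPadding(offset: Int, alignment: Int) -> Int {
  let misalignment = offset % alignment
  return misalignment == 0 ? 0 : alignment - misalignment
}

// A 4-byte component header on a 64-bit target leaves the offset 4 bytes past
// a pointer boundary, so the next pointer-sized field is preceded by 4 bytes
// of padding; on a 32-bit target no padding is needed.
assert(alignmentPadding(offset: 4, alignment: 8) == 4)
assert(alignmentPadding(offset: 4, alignment: 4) == 0)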
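The `RawKeyPathComponent` hunks that follow replace the old split 32-bit loads with naturally aligned loads at offsets based on `Header.pointerAlignmentSkew`. A sketch of the computed-component body layout those accessors imply; offsets are relative to the start of the body, just past the 4-byte component header, and the local names here are illustrative rather than taken from the patch.

// On 64-bit, pointerAlignmentSkew == 8 - 4 == 4; on 32-bit it is 0 and the
// pointer-sized fields pack directly after the component header.
let wordSize = MemoryLayout<Int>.size
let skew = wordSize - MemoryLayout<Int32>.size   // Header.pointerAlignmentSkew

let idOffset     = skew                 // _computedIDValue
let getterOffset = skew + wordSize      // _computedGetter
let setterOffset = skew + 2 * wordSize  // _computedSetter (settable components only)
let bodySize     = skew + 2 * wordSize  // plus one more word when settable

print(idOffset, getterOffset, setterOffset, bodySize)   // 4 12 20 20 on 64-bit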
@@ -642,9 +590,6 @@ internal struct RawKeyPathComponent { static var computedIDResolved: UInt32 { return _SwiftKeyPathComponentHeader_ComputedIDResolved } - static var computedIDUnresolvedFieldOffset: UInt32 { - return _SwiftKeyPathComponentHeader_ComputedIDUnresolvedFieldOffset - } static var computedIDUnresolvedIndirectPointer: UInt32 { return _SwiftKeyPathComponentHeader_ComputedIDUnresolvedIndirectPointer } @@ -695,6 +640,12 @@ internal struct RawKeyPathComponent { _sanityCheckFailure("invalid header") } } + + // The component header is 4 bytes, but may be followed by an aligned + // pointer field for some kinds of component, forcing padding. + static var pointerAlignmentSkew: Int { + return MemoryLayout.size - MemoryLayout.size + } var bodySize: Int { switch kind { @@ -705,8 +656,8 @@ internal struct RawKeyPathComponent { return 0 case .computed: let ptrSize = MemoryLayout.size - // minimum two pointers for id and get - var total = ptrSize * 2 + // align to pointer, minimum two pointers for id and get + var total = Header.pointerAlignmentSkew + ptrSize * 2 // additional word for a setter if payload & Header.computedSettableFlag != 0 { total += ptrSize @@ -748,9 +699,8 @@ internal struct RawKeyPathComponent { var _computedIDValue: Int { _sanityCheck(header.kind == .computed, "not a computed property") - _sanityCheck(body.count >= MemoryLayout.size, - "component is not big enough") - return body._loadKeyPathWord(as: Int.self) + return body.load(fromByteOffset: Header.pointerAlignmentSkew, + as: Int.self) } var _computedID: ComputedPropertyID { @@ -764,11 +714,10 @@ internal struct RawKeyPathComponent { var _computedGetter: UnsafeRawPointer { _sanityCheck(header.kind == .computed, "not a computed property") - _sanityCheck(body.count >= MemoryLayout.size * 2, - "component is not big enough") - return body._loadKeyPathWord(fromByteOffset: MemoryLayout.size, - as: UnsafeRawPointer.self) + return body.load( + fromByteOffset: Header.pointerAlignmentSkew + MemoryLayout.size, + as: UnsafeRawPointer.self) } var _computedSetter: UnsafeRawPointer { @@ -776,11 +725,10 @@ internal struct RawKeyPathComponent { "not a computed property") _sanityCheck(header.payload & Header.computedSettableFlag != 0, "not a settable property") - _sanityCheck(body.count >= MemoryLayout.size * 3, - "component is not big enough") - return body._loadKeyPathWord(fromByteOffset: MemoryLayout.size * 2, - as: UnsafeRawPointer.self) + return body.load( + fromByteOffset: Header.pointerAlignmentSkew + MemoryLayout.size * 2, + as: UnsafeRawPointer.self) } var value: KeyPathComponent { @@ -838,7 +786,7 @@ internal struct RawKeyPathComponent { return } } - + func clone(into buffer: inout UnsafeMutableRawBufferPointer, endOfReferencePrefix: Bool) { var newHeader = header @@ -860,28 +808,32 @@ internal struct RawKeyPathComponent { .optionalWrap: break case .computed: + // Fields are pointer-aligned after the header + componentSize += Header.pointerAlignmentSkew // TODO: nontrivial arguments need to be copied by value witness _sanityCheck(header.payload & Header.computedHasArgumentsFlag == 0, "arguments not implemented") - buffer._storeKeyPathWord(of: _computedIDValue, toByteOffset: 4) - buffer._storeKeyPathWord(of: _computedGetter, - toByteOffset: 4 + MemoryLayout.size) + buffer.storeBytes(of: _computedIDValue, + toByteOffset: MemoryLayout.size, + as: Int.self) + buffer.storeBytes(of: _computedGetter, + toByteOffset: 2 * MemoryLayout.size, + as: UnsafeRawPointer.self) componentSize += MemoryLayout.size * 2 if header.payload & 
Header.computedSettableFlag != 0 { - buffer._storeKeyPathWord(of: _computedSetter, - toByteOffset: 4 + MemoryLayout.size * 2) + buffer.storeBytes(of: _computedSetter, + toByteOffset: MemoryLayout.size * 3, + as: UnsafeRawPointer.self) componentSize += MemoryLayout.size } } - _sanityCheck(buffer.count >= componentSize) buffer = UnsafeMutableRawBufferPointer( start: buffer.baseAddress.unsafelyUnwrapped + componentSize, - count: buffer.count - componentSize - ) + count: buffer.count - componentSize) } - + func projectReadOnly(_ base: CurValue) -> NewValue { switch value { case .struct(let offset): @@ -1062,9 +1014,8 @@ internal struct KeyPathBuffer { init(base: UnsafeRawPointer) { let header = base.load(as: Header.self) data = UnsafeRawBufferPointer( - start: base + MemoryLayout
.size, - count: header.size - ) + start: base + MemoryLayout.size, + count: header.size) trivial = header.trivial hasReferencePrefix = header.hasReferencePrefix } @@ -1086,7 +1037,7 @@ internal struct KeyPathBuffer { let body: UnsafeRawBufferPointer let size = header.bodySize if size != 0 { - body = popRaw(size) + body = popRaw(size: size, alignment: 4) } else { body = UnsafeRawBufferPointer(start: nil, count: 0) } @@ -1097,22 +1048,15 @@ internal struct KeyPathBuffer { if data.count == 0 { nextType = nil } else { - if MemoryLayout.size == 8 { - // Words in the key path buffer are 32-bit aligned - nextType = unsafeBitCast(pop((Int32, Int32).self), - to: Any.Type.self) - } else if MemoryLayout.size == 4 { - nextType = pop(Any.Type.self) - } else { - _sanityCheckFailure("unexpected word size") - } + nextType = pop(Any.Type.self) } return (component, nextType) } mutating func pop(_ type: T.Type) -> T { _sanityCheck(_isPOD(T.self), "should be POD") - let raw = popRaw(MemoryLayout.size) + let raw = popRaw(size: MemoryLayout.size, + alignment: MemoryLayout.alignment) let resultBuf = UnsafeMutablePointer.allocate(capacity: 1) _memcpy(dest: resultBuf, src: UnsafeMutableRawPointer(mutating: raw.baseAddress.unsafelyUnwrapped), @@ -1121,13 +1065,18 @@ internal struct KeyPathBuffer { resultBuf.deallocate(capacity: 1) return result } - mutating func popRaw(_ size: Int) -> UnsafeRawBufferPointer { - _sanityCheck(data.count >= size, - "not enough space for next component?") - let result = UnsafeRawBufferPointer(start: data.baseAddress, count: size) + mutating func popRaw(size: Int, alignment: Int) -> UnsafeRawBufferPointer { + var baseAddress = data.baseAddress.unsafelyUnwrapped + var misalignment = Int(bitPattern: baseAddress) % alignment + if misalignment != 0 { + misalignment = alignment - misalignment + baseAddress += misalignment + } + + let result = UnsafeRawBufferPointer(start: baseAddress, count: size) data = UnsafeRawBufferPointer( - start: data.baseAddress.unsafelyUnwrapped + size, - count: data.count - size + start: baseAddress + size, + count: data.count - size - misalignment ) return result } @@ -1340,9 +1289,11 @@ public func _appendingKeyPaths< // Result buffer has room for both key paths' components, plus the // header, plus space for the middle type. - let resultSize = rootBuffer.data.count + leafBuffer.data.count - + MemoryLayout.size - + MemoryLayout.size + // Align up the root so that we can put the component type after it. + let alignMask = MemoryLayout.alignment - 1 + let rootSize = (rootBuffer.data.count + alignMask) & ~alignMask + let resultSize = rootSize + leafBuffer.data.count + + 2 * MemoryLayout.size // Tail-allocate space for the KVC string. 
let totalResultSize = (resultSize + appendedKVCLength + 3) & ~3 @@ -1360,40 +1311,40 @@ public func _appendingKeyPaths< count: resultSize) } - func pushRaw(_ count: Int) { - _sanityCheck(destBuffer.count >= count) + func pushRaw(size: Int, alignment: Int) + -> UnsafeMutableRawBufferPointer { + var baseAddress = destBuffer.baseAddress.unsafelyUnwrapped + var misalign = Int(bitPattern: baseAddress) % alignment + if misalign != 0 { + misalign = alignment - misalign + baseAddress = baseAddress.advanced(by: misalign) + } + let result = UnsafeMutableRawBufferPointer( + start: baseAddress, + count: size) destBuffer = UnsafeMutableRawBufferPointer( - start: destBuffer.baseAddress.unsafelyUnwrapped + count, - count: destBuffer.count - count - ) + start: baseAddress + size, + count: destBuffer.count - size - misalign) + return result } - func pushType(_ type: Any.Type) { - let intSize = MemoryLayout.size - _sanityCheck(destBuffer.count >= intSize) - if intSize == 8 { - let words = unsafeBitCast(type, to: (UInt32, UInt32).self) - destBuffer.storeBytes(of: words.0, - as: UInt32.self) - destBuffer.storeBytes(of: words.1, toByteOffset: 4, - as: UInt32.self) - } else if intSize == 4 { - destBuffer.storeBytes(of: type, as: Any.Type.self) - } else { - _sanityCheckFailure("unsupported architecture") - } - pushRaw(intSize) + func push(_ value: T) { + let buf = pushRaw(size: MemoryLayout.size, + alignment: MemoryLayout.alignment) + buf.storeBytes(of: value, as: T.self) } // Save space for the header. let leafIsReferenceWritable = type(of: leaf).kind == .reference let header = KeyPathBuffer.Header( - size: resultSize - MemoryLayout.size, + size: resultSize - MemoryLayout.size, trivial: rootBuffer.trivial && leafBuffer.trivial, hasReferencePrefix: rootBuffer.hasReferencePrefix || leafIsReferenceWritable ) - destBuffer.storeBytes(of: header, as: KeyPathBuffer.Header.self) - pushRaw(MemoryLayout.size) + push(header) + // Start the components at pointer alignment + _ = pushRaw(size: RawKeyPathComponent.Header.pointerAlignmentSkew, + alignment: 4) let leafHasReferencePrefix = leafBuffer.hasReferencePrefix @@ -1415,13 +1366,12 @@ public func _appendingKeyPaths< component.clone( into: &destBuffer, - endOfReferencePrefix: endOfReferencePrefix - ) + endOfReferencePrefix: endOfReferencePrefix) if let type = type { - pushType(type) + push(type) } else { // Insert our endpoint type between the root and leaf components. 
- pushType(Value.self) + push(Value.self as Any.Type) break } } @@ -1432,11 +1382,10 @@ public func _appendingKeyPaths< component.clone( into: &destBuffer, - endOfReferencePrefix: component.header.endOfReferencePrefix - ) + endOfReferencePrefix: component.header.endOfReferencePrefix) if let type = type { - pushType(type) + push(type) } else { break } @@ -1570,7 +1519,7 @@ internal func _getKeyPath_instantiateInline( let bufferPtr = objectPtr.advanced(by: keyPathObjectHeaderSize) let buffer = KeyPathBuffer(base: bufferPtr) - let totalSize = buffer.data.count + MemoryLayout.size + let totalSize = buffer.data.count + MemoryLayout.size let bufferData = UnsafeMutableRawBufferPointer( start: bufferPtr, count: totalSize) @@ -1610,7 +1559,7 @@ internal func _getKeyPathClassAndInstanceSize( let bufferPtr = pattern.advanced(by: keyPathObjectHeaderSize) var buffer = KeyPathBuffer(base: bufferPtr) - let size = buffer.data.count + MemoryLayout.size + let size = buffer.data.count + MemoryLayout.size scanComponents: while true { let header = buffer.pop(RawKeyPathComponent.Header.self) @@ -1661,7 +1610,8 @@ internal func _getKeyPathClassAndInstanceSize( header.payload & RawKeyPathComponent.Header.computedHasArgumentsFlag == 0, "arguments not implemented yet") - _ = buffer.popRaw(MemoryLayout.size * (settable ? 3 : 2)) + _ = buffer.popRaw(size: MemoryLayout.size * (settable ? 3 : 2), + alignment: MemoryLayout.alignment) case .optionalChain, .optionalWrap: @@ -1678,7 +1628,8 @@ internal func _getKeyPathClassAndInstanceSize( if buffer.data.count == 0 { break } // Pop the type accessor reference. - _ = buffer.popRaw(MemoryLayout.size) + _ = buffer.popRaw(size: MemoryLayout.size, + alignment: MemoryLayout.alignment) } // Grab the class object for the key path type we'll end up with. @@ -1712,27 +1663,26 @@ internal func _instantiateKeyPathBuffer( var patternBuffer = origPatternBuffer let destHeaderPtr = origDestData.baseAddress.unsafelyUnwrapped - _sanityCheck(origDestData.count >= MemoryLayout.size) var destData = UnsafeMutableRawBufferPointer( - start: destHeaderPtr.advanced(by: MemoryLayout.size), - count: origDestData.count - MemoryLayout.size) + start: destHeaderPtr.advanced(by: MemoryLayout.size), + count: origDestData.count - MemoryLayout.size) func pushDest(_ value: T) { - // TODO: If key path patterns were better optimized to try to be constant- - // memory objects, then it might become profitable to try to avoid writes - // here in the case when the dest memory contains the value we want to write - // here so that we don't dirty memory. (In practice the current - // implementation will always dirty the page when the key path is - // instantiated.) _sanityCheck(_isPOD(T.self)) var value2 = value let size = MemoryLayout.size - _sanityCheck(destData.count >= size) - _memcpy(dest: destData.baseAddress.unsafelyUnwrapped, src: &value2, + let alignment = MemoryLayout.alignment + var baseAddress = destData.baseAddress.unsafelyUnwrapped + var misalign = Int(bitPattern: baseAddress) % alignment + if misalign != 0 { + misalign = alignment - misalign + baseAddress = baseAddress.advanced(by: misalign) + } + _memcpy(dest: baseAddress, src: &value2, size: UInt(size)) destData = UnsafeMutableRawBufferPointer( - start: destData.baseAddress.unsafelyUnwrapped.advanced(by: size), - count: destData.count - size) + start: baseAddress.advanced(by: size), + count: destData.count - size - misalign) } // Track where the reference prefix begins. 
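`popRaw(size:alignment:)`, `pushRaw(size:alignment:)`, and `pushDest` above all follow the same cursor discipline: round the current address up to the requested alignment, consume `size` bytes, and shrink the remaining buffer by the bytes consumed plus any padding skipped. A standalone sketch of that pattern, under the assumption that extracting it into one helper (the hypothetical `alignedSlice`) captures what the three call sites each do inline:

// Hand out an aligned, `size`-byte slice from the front of `buffer` and
// advance the buffer past it, accounting for any alignment padding skipped.
func alignedSlice(
  _ buffer: inout UnsafeMutableRawBufferPointer,
  size: Int,
  alignment: Int
) -> UnsafeMutableRawBufferPointer {
  var base = buffer.baseAddress.unsafelyUnwrapped
  var misalign = Int(bitPattern: base) % alignment
  if misalign != 0 {
    misalign = alignment - misalign
    base += misalign
  }
  let result = UnsafeMutableRawBufferPointer(start: base, count: size)
  buffer = UnsafeMutableRawBufferPointer(start: base + size,
                                         count: buffer.count - size - misalign)
  return result
}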
@@ -1748,6 +1698,7 @@ internal func _instantiateKeyPathBuffer( let componentAddr = destData.baseAddress.unsafelyUnwrapped let header = patternBuffer.pop(RawKeyPathComponent.Header.self) + func tryToResolveOffset() { if header.payload == RawKeyPathComponent.Header.unresolvedFieldOffsetPayload { // Look up offset in type metadata. The value in the pattern is the @@ -1774,8 +1725,12 @@ internal func _instantiateKeyPathBuffer( newHeader.payload = RawKeyPathComponent.Header.outOfLineOffsetPayload pushDest(newHeader) pushDest(offsetValue) - shrinkage += MemoryLayout.size - - MemoryLayout.size + // On 64-bit systems the pointer to the ivar offset variable is + // pointer-sized and -aligned, but the resulting offset ought to be + // 32 bits only, so we can shrink the result object a bit. + if MemoryLayout.size == 8 { + shrinkage += MemoryLayout.size + } return } @@ -1820,15 +1775,6 @@ internal func _instantiateKeyPathBuffer( case RawKeyPathComponent.Header.computedIDResolved: // Nothing to do. break - case RawKeyPathComponent.Header.computedIDUnresolvedFieldOffset: - // The value in the pattern is an offset into the type metadata that - // points to the field offset for the stored property identifying the - // component. - _sanityCheck(header.payload - & RawKeyPathComponent.Header.computedIDByStoredPropertyFlag != 0, - "only stored property IDs should need offset resolution") - let metadataPtr = unsafeBitCast(base, to: UnsafeRawPointer.self) - id = metadataPtr.load(fromByteOffset: id, as: Int.self) case RawKeyPathComponent.Header.computedIDUnresolvedIndirectPointer: // The value in the pattern is a pointer to the actual unique word-sized // value in memory. @@ -1857,21 +1803,9 @@ internal func _instantiateKeyPathBuffer( if patternBuffer.data.count == 0 { break } // Resolve the component type. 
- if MemoryLayout.size == 4 { - let componentTyAccessor = patternBuffer.pop(MetadataAccessor.self) - base = unsafeBitCast(componentTyAccessor(arguments), to: Any.Type.self) - pushDest(base) - } else if MemoryLayout.size == 8 { - let componentTyAccessorWords = patternBuffer.pop((UInt32,UInt32).self) - let componentTyAccessor = unsafeBitCast(componentTyAccessorWords, - to: MetadataAccessor.self) - base = unsafeBitCast(componentTyAccessor(arguments), to: Any.Type.self) - let componentTyWords = unsafeBitCast(base, - to: (UInt32, UInt32).self) - pushDest(componentTyWords) - } else { - fatalError("unsupported architecture") - } + let componentTyAccessor = patternBuffer.pop(MetadataAccessor.self) + base = unsafeBitCast(componentTyAccessor(arguments), to: Any.Type.self) + pushDest(base) previousComponentAddr = componentAddr } diff --git a/test/IRGen/keypaths.sil b/test/IRGen/keypaths.sil index 851792fceeb86..977131856e2e9 100644 --- a/test/IRGen/keypaths.sil +++ b/test/IRGen/keypaths.sil @@ -27,8 +27,9 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_0018 - instantiable in-line, size 4 +// -- 0x8000_0004 - instantiable in-line, size 4 // CHECK-SAME: i32 -2147483644, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- offset of S.x // CHECK-SAME: i32 0 }> @@ -37,8 +38,9 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_0018 - instantiable in-line, size 4 +// -- 0x8000_0004 - instantiable in-line, size 4 // CHECK-SAME: i32 -2147483644, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- offset of S.y // CHECK-32-SAME: i32 4 }> // CHECK-64-SAME: i32 8 }> @@ -48,8 +50,9 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_0018 - instantiable in-line, size 4 +// -- 0x8000_0004 - instantiable in-line, size 4 // CHECK-SAME: i32 -2147483644, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- offset of S.z // CHECK-32-SAME: i32 16 }> // CHECK-64-SAME: i32 32 }> @@ -59,8 +62,9 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_0018 - instantiable in-line, size 4 +// -- 0x8000_0004 - instantiable in-line, size 4 // CHECK-SAME: i32 -2147483644, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- 0x4000_0000 (class) + offset of C.x // CHECK-32-SAME: i32 1073741836 }> // CHECK-64-SAME: i32 1073741840 }> @@ -70,8 +74,9 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_0018 - instantiable in-line, size 4 +// -- 0x8000_0004 - instantiable in-line, size 4 // CHECK-SAME: i32 -2147483644, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- 0x4000_0000 (class) + offset of C.y // CHECK-32-SAME: i32 1073741840 }> // CHECK-64-SAME: i32 1073741848 }> @@ -81,8 +86,9 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_0018 - instantiable in-line, size 4 +// -- 0x8000_0004 - instantiable in-line, size 4 // CHECK-SAME: i32 -2147483644, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- 0x4000_0000 (class) + offset of C.z // CHECK-32-SAME: i32 1073741852 }> // CHECK-64-SAME: i32 1073741872 }> @@ -92,13 +98,15 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_0010 - instantiable in-line, 
size 12 +// -- 0x8000_000c - instantiable in-line, size 12 // CHECK-32-SAME: i32 -2147483636, -// -- 0x8000_0018 - instantiable in-line, size 16 -// CHECK-64-SAME: i32 -2147483632, +// -- 0x8000_0014 - instantiable in-line, size 16 +// CHECK-64-SAME: i32 -2147483628, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- offset of S.z // CHECK-32-SAME: i32 16, // CHECK-64-SAME: i32 32, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // CHECK: %swift.type* (i8*)* // -- 0x4000_0000 (class) + offset of C.x // CHECK-32-SAME: i32 1073741836 }> @@ -109,13 +117,15 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_0010 - instantiable in-line, size 12 +// -- 0x8000_000c - instantiable in-line, size 12 // CHECK-32-SAME: i32 -2147483636, -// -- 0x8000_0018 - instantiable in-line, size 16 -// CHECK-64-SAME: i32 -2147483632, +// -- 0x8000_0014 - instantiable in-line, size 16 +// CHECK-64-SAME: i32 -2147483628, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- 0x4000_0000 (class) + offset of C.z // CHECK-32-SAME: i32 1073741852, // CHECK-64-SAME: i32 1073741872, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // CHECK: %swift.type* (i8*)* // -- offset of S.x // CHECK-SAME: i32 0 }> @@ -125,12 +135,14 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_0014 - instantiable in-line, size 20 -// CHECK-64-SAME: i32 -2147483628, +// -- 0x8000_0018 - instantiable in-line, size 24 +// CHECK-64-SAME: i32 -2147483624, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- 0x8000_000c - instantiable in-line, size 12 // CHECK-32-SAME: i32 -2147483636, // -- 0x2000_0000 - computed, get-only, identified by function pointer, no args // CHECK-SAME: i32 536870912, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // CHECK-SAME: void ()* @k_id, // CHECK-SAME: void (%TSi*, %T8keypaths1SV*)* @k_get }> @@ -139,12 +151,14 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_001c - instantiable in-line, size 28 -// CHECK-64-SAME: i32 -2147483620, +// -- 0x8000_0020 - instantiable in-line, size 32 +// CHECK-64-SAME: i32 -2147483616, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- 0x8000_0010 - instantiable in-line, size 16 // CHECK-32-SAME: i32 -2147483632, // -- 0x2a00_0000 - computed, settable, nonmutating, identified by vtable, no args // CHECK-SAME: i32 704643072, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // CHECK-SAME: [[WORD]] // CHECK-SAME: void (%TSi*, %T8keypaths1CC**)* @l_get, // CHECK-SAME: void (%TSi*, %T8keypaths1CC**)* @l_set }> @@ -154,12 +168,14 @@ sil_vtable C {} // CHECK-SAME: [[WORD]] 0, // CHECK-SAME: %swift.type* (i8*)* // CHECK-SAME: %swift.type* (i8*)* -// -- 0x8000_001c - instantiable in-line, size 28 -// CHECK-64-SAME: i32 -2147483620, +// -- 0x8000_0020 - instantiable in-line, size 32 +// CHECK-64-SAME: i32 -2147483616, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- 0x8000_0010 - instantiable in-line, size 16 // CHECK-32-SAME: i32 -2147483632, // -- 0x3c00_0000 - computed, settable, nonmutating, identified by property offset, no args // CHECK-SAME: i32 1006632960, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // CHECK-SAME: [[WORD]] // CHECK-SAME: void (%swift.function*, %T8keypaths1SV*)* @m_get, // CHECK-SAME: void (%swift.function*, %T8keypaths1SV*)* @m_set }> @@ -172,6 +188,7 @@ sil_vtable C {} // CHECK-SAME: %swift.type* (i8*)* [[I_GET_A:@[a-z_.0-9]+]], // -- size 8 // 
CHECK-SAME: i32 8, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- 0x1ffffffe - struct with runtime-resolved offset // CHECK-SAME: i32 536870910, // CHECK-32-SAME: i32 12 }> @@ -184,6 +201,7 @@ sil_vtable C {} // CHECK-SAME: %swift.type* (i8*)* [[J_GET_A:@[a-z_.0-9]+]], // -- size 8 // CHECK-SAME: i32 8, +// CHECK-64-SAME: [4 x i8] zeroinitializer, // -- 0x1ffffffe - struct with runtime-resolved offset // CHECK-SAME: i32 536870910, // CHECK-32-SAME: i32 16 }> diff --git a/test/stdlib/KeyPathImplementation.swift b/test/stdlib/KeyPathImplementation.swift index 70e9f1c198882..06ff84b6f30b9 100644 --- a/test/stdlib/KeyPathImplementation.swift +++ b/test/stdlib/KeyPathImplementation.swift @@ -135,20 +135,33 @@ struct TestKeyPathBuilder { } mutating func push(_ value: UInt32) { - assert(buffer.count >= 4, "not enough room") buffer.storeBytes(of: value, as: UInt32.self) buffer = .init(start: buffer.baseAddress! + 4, count: buffer.count - 4) } + mutating func push(_ value: Any.Type) { + var misalign = Int(bitPattern: buffer.baseAddress) % MemoryLayout.alignment + if misalign != 0 { + misalign = MemoryLayout.alignment - misalign + buffer = .init(start: buffer.baseAddress! + misalign, + count: buffer.count - misalign) + } + buffer.storeBytes(of: value, as: Any.Type.self) + buffer = .init(start: buffer.baseAddress! + MemoryLayout.size, + count: buffer.count - MemoryLayout.size) + } mutating func addHeader(trivial: Bool, hasReferencePrefix: Bool) { assert(state == .header, "not expecting a header") - let size = buffer.count - 4 + let size = buffer.count - MemoryLayout.size assert(buffer.count > 0 && buffer.count <= 0x3FFF_FFFF, "invalid buffer size") let header: UInt32 = UInt32(size) | (trivial ? 0x8000_0000 : 0) | (hasReferencePrefix ? 0x4000_0000 : 0) push(header) + if MemoryLayout.size == 8 { + push(0) + } self.hasReferencePrefix = hasReferencePrefix state.advance() } @@ -194,18 +207,7 @@ struct TestKeyPathBuilder { mutating func addType(_ type: Any.Type) { assert(state == .type, "not expecting a type") - if MemoryLayout.size == 8 { - // Components are 4-byte aligned, but pointers are 8-byte aligned, so - // we have to store word-by-word - let words = unsafeBitCast(type, to: (UInt32, UInt32).self) - push(words.0) - push(words.1) - } else if MemoryLayout.size == 4 { - let word = unsafeBitCast(type, to: UInt32.self) - push(word) - } else { - fatalError("unsupported architecture") - } + push(type) state.advance() } } @@ -223,30 +225,30 @@ extension AnyKeyPath { keyPathImpl.test("struct components") { let s_x = WritableKeyPath, Int> - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addStructComponent(offset: S.x_offset) } let s_y = WritableKeyPath, LifetimeTracked?> - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addStructComponent(offset: S.y_offset) } let s_z = WritableKeyPath, String> - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addStructComponent(offset: S.z_offset) } let s_p = WritableKeyPath, Point> - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addStructComponent(offset: S.p_offset) } - let twoComponentSize = 12 + MemoryLayout.size + let twoComponentSize = MemoryLayout.size * 3 + 4 let s_p_x = WritableKeyPath, Double> 
.build(capacityInBytes: twoComponentSize) { $0.addHeader(trivial: true, hasReferencePrefix: false) @@ -331,19 +333,19 @@ keyPathImpl.test("struct components") { keyPathImpl.test("class components") { let c_x = ReferenceWritableKeyPath, Int> - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addClassComponent(offset: C.x_offset) } let c_y = ReferenceWritableKeyPath, LifetimeTracked?> - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addClassComponent(offset: C.y_offset) } let c_z = ReferenceWritableKeyPath, String> - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addClassComponent(offset: C.z_offset) } @@ -394,7 +396,7 @@ keyPathImpl.test("class components") { keyPathImpl.test("reference prefix") { let s_c_x = ReferenceWritableKeyPath, Int> - .build(capacityInBytes: 12 + MemoryLayout.size) { + .build(capacityInBytes: 3 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) $0.addStructComponent(offset: S.c_offset, endsReferencePrefix: true) @@ -403,7 +405,7 @@ keyPathImpl.test("reference prefix") { } let s_c_y = ReferenceWritableKeyPath, LifetimeTracked?> - .build(capacityInBytes: 12 + MemoryLayout.size) { + .build(capacityInBytes: 3 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) $0.addStructComponent(offset: S.c_offset, endsReferencePrefix: true) @@ -412,7 +414,7 @@ keyPathImpl.test("reference prefix") { } let s_c_z = ReferenceWritableKeyPath, String> - .build(capacityInBytes: 12 + MemoryLayout.size) { + .build(capacityInBytes: 3 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) $0.addStructComponent(offset: S.c_offset, endsReferencePrefix: true) @@ -484,14 +486,14 @@ keyPathImpl.test("reference prefix") { keyPathImpl.test("overflowed offsets") { let s_p = WritableKeyPath, Point> - .build(capacityInBytes: 12) { + .build(capacityInBytes: MemoryLayout.size + 8) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addStructComponent(offset: S.p_offset, forceOverflow: true) } let c_z = ReferenceWritableKeyPath, String> - .build(capacityInBytes: 12) { + .build(capacityInBytes: MemoryLayout.size + 8) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addClassComponent(offset: C.z_offset, forceOverflow: true) @@ -516,7 +518,7 @@ keyPathImpl.test("overflowed offsets") { keyPathImpl.test("equality") { let s_c_z_p_x = ReferenceWritableKeyPath>, Double> - .build(capacityInBytes: 20 + 3 * MemoryLayout.size) { + .build(capacityInBytes: 7 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) // S>.c $0.addStructComponent(offset: S>.c_offset, @@ -537,7 +539,7 @@ keyPathImpl.test("equality") { // Structurally equivalent to s_c_z_p_x let s_c_z_p_x_2 = ReferenceWritableKeyPath>, Double> - .build(capacityInBytes: 20 + 3 * MemoryLayout.size) { + .build(capacityInBytes: 7 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) // S>.c $0.addStructComponent(offset: S>.c_offset, @@ -561,7 +563,7 @@ keyPathImpl.test("equality") { // Structurally equivalent, force-overflowed offset components let s_c_z_p_x_3 = ReferenceWritableKeyPath>, Double> - .build(capacityInBytes: 36 + 3 * MemoryLayout.size) { + .build(capacityInBytes: 4 * MemoryLayout.size + 4 * 8) { $0.addHeader(trivial: true, 
hasReferencePrefix: true) // S>.c $0.addStructComponent(offset: S>.c_offset, @@ -589,7 +591,7 @@ keyPathImpl.test("equality") { // Same path type, different suffixes let s_c_z_p_y = ReferenceWritableKeyPath>, Double> - .build(capacityInBytes: 20 + 3 * MemoryLayout.size) { + .build(capacityInBytes: 7 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) // S>.c $0.addStructComponent(offset: S>.c_offset, @@ -610,7 +612,7 @@ keyPathImpl.test("equality") { // Different path type let s_c_z_p = ReferenceWritableKeyPath>, Point> - .build(capacityInBytes: 16 + 2 * MemoryLayout.size) { + .build(capacityInBytes: 5 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) // S>.c $0.addStructComponent(offset: S>.c_offset, @@ -628,7 +630,7 @@ keyPathImpl.test("equality") { // Same path, no reference prefix let s_c_z_p_x_readonly = KeyPath>, Double> - .build(capacityInBytes: 20 + 3 * MemoryLayout.size) { + .build(capacityInBytes: 7 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) // S>.c $0.addStructComponent(offset: S>.c_offset) @@ -648,7 +650,7 @@ keyPathImpl.test("equality") { // Same path type, different paths let s_p_y_readonly = KeyPath>, Double> - .build(capacityInBytes: 12 + MemoryLayout.size) { + .build(capacityInBytes: 3 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) // S>.p $0.addStructComponent(offset: S>.p_offset) @@ -661,7 +663,7 @@ keyPathImpl.test("equality") { expectNotEqual(s_c_z_p_x_readonly, s_p_y_readonly) let o_o_o_o = ReferenceWritableKeyPath - .build(capacityInBytes: 16 + 2*MemoryLayout.size) { + .build(capacityInBytes: 5 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) // O.o $0.addClassComponent(offset: classHeaderSize) @@ -675,7 +677,7 @@ keyPathImpl.test("equality") { // Different reference prefix length let o_o_o_o_rp1 = ReferenceWritableKeyPath - .build(capacityInBytes: 16 + 2*MemoryLayout.size) { + .build(capacityInBytes: 5 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) // O.o $0.addClassComponent(offset: classHeaderSize, @@ -688,7 +690,7 @@ keyPathImpl.test("equality") { $0.addClassComponent(offset: classHeaderSize) } let o_o_o_o_rp2 = ReferenceWritableKeyPath - .build(capacityInBytes: 16 + 2*MemoryLayout.size) { + .build(capacityInBytes: 5 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) // O.o $0.addClassComponent(offset: classHeaderSize) @@ -701,7 +703,7 @@ keyPathImpl.test("equality") { $0.addClassComponent(offset: classHeaderSize) } let o_o_o_o_rp2_2 = ReferenceWritableKeyPath - .build(capacityInBytes: 16 + 2*MemoryLayout.size) { + .build(capacityInBytes: 5 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) // O.o $0.addClassComponent(offset: classHeaderSize) @@ -729,7 +731,7 @@ keyPathImpl.test("equality") { // Same type, different length of components with same prefix let o_o_o = ReferenceWritableKeyPath - .build(capacityInBytes: 12 + MemoryLayout.size) { + .build(capacityInBytes: 3 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) // O.o $0.addClassComponent(offset: classHeaderSize) @@ -744,12 +746,12 @@ keyPathImpl.test("equality") { keyPathImpl.test("appending") { let s_p = WritableKeyPath, Point> - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addStructComponent(offset: S.p_offset) } let p_y = 
WritableKeyPath - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addStructComponent(offset: Point.y_offset) } @@ -772,7 +774,7 @@ keyPathImpl.test("appending") { expectEqual(s_p_y.hashValue, s_p_y2.hashValue) let s_p_y_manual = WritableKeyPath, Double> - .build(capacityInBytes: 12 + MemoryLayout.size) { + .build(capacityInBytes: 3 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addStructComponent(offset: S.p_offset) $0.addType(Point.self) @@ -783,7 +785,7 @@ keyPathImpl.test("appending") { expectEqual(s_p_y.hashValue, s_p_y_manual.hashValue) let c_z = ReferenceWritableKeyPath>, S> - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addClassComponent(offset: C>.z_offset) } @@ -799,7 +801,7 @@ keyPathImpl.test("appending") { expectEqual(value2.z.p.x, 0.5) let c_z_p_y_manual = ReferenceWritableKeyPath>, Double> - .build(capacityInBytes: 16 + MemoryLayout.size * 2) { + .build(capacityInBytes: 5 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addClassComponent(offset: C>.z_offset) $0.addType(S.self) @@ -813,7 +815,7 @@ keyPathImpl.test("appending") { expectEqual(c_z_p_y.hashValue, c_z_p_y_manual.hashValue) let s_c = WritableKeyPath>, C>> - .build(capacityInBytes: 8) { + .build(capacityInBytes: MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: false) $0.addStructComponent(offset: S>.c_offset) } @@ -828,7 +830,7 @@ keyPathImpl.test("appending") { expectEqual(value2[keyPath: c_z_p_y], 11.0) let s_c_z_p_y_manual = ReferenceWritableKeyPath>, Double> - .build(capacityInBytes: 20 + MemoryLayout.size * 3) { + .build(capacityInBytes: 7 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) $0.addStructComponent(offset: S>.c_offset, endsReferencePrefix: true) @@ -846,7 +848,7 @@ keyPathImpl.test("appending") { typealias CP = CratePair>, Int> let cratePair_left_value = ReferenceWritableKeyPath>> - .build(capacityInBytes: 12 + MemoryLayout.size) { + .build(capacityInBytes: 3 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) $0.addStructComponent(offset: CratePair>, Int>.left_offset, endsReferencePrefix: true) @@ -868,7 +870,7 @@ keyPathImpl.test("appending") { let cratePair_left_value_c_z_p_y_manual = ReferenceWritableKeyPath - .build(capacityInBytes: 28 + 5*MemoryLayout.size) { + .build(capacityInBytes: 11 * MemoryLayout.size + 4) { $0.addHeader(trivial: true, hasReferencePrefix: true) $0.addStructComponent(offset: CP.left_offset) $0.addType(Crate>>.self)
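The updated `capacityInBytes` values in KeyPathImplementation.swift all reduce to one formula for the simple, non-overflowed offset components used in these tests: a buffer header padded out to one word, a 4-byte component header per component, and a pointer-aligned, word-sized type reference between consecutive components. A sketch of that formula; the helper is illustrative and not part of the test.

// capacity(n) = (2*n - 1) * wordSize + 4 for n simple 4-byte components.
func capacity(components n: Int, wordSize: Int) -> Int {
  return (2 * n - 1) * wordSize + 4
}
assert(capacity(components: 1, wordSize: 8) == 12)  // MemoryLayout<Int>.size + 4
assert(capacity(components: 2, wordSize: 8) == 28)  // 3 * MemoryLayout<Int>.size + 4
assert(capacity(components: 4, wordSize: 8) == 60)  // 7 * MemoryLayout<Int>.size + 4
assert(capacity(components: 2, wordSize: 4) == 16)  // same formula on 32-bit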
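The header comments in the `keypaths.sil` checks above (for example `0x8000_0004 - instantiable in-line, size 4`) correspond to the signed i32 literals FileCheck matches: the literal is simply the two's-complement view of the 32-bit header word, whose low bits hold the buffer size. A quick sketch of the conversion; the helper name is illustrative.

// Convert a header word to the signed i32 decimal that appears in the CHECK lines.
func checkConstant(forHeader header: UInt32) -> Int32 {
  return Int32(bitPattern: header)
}
assert(checkConstant(forHeader: 0x8000_0004) == -2147483644)  // in-place instantiable, size 4
assert(checkConstant(forHeader: 0x8000_0018) == -2147483624)  // in-place instantiable, size 24
assert(checkConstant(forHeader: 0x2000_0000) == 536870912)    // computed, get-only component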