From ebdd00c0718e5243ca1108661be09c0088f4c0bc Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Wed, 24 Aug 2022 16:16:45 -0600 Subject: [PATCH 01/14] Proposed design for skipping of KeyPath projections across trivially-typed memory. --- stdlib/public/core/KeyPath.swift | 153 ++++++++++++++++++++++++++----- 1 file changed, 129 insertions(+), 24 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 12bdb3e579778..294bf3ad37650 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -35,6 +35,8 @@ internal func _abstract( /// type. @_objcRuntimeName(_TtCs11_AnyKeyPath) public class AnyKeyPath: Hashable, _AppendKeyPath { + internal var _isPureStructKeyPath: Bool? + internal var _pureStructValueOffset: Int = 0 /// The root type for this key path. @inlinable public static var rootType: Any.Type { @@ -150,15 +152,15 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { ) -> Self { _internalInvariant(bytes > 0 && bytes % 4 == 0, "capacity must be multiple of 4 bytes") - let result = Builtin.allocWithTailElems_1(self, (bytes/4)._builtinWordValue, + let keypath = Builtin.allocWithTailElems_1(self, (bytes/4)._builtinWordValue, Int32.self) - result._kvcKeyPathStringPtr = nil - let base = UnsafeMutableRawPointer(Builtin.projectTailElems(result, + keypath._kvcKeyPathStringPtr = nil + let base = UnsafeMutableRawPointer(Builtin.projectTailElems(keypath, Int32.self)) body(UnsafeMutableRawBufferPointer(start: base, count: bytes)) - return result + keypath._computeOffsetForPureStructKeypath() + return keypath } - final internal func withBuffer(_ f: (KeyPathBuffer) throws -> T) rethrows -> T { defer { _fixLifetime(self) } @@ -166,31 +168,94 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { return try f(KeyPathBuffer(base: base)) } - @usableFromInline // Exposed as public API by MemoryLayout.offset(of:) - internal var _storedInlineOffset: Int? { - return withBuffer { - var buffer = $0 + public var isPureStructKeyPath: Bool { + return _isPureStructKeyPath ?? false + } - // The identity key path is effectively a stored keypath of type Self - // at offset zero - if buffer.data.isEmpty { return 0 } + internal func isClass(_ item: Any.Type) -> Bool { + // Displays "warning: 'is' test is always true" at compile time, but that's not actually the case. + return (item is AnyObject) + } - var offset = 0 - while true { - let (rawComponent, optNextType) = buffer.next() - switch rawComponent.header.kind { - case .struct: - offset += rawComponent._structOrClassOffset + // TODO: Find a quicker way to see if this is a tuple. + internal func isTuple(_ item: Any) -> Bool { + // Unwraps type information if possible. + // Otherwise, everything the Mirror sees would be "Optional". + func unwrapType(_ any: T) -> Any { + let mirror = Mirror(reflecting: any) + guard mirror.displayStyle == .optional, let first = mirror.children.first else { + return any + } + return first.value + } - case .class, .computed, .optionalChain, .optionalForce, .optionalWrap, .external: - return .none + let mirror = Mirror(reflecting: unwrapType(item)) + let description = mirror.description + let offsetOfFirstBracketForMirrorDescriptionOfTuple = 11 + let idx = description.index(description.startIndex, offsetBy: offsetOfFirstBracketForMirrorDescriptionOfTuple) + if description[idx] == "(" { + return true + } + return false + } + + // If this keypath traverses structs only, it'll have a predictable memory layout. 
+ // We can then jump to the value directly in _projectReadOnly(). + internal func _computeOffsets() -> (offset: Int, isPureStruct: Bool, isTuple: Bool) { + _pureStructValueOffset = 0 + var isPureStruct = true + var _isTuple = false + defer { + _isPureStructKeyPath = isPureStruct + } + withBuffer { + var buffer = $0 + if buffer.data.isEmpty { + if isClass(Self._rootAndValueType.root) { + isPureStruct = false + } + } else { + while true { + let (rawComponent, optNextType) = buffer.next() + if isTuple(optNextType as Any) { + isPureStruct = false + _isTuple = true + } + switch rawComponent.header.kind { + case .struct: + _pureStructValueOffset += rawComponent._structOrClassOffset + case .class, .computed, .optionalChain, .optionalForce, .optionalWrap, .external: + isPureStruct = false + } + if optNextType == nil { + break + } + } } + } + return (_pureStructValueOffset, isPureStruct, _isTuple) + } + + internal func _computeOffsetForPureStructKeypath() { + _ = _computeOffsets() + } - if optNextType == nil { return .some(offset) } + // This function was refactored since _computeOffsets() was performing + // essentially the same computation. + @usableFromInline // Exposed as public API by MemoryLayout.offset(of:) + internal var _storedInlineOffset: Int? { + // TODO: Cache this value in a similar manner to _pureStructValueOffset. + // The current design assumes keypath read and write operations will be called + // much more often than MemoryLayout.offset(of:). + let offsetInformation = _computeOffsets() + if offsetInformation.isPureStruct || offsetInformation.isTuple { + return .some(offsetInformation.offset) + } else { + return .none } } } -} + /// A partially type-erased key path, from a concrete root type to any /// resulting value type. @@ -246,6 +311,17 @@ public class KeyPath: PartialKeyPath { @usableFromInline internal final func _projectReadOnly(from root: Root) -> Value { + + //One performance improvement is to skip right to Value + //if this keypath traverses through structs only. + if isPureStructKeyPath + { + return withUnsafeBytes(of: root) { + let pointer = $0.baseAddress.unsafelyUnwrapped.advanced(by: _pureStructValueOffset) + return pointer.assumingMemoryBound(to: Value.self).pointee + } + } + // TODO: For perf, we could use a local growable buffer instead of Any var curBase: Any = root return withBuffer { @@ -302,6 +378,14 @@ public class WritableKeyPath: KeyPath { @usableFromInline internal func _projectMutableAddress(from base: UnsafePointer) -> (pointer: UnsafeMutablePointer, owner: AnyObject?) { + + // Don't declare "p" above this if-statement; it may slow things down. + if isPureStructKeyPath + { + let p = UnsafeRawPointer(base).advanced(by: _pureStructValueOffset) + return (pointer: UnsafeMutablePointer( + mutating: p.assumingMemoryBound(to: Value.self)), owner: nil) + } var p = UnsafeRawPointer(base) var type: Any.Type = Root.self var keepAlive: AnyObject? @@ -2252,6 +2336,19 @@ extension _AppendKeyPath /* where Self == ReferenceWritableKeyPath */ { } } +/// Updates information pertaining to the types associated with each key path. +/// +/// Note: Currently we only distinguish between keypaths that traverse only structs to get to the final value, +/// and all other types. This is done for performance reasons. +/// Other type information may be handled in the future to improve performance. 
+internal func _processAppendingKeyPathType(root: inout AnyKeyPath, leaf: AnyKeyPath) { + root._isPureStructKeyPath = root.isPureStructKeyPath && leaf.isPureStructKeyPath + if let isPureStruct = root._isPureStructKeyPath, isPureStruct { + root._computeOffsetForPureStructKeypath() + } +} + + @usableFromInline internal func _tryToAppendKeyPaths( root: AnyKeyPath, @@ -2277,7 +2374,13 @@ internal func _tryToAppendKeyPaths( } return _openExistential(rootValue, do: open2) } - return _openExistential(rootRoot, do: open) + var returnValue:AnyKeyPath = _openExistential(rootRoot, do: open) + _processAppendingKeyPathType(root: &returnValue, leaf: leaf) + if let returnValue = returnValue as? Result + { + return returnValue + } + return nil } @usableFromInline @@ -2289,7 +2392,7 @@ internal func _appendingKeyPaths< leaf: KeyPath ) -> Result { let resultTy = type(of: root).appendedType(with: type(of: leaf)) - return root.withBuffer { + var returnValue:AnyKeyPath = root.withBuffer { var rootBuffer = $0 return leaf.withBuffer { var leafBuffer = $0 @@ -2423,6 +2526,8 @@ internal func _appendingKeyPaths< return unsafeDowncast(result, to: Result.self) } } + _processAppendingKeyPathType(root: &returnValue, leaf: leaf) + return returnValue as! Result } // The distance in bytes from the address point of a KeyPath object to its From 9fb8f598eb38859e0777ecfc1057bee6a0a35e11 Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Wed, 24 Aug 2022 16:24:26 -0600 Subject: [PATCH 02/14] Clarified a comment in isTuple(). --- stdlib/public/core/KeyPath.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 294bf3ad37650..6ee9bdf2bb2bb 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -180,7 +180,7 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { // TODO: Find a quicker way to see if this is a tuple. internal func isTuple(_ item: Any) -> Bool { // Unwraps type information if possible. - // Otherwise, everything the Mirror sees would be "Optional". + // Otherwise, everything the Mirror handling "item" sees would be "Optional". func unwrapType(_ any: T) -> Any { let mirror = Mirror(reflecting: any) guard mirror.displayStyle == .optional, let first = mirror.children.first else { From 5efc87c67e62fad48f2fa61cd3363eb4034e4d5b Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Fri, 26 Aug 2022 10:56:25 -0600 Subject: [PATCH 03/14] ABI stability fix: isPureStructKeyPath should be internal. --- stdlib/public/core/KeyPath.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 6ee9bdf2bb2bb..22d3838be82c3 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -168,7 +168,7 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { return try f(KeyPathBuffer(base: base)) } - public var isPureStructKeyPath: Bool { + internal var isPureStructKeyPath: Bool { return _isPureStructKeyPath ?? false } From 353eb9ffd5ec3e9bfe736a619e51c9e61a7eeadf Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Fri, 26 Aug 2022 14:06:08 -0600 Subject: [PATCH 04/14] KeyPath offset tests were breaking on Ubuntu 20.04. It's possible _storedInlineOffset wasn't factored out correctly. 
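For reference, the behaviour the failing offset tests exercise is the one exposed through MemoryLayout.offset(of:): for a key path that only crosses stored struct properties, the reported offset is the sum of the per-component stored offsets. A small self-contained check of that invariant, using hypothetical types and assuming the usual layout rules:

    struct Inner {
      var a: Int8   // offset 0
      var b: Int32  // offset 4, after alignment padding
    }
    struct Outer {
      var x: Int64      // offset 0
      var inner: Inner  // offset 8
    }

    // \Outer.inner.b is a struct-only key path: 8 (inner) + 4 (b) == 12.
    let offset = MemoryLayout<Outer>.offset(of: \Outer.inner.b)
    assert(offset == 12)
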
--- stdlib/public/core/KeyPath.swift | 63 +++++++++++++++++++++++--------- 1 file changed, 46 insertions(+), 17 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 22d3838be82c3..865f7642dfc08 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -199,12 +199,15 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { return false } + // TODO: Merge the functionality of _computeOffsetForPureStructKeypath, + // _recalculateOffsetForPureStructKeyPath() and _storedInlineOffset. + // If this keypath traverses structs only, it'll have a predictable memory layout. // We can then jump to the value directly in _projectReadOnly(). - internal func _computeOffsets() -> (offset: Int, isPureStruct: Bool, isTuple: Bool) { + internal func _computeOffsetForPureStructKeypath() { _pureStructValueOffset = 0 var isPureStruct = true - var _isTuple = false + var exitEarly = false defer { _isPureStructKeyPath = isPureStruct } @@ -215,17 +218,18 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { isPureStruct = false } } else { - while true { + while !exitEarly { let (rawComponent, optNextType) = buffer.next() if isTuple(optNextType as Any) { isPureStruct = false - _isTuple = true + break } switch rawComponent.header.kind { case .struct: _pureStructValueOffset += rawComponent._structOrClassOffset case .class, .computed, .optionalChain, .optionalForce, .optionalWrap, .external: isPureStruct = false + exitEarly = true } if optNextType == nil { break @@ -233,25 +237,50 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { } } } - return (_pureStructValueOffset, isPureStruct, _isTuple) } - internal func _computeOffsetForPureStructKeypath() { - _ = _computeOffsets() + // Recalculates the offset in the case where one pure struct keypath is appended to another. + internal func _recalculateOffsetForPureStructKeyPath() { + _pureStructValueOffset = 0 + withBuffer { + var buffer = $0 + while true { + let (rawComponent, optNextType) = buffer.next() + switch rawComponent.header.kind { + case .struct: + _pureStructValueOffset += rawComponent._structOrClassOffset + case .class, .computed, .optionalChain, .optionalForce, .optionalWrap, .external: + return + } + if optNextType == nil { + break + } + } + } } - // This function was refactored since _computeOffsets() was performing - // essentially the same computation. @usableFromInline // Exposed as public API by MemoryLayout.offset(of:) internal var _storedInlineOffset: Int? { - // TODO: Cache this value in a similar manner to _pureStructValueOffset. - // The current design assumes keypath read and write operations will be called - // much more often than MemoryLayout.offset(of:). 
- let offsetInformation = _computeOffsets() - if offsetInformation.isPureStruct || offsetInformation.isTuple { - return .some(offsetInformation.offset) - } else { - return .none + return withBuffer { + var buffer = $0 + + // The identity key path is effectively a stored keypath of type Self + // at offset zero + if buffer.data.isEmpty { return 0 } + + var offset = 0 + while true { + let (rawComponent, optNextType) = buffer.next() + switch rawComponent.header.kind { + case .struct: + offset += rawComponent._structOrClassOffset + + case .class, .computed, .optionalChain, .optionalForce, .optionalWrap, .external: + return .none + } + + if optNextType == nil { return .some(offset) } + } } } } From afebae56949baba4b1a86391459a0cc2c3b45216 Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Fri, 23 Sep 2022 17:19:56 -0600 Subject: [PATCH 05/14] Moved computation of offset for pureStructKeyPaths into KeyPathPatternVisitors. Next expected commit: Move information stored in _pureStructValueOffset to _kvcKeyPathStringPtr (or similar). --- stdlib/public/core/KeyPath.swift | 145 +++++++++++++++---------------- 1 file changed, 70 insertions(+), 75 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 865f7642dfc08..13cb9c1056ad1 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -158,7 +158,7 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { let base = UnsafeMutableRawPointer(Builtin.projectTailElems(keypath, Int32.self)) body(UnsafeMutableRawBufferPointer(start: base, count: bytes)) - keypath._computeOffsetForPureStructKeypath() + keypath._pureStructValueOffset = 0 return keypath } final internal func withBuffer(_ f: (KeyPathBuffer) throws -> T) rethrows -> T { @@ -173,7 +173,8 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { } internal func isClass(_ item: Any.Type) -> Bool { - // Displays "warning: 'is' test is always true" at compile time, but that's not actually the case. + // Displays "warning: 'is' test is always true" at compile time, + // but that's not actually the case. return (item is AnyObject) } @@ -192,73 +193,14 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { let mirror = Mirror(reflecting: unwrapType(item)) let description = mirror.description let offsetOfFirstBracketForMirrorDescriptionOfTuple = 11 - let idx = description.index(description.startIndex, offsetBy: offsetOfFirstBracketForMirrorDescriptionOfTuple) + let idx = description.index(description.startIndex, + offsetBy: offsetOfFirstBracketForMirrorDescriptionOfTuple) if description[idx] == "(" { return true } return false } - // TODO: Merge the functionality of _computeOffsetForPureStructKeypath, - // _recalculateOffsetForPureStructKeyPath() and _storedInlineOffset. - - // If this keypath traverses structs only, it'll have a predictable memory layout. - // We can then jump to the value directly in _projectReadOnly(). 
- internal func _computeOffsetForPureStructKeypath() { - _pureStructValueOffset = 0 - var isPureStruct = true - var exitEarly = false - defer { - _isPureStructKeyPath = isPureStruct - } - withBuffer { - var buffer = $0 - if buffer.data.isEmpty { - if isClass(Self._rootAndValueType.root) { - isPureStruct = false - } - } else { - while !exitEarly { - let (rawComponent, optNextType) = buffer.next() - if isTuple(optNextType as Any) { - isPureStruct = false - break - } - switch rawComponent.header.kind { - case .struct: - _pureStructValueOffset += rawComponent._structOrClassOffset - case .class, .computed, .optionalChain, .optionalForce, .optionalWrap, .external: - isPureStruct = false - exitEarly = true - } - if optNextType == nil { - break - } - } - } - } - } - - // Recalculates the offset in the case where one pure struct keypath is appended to another. - internal func _recalculateOffsetForPureStructKeyPath() { - _pureStructValueOffset = 0 - withBuffer { - var buffer = $0 - while true { - let (rawComponent, optNextType) = buffer.next() - switch rawComponent.header.kind { - case .struct: - _pureStructValueOffset += rawComponent._structOrClassOffset - case .class, .computed, .optionalChain, .optionalForce, .optionalWrap, .external: - return - } - if optNextType == nil { - break - } - } - } - } - @usableFromInline // Exposed as public API by MemoryLayout.offset(of:) internal var _storedInlineOffset: Int? { return withBuffer { @@ -2370,10 +2312,15 @@ extension _AppendKeyPath /* where Self == ReferenceWritableKeyPath */ { /// Note: Currently we only distinguish between keypaths that traverse only structs to get to the final value, /// and all other types. This is done for performance reasons. /// Other type information may be handled in the future to improve performance. -internal func _processAppendingKeyPathType(root: inout AnyKeyPath, leaf: AnyKeyPath) { - root._isPureStructKeyPath = root.isPureStructKeyPath && leaf.isPureStructKeyPath - if let isPureStruct = root._isPureStructKeyPath, isPureStruct { - root._computeOffsetForPureStructKeypath() +internal func _processOffsetForAppendedKeyPath( + appendedKeyPath: inout AnyKeyPath, + root: AnyKeyPath, + leaf: AnyKeyPath +) { + appendedKeyPath._isPureStructKeyPath = root.isPureStructKeyPath && leaf.isPureStructKeyPath + if let isPureStruct = appendedKeyPath._isPureStructKeyPath, isPureStruct { + appendedKeyPath._pureStructValueOffset = root._pureStructValueOffset + leaf + ._pureStructValueOffset } } @@ -2403,11 +2350,10 @@ internal func _tryToAppendKeyPaths( } return _openExistential(rootValue, do: open2) } - var returnValue:AnyKeyPath = _openExistential(rootRoot, do: open) - _processAppendingKeyPathType(root: &returnValue, leaf: leaf) - if let returnValue = returnValue as? Result - { - return returnValue + var returnValue: AnyKeyPath = _openExistential(rootRoot, do: open) + _processOffsetForAppendedKeyPath(appendedKeyPath: &returnValue, root: root, leaf: leaf) + if let returnValue = returnValue as? Result { + return returnValue } return nil } @@ -2555,7 +2501,7 @@ internal func _appendingKeyPaths< return unsafeDowncast(result, to: Result.self) } } - _processAppendingKeyPathType(root: &returnValue, leaf: leaf) + _processOffsetForAppendedKeyPath(appendedKeyPath: &returnValue, root: root, leaf: leaf) return returnValue as! 
Result } @@ -2639,12 +2585,33 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, let (keyPathClass, rootType, size, _) = _getKeyPathClassAndInstanceSizeFromPattern(patternPtr, arguments) + var offset = UInt32(0) + var isPureStruct = true + var keyPathBase: Any.Type? // Allocate the instance. let instance = keyPathClass._create(capacityInBytes: size) { instanceData in // Instantiate the pattern into the instance. - _instantiateKeyPathBuffer(patternPtr, instanceData, rootType, arguments) + let returnValue = _instantiateKeyPathBuffer(patternPtr, instanceData, rootType, arguments) + offset += returnValue.structOffset + + let booleans = returnValue.componentStructList + if(booleans.count == 0) { + isPureStruct = false + } + for value in booleans { + isPureStruct = isPureStruct && value + } + keyPathBase = rootType } + // TODO: See if there's a better way to eliminate + // tuples from the current `isPureStruct` optimization. + if let keyPathBase = keyPathBase, instance.isTuple(keyPathBase) { + isPureStruct = false + } + instance._pureStructValueOffset = Int(offset) + instance._isPureStructKeyPath = isPureStruct + // Adopt the KVC string from the pattern. let kvcStringBase = patternPtr.advanced(by: 12) let kvcStringOffset = kvcStringBase.load(as: Int32.self) @@ -2848,6 +2815,10 @@ internal protocol KeyPathPatternVisitor { mutating func visitIntermediateComponentType(metadataRef: MetadataReference) mutating func finish() + + // Offset for pure-struct keypaths. + var structOffset: UInt32 { get set } + var isPureStruct: [Bool] { get set } } internal func _resolveRelativeAddress(_ base: UnsafeRawPointer, @@ -3175,6 +3146,8 @@ internal struct GetKeyPathClassAndInstanceSizeFromPattern var leaf: Any.Type! var genericEnvironment: UnsafeRawPointer? let patternArgs: UnsafeRawPointer? + var structOffset: UInt32 = 0 + var isPureStruct: [Bool] = [] init(patternArgs: UnsafeRawPointer?) { self.patternArgs = patternArgs @@ -3382,6 +3355,8 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { var genericEnvironment: UnsafeRawPointer? let patternArgs: UnsafeRawPointer? var base: Any.Type + var structOffset: UInt32 = 0 + var isPureStruct: [Bool] = [] init(destData: UnsafeMutableRawBufferPointer, patternArgs: UnsafeRawPointer?, @@ -3454,6 +3429,12 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { mutable: Bool, offset: KeyPathPatternStoredOffset) { let previous = updatePreviousComponentAddr() + switch kind { + case .struct: + isPureStruct.append(true) + default: + isPureStruct.append(false) + } switch kind { case .class: // A mutable class property can end the reference prefix. @@ -3470,6 +3451,12 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { mutable: mutable, inlineOffset: value) pushDest(header) + switch kind { + case .struct: + structOffset += value + default: + break + } case .outOfLine(let offset): let header = RawKeyPathComponent.Header(storedWithOutOfLineOffset: kind, mutable: mutable) @@ -3515,6 +3502,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { setter: UnsafeRawPointer?, arguments: KeyPathPatternComputedArguments?, externalArgs: UnsafeBufferPointer?) { + isPureStruct.append(false) let previous = updatePreviousComponentAddr() let settable = setter != nil // A nonmutating settable property can end the reference prefix. 
@@ -3654,22 +3642,26 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { } mutating func visitOptionalChainComponent() { + isPureStruct.append(false) let _ = updatePreviousComponentAddr() let header = RawKeyPathComponent.Header(optionalChain: ()) pushDest(header) } mutating func visitOptionalWrapComponent() { + isPureStruct.append(false) let _ = updatePreviousComponentAddr() let header = RawKeyPathComponent.Header(optionalWrap: ()) pushDest(header) } mutating func visitOptionalForceComponent() { + isPureStruct.append(false) let _ = updatePreviousComponentAddr() let header = RawKeyPathComponent.Header(optionalForce: ()) pushDest(header) } mutating func visitIntermediateComponentType(metadataRef: MetadataReference) { + isPureStruct.append(false) // Get the metadata for the intermediate type. let metadata = _resolveKeyPathMetadataReference( metadataRef, @@ -3694,6 +3686,8 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { var sizeVisitor: GetKeyPathClassAndInstanceSizeFromPattern var instantiateVisitor: InstantiateKeyPathBuffer let origDest: UnsafeMutableRawPointer + var structOffset: UInt32 = 0 + var isPureStruct: [Bool] = [] init(sizeVisitor: GetKeyPathClassAndInstanceSizeFromPattern, instantiateVisitor: InstantiateKeyPathBuffer) { @@ -3795,7 +3789,7 @@ internal func _instantiateKeyPathBuffer( _ origDestData: UnsafeMutableRawBufferPointer, _ rootType: Any.Type, _ arguments: UnsafeRawPointer -) { +) -> (structOffset: UInt32, componentStructList: [Bool]) { let destHeaderPtr = origDestData.baseAddress.unsafelyUnwrapped var destData = UnsafeMutableRawBufferPointer( start: destHeaderPtr.advanced(by: MemoryLayout.size), @@ -3847,6 +3841,7 @@ internal func _instantiateKeyPathBuffer( endOfReferencePrefixComponent.storeBytes(of: componentHeader, as: RawKeyPathComponent.Header.self) } + return (walker.structOffset, walker.isPureStruct) } #if SWIFT_ENABLE_REFLECTION From 2d5086c08524512f8cc73ed67c16bc52e7ae99fc Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Wed, 28 Sep 2022 16:56:49 -0600 Subject: [PATCH 06/14] Fixed the failing unit tests in capture_propagate_keypath.swift. Revisited formatting. Pulled in changes from the past four weeks. --- stdlib/public/core/KeyPath.swift | 273 ++++++++++++++++++++----------- 1 file changed, 179 insertions(+), 94 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 13cb9c1056ad1..a3af3cb9ec994 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -35,8 +35,6 @@ internal func _abstract( /// type. @_objcRuntimeName(_TtCs11_AnyKeyPath) public class AnyKeyPath: Hashable, _AppendKeyPath { - internal var _isPureStructKeyPath: Bool? - internal var _pureStructValueOffset: Int = 0 /// The root type for this key path. @inlinable public static var rootType: Any.Type { @@ -49,6 +47,9 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { return _rootAndValueType.value } + /// Used to store the offset from the root to the value + /// in the case of a pure struct KeyPath. + /// It's a regular kvcKeyPathStringPtr otherwise. internal final var _kvcKeyPathStringPtr: UnsafePointer? /// The hash value. @@ -120,12 +121,86 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { } } } + + /* + The following pertains to 32-bit architectures only. + We assume everything is a valid pointer to a potential + _kvcKeyPathStringPtr except for the first 4KB page which is reserved + for the nil pointer. 
Note that we have to distinguish between a valid + keypath offset of 0, and the nil pointer itself. + We use maximumOffsetOn32BitArchitecture + 1 for this case. + + The variable maximumOffsetOn32BitArchitecture is duplicated in the two + functions below since having it as a global would make accesses slower, + given getOffsetFromStorage() gets called on each KeyPath read. Further, + having it as an instance variable in AnyKeyPath would increase the size + of AnyKeyPath by 8 bytes. + TODO: Find a better method of refactoring this variable if possible. + */ + + func assignOffsetToStorage(offset: Int) { + let maximumOffsetOn32BitArchitecture = 4094 + + guard offset >= 0 else { + return + } + // TODO: This just gets the architecture size (32 or 64 bits). + // Is there a more efficient way? Something in Builtin maybe? + let architectureSize = MemoryLayout.size + if architectureSize == 8 { + _kvcKeyPathStringPtr = UnsafePointer(bitPattern: -offset - 1) + } else { + if offset == 0 { + _kvcKeyPathStringPtr = + UnsafePointer(bitPattern: maximumOffsetOn32BitArchitecture + 1) + } else if offset < maximumOffsetOn32BitArchitecture { + _kvcKeyPathStringPtr = UnsafePointer(bitPattern: offset) + } else { + _kvcKeyPathStringPtr = nil + } + } + } + + // TODO: See if this can be @inlinable since it gets called on each + // KeyPath access. It would mean _kvcKeyPathStringPtr would need + // to be @usableFromInline. What effect would that have on ABI stability? + func getOffsetFromStorage() -> Int? { + let maximumOffsetOn32BitArchitecture = 4094 + guard let storage = _kvcKeyPathStringPtr else { + return nil + } + + // TODO: Maybe we can get a pointer's raw bits instead of doing + // a distance calculation. Note: offsetBase can't be unwrapped + // forcefully if its bitPattern is 0x00. Hence the 0x01. + let offsetBase = UnsafePointer(bitPattern: 0x01) + let architectureSize = MemoryLayout.size + if architectureSize == 8 { + let storageDistance = storage.distance(to: offsetBase!) - 2 + guard storageDistance >= 0 else { + // This happens to be an actual _kvcKeyPathStringPtr, not an offset, if we get here. + return nil + } + return storageDistance + } else { + let storageDistance = offsetBase!.distance(to: storage) + 1 + if storageDistance == maximumOffsetOn32BitArchitecture + 1 { + return 0 + } else if storageDistance > maximumOffsetOn32BitArchitecture { + return nil + } + return storageDistance + } + } // SPI for the Foundation overlay to allow interop with KVC keypath-based // APIs. public var _kvcKeyPathString: String? 
{ @_semantics("keypath.kvcKeyPathString") get { + guard self.getOffsetFromStorage() == nil else { + return nil + } guard let ptr = _kvcKeyPathStringPtr else { return nil } return String(validatingUTF8: ptr) @@ -152,15 +227,15 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { ) -> Self { _internalInvariant(bytes > 0 && bytes % 4 == 0, "capacity must be multiple of 4 bytes") - let keypath = Builtin.allocWithTailElems_1(self, (bytes/4)._builtinWordValue, + let result = Builtin.allocWithTailElems_1(self, (bytes/4)._builtinWordValue, Int32.self) - keypath._kvcKeyPathStringPtr = nil - let base = UnsafeMutableRawPointer(Builtin.projectTailElems(keypath, + result._kvcKeyPathStringPtr = nil + let base = UnsafeMutableRawPointer(Builtin.projectTailElems(result, Int32.self)) body(UnsafeMutableRawBufferPointer(start: base, count: bytes)) - keypath._pureStructValueOffset = 0 - return keypath + return result } + final internal func withBuffer(_ f: (KeyPathBuffer) throws -> T) rethrows -> T { defer { _fixLifetime(self) } @@ -168,23 +243,16 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { return try f(KeyPathBuffer(base: base)) } - internal var isPureStructKeyPath: Bool { - return _isPureStructKeyPath ?? false - } - - internal func isClass(_ item: Any.Type) -> Bool { - // Displays "warning: 'is' test is always true" at compile time, - // but that's not actually the case. - return (item is AnyObject) - } - // TODO: Find a quicker way to see if this is a tuple. internal func isTuple(_ item: Any) -> Bool { // Unwraps type information if possible. - // Otherwise, everything the Mirror handling "item" sees would be "Optional". + // Otherwise, everything the Mirror handling "item" + // sees would be "Optional". func unwrapType(_ any: T) -> Any { let mirror = Mirror(reflecting: any) - guard mirror.displayStyle == .optional, let first = mirror.children.first else { + guard mirror.displayStyle == .optional, + let first = mirror.children.first + else { return any } return first.value @@ -193,40 +261,40 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { let mirror = Mirror(reflecting: unwrapType(item)) let description = mirror.description let offsetOfFirstBracketForMirrorDescriptionOfTuple = 11 - let idx = description.index(description.startIndex, - offsetBy: offsetOfFirstBracketForMirrorDescriptionOfTuple) + let idx = description.index( + description.startIndex, + offsetBy: offsetOfFirstBracketForMirrorDescriptionOfTuple) if description[idx] == "(" { return true } return false } - - @usableFromInline // Exposed as public API by MemoryLayout.offset(of:) - internal var _storedInlineOffset: Int? { - return withBuffer { - var buffer = $0 - - // The identity key path is effectively a stored keypath of type Self - // at offset zero - if buffer.data.isEmpty { return 0 } + + @usableFromInline // Exposed as public API by MemoryLayout.offset(of:) + internal var _storedInlineOffset: Int? 
{ + return withBuffer { + var buffer = $0 - var offset = 0 - while true { - let (rawComponent, optNextType) = buffer.next() - switch rawComponent.header.kind { - case .struct: - offset += rawComponent._structOrClassOffset + // The identity key path is effectively a stored keypath of type Self + // at offset zero + if buffer.data.isEmpty { return 0 } - case .class, .computed, .optionalChain, .optionalForce, .optionalWrap, .external: - return .none - } + var offset = 0 + while true { + let (rawComponent, optNextType) = buffer.next() + switch rawComponent.header.kind { + case .struct: + offset += rawComponent._structOrClassOffset - if optNextType == nil { return .some(offset) } + case .class, .computed, .optionalChain, .optionalForce, .optionalWrap, .external: + return .none } + + if optNextType == nil { return .some(offset) } } } } - +} /// A partially type-erased key path, from a concrete root type to any /// resulting value type. @@ -282,13 +350,12 @@ public class KeyPath: PartialKeyPath { @usableFromInline internal final func _projectReadOnly(from root: Root) -> Value { - - //One performance improvement is to skip right to Value - //if this keypath traverses through structs only. - if isPureStructKeyPath - { + + // One performance improvement is to skip right to Value + // if this keypath traverses through structs only. + if let offset = getOffsetFromStorage() { return withUnsafeBytes(of: root) { - let pointer = $0.baseAddress.unsafelyUnwrapped.advanced(by: _pureStructValueOffset) + let pointer = $0.baseAddress.unsafelyUnwrapped.advanced(by: offset) return pointer.assumingMemoryBound(to: Value.self).pointee } } @@ -349,14 +416,6 @@ public class WritableKeyPath: KeyPath { @usableFromInline internal func _projectMutableAddress(from base: UnsafePointer) -> (pointer: UnsafeMutablePointer, owner: AnyObject?) { - - // Don't declare "p" above this if-statement; it may slow things down. - if isPureStructKeyPath - { - let p = UnsafeRawPointer(base).advanced(by: _pureStructValueOffset) - return (pointer: UnsafeMutablePointer( - mutating: p.assumingMemoryBound(to: Value.self)), owner: nil) - } var p = UnsafeRawPointer(base) var type: Any.Type = Root.self var keepAlive: AnyObject? @@ -2307,24 +2366,24 @@ extension _AppendKeyPath /* where Self == ReferenceWritableKeyPath */ { } } -/// Updates information pertaining to the types associated with each key path. +/// Updates information pertaining to the types associated with each KeyPath. /// -/// Note: Currently we only distinguish between keypaths that traverse only structs to get to the final value, -/// and all other types. This is done for performance reasons. +/// Note: Currently we only distinguish between keypaths that traverse +/// only structs to get to the final value, and all other types. +/// This is done for performance reasons. /// Other type information may be handled in the future to improve performance. 
internal func _processOffsetForAppendedKeyPath( appendedKeyPath: inout AnyKeyPath, root: AnyKeyPath, leaf: AnyKeyPath ) { - appendedKeyPath._isPureStructKeyPath = root.isPureStructKeyPath && leaf.isPureStructKeyPath - if let isPureStruct = appendedKeyPath._isPureStructKeyPath, isPureStruct { - appendedKeyPath._pureStructValueOffset = root._pureStructValueOffset + leaf - ._pureStructValueOffset + if let rootOffset = root.getOffsetFromStorage(), + let leafOffset = leaf.getOffsetFromStorage() + { + appendedKeyPath.assignOffsetToStorage(offset: rootOffset + leafOffset) } } - @usableFromInline internal func _tryToAppendKeyPaths( root: AnyKeyPath, @@ -2351,7 +2410,11 @@ internal func _tryToAppendKeyPaths( return _openExistential(rootValue, do: open2) } var returnValue: AnyKeyPath = _openExistential(rootRoot, do: open) - _processOffsetForAppendedKeyPath(appendedKeyPath: &returnValue, root: root, leaf: leaf) + _processOffsetForAppendedKeyPath( + appendedKeyPath: &returnValue, + root: root, + leaf: leaf + ) if let returnValue = returnValue as? Result { return returnValue } @@ -2367,7 +2430,7 @@ internal func _appendingKeyPaths< leaf: KeyPath ) -> Result { let resultTy = type(of: root).appendedType(with: type(of: leaf)) - var returnValue:AnyKeyPath = root.withBuffer { + var returnValue: AnyKeyPath = root.withBuffer { var rootBuffer = $0 return leaf.withBuffer { var leafBuffer = $0 @@ -2385,8 +2448,9 @@ internal func _appendingKeyPaths< // KVC-compatible. let appendedKVCLength: Int, rootKVCLength: Int, leafKVCLength: Int - if let rootPtr = root._kvcKeyPathStringPtr, - let leafPtr = leaf._kvcKeyPathStringPtr { + if root.getOffsetFromStorage() == nil, leaf.getOffsetFromStorage() == nil, + let rootPtr = root._kvcKeyPathStringPtr, + let leafPtr = leaf._kvcKeyPathStringPtr { rootKVCLength = Int(_swift_stdlib_strlen(rootPtr)) leafKVCLength = Int(_swift_stdlib_strlen(leafPtr)) // root + "." + leaf @@ -2482,27 +2546,36 @@ internal func _appendingKeyPaths< } // Build the KVC string if there is one. - if let kvcStringBuffer = kvcStringBuffer { - let rootPtr = root._kvcKeyPathStringPtr.unsafelyUnwrapped - let leafPtr = leaf._kvcKeyPathStringPtr.unsafelyUnwrapped - _memcpy(dest: kvcStringBuffer, - src: rootPtr, - size: UInt(rootKVCLength)) - kvcStringBuffer.advanced(by: rootKVCLength) - .storeBytes(of: 0x2E /* '.' */, as: CChar.self) - _memcpy(dest: kvcStringBuffer.advanced(by: rootKVCLength + 1), - src: leafPtr, - size: UInt(leafKVCLength)) - result._kvcKeyPathStringPtr = - UnsafePointer(kvcStringBuffer.assumingMemoryBound(to: CChar.self)) - kvcStringBuffer.advanced(by: rootKVCLength + leafKVCLength + 1) - .storeBytes(of: 0 /* '\0' */, as: CChar.self) + if root.getOffsetFromStorage() == nil, + leaf.getOffsetFromStorage() == nil { + if let kvcStringBuffer = kvcStringBuffer { + let rootPtr = root._kvcKeyPathStringPtr.unsafelyUnwrapped + let leafPtr = leaf._kvcKeyPathStringPtr.unsafelyUnwrapped + _memcpy( + dest: kvcStringBuffer, + src: rootPtr, + size: UInt(rootKVCLength)) + kvcStringBuffer.advanced(by: rootKVCLength) + .storeBytes(of: 0x2E /* '.' 
*/, as: CChar.self) + _memcpy( + dest: kvcStringBuffer.advanced(by: rootKVCLength + 1), + src: leafPtr, + size: UInt(leafKVCLength)) + result._kvcKeyPathStringPtr = + UnsafePointer(kvcStringBuffer.assumingMemoryBound(to: CChar.self)) + kvcStringBuffer.advanced(by: rootKVCLength + leafKVCLength + 1) + .storeBytes(of: 0 /* '\0' */, as: CChar.self) + } } return unsafeDowncast(result, to: Result.self) } } - _processOffsetForAppendedKeyPath(appendedKeyPath: &returnValue, root: root, leaf: leaf) - return returnValue as! Result + _processOffsetForAppendedKeyPath( + appendedKeyPath: &returnValue, + root: root, + leaf: leaf + ) + return returnValue as! Result } // The distance in bytes from the address point of a KeyPath object to its @@ -2585,20 +2658,27 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, let (keyPathClass, rootType, size, _) = _getKeyPathClassAndInstanceSizeFromPattern(patternPtr, arguments) + // Used to process offsets for pure struct KeyPaths. var offset = UInt32(0) var isPureStruct = true var keyPathBase: Any.Type? + // Allocate the instance. let instance = keyPathClass._create(capacityInBytes: size) { instanceData in // Instantiate the pattern into the instance. - let returnValue = _instantiateKeyPathBuffer(patternPtr, instanceData, rootType, arguments) + let returnValue = _instantiateKeyPathBuffer( + patternPtr, + instanceData, + rootType, + arguments + ) offset += returnValue.structOffset - let booleans = returnValue.componentStructList - if(booleans.count == 0) { + let componentStructList = returnValue.componentStructList + if componentStructList.count == 0 { isPureStruct = false } - for value in booleans { + for value in componentStructList { isPureStruct = isPureStruct && value } keyPathBase = rootType @@ -2609,8 +2689,9 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, if let keyPathBase = keyPathBase, instance.isTuple(keyPathBase) { isPureStruct = false } - instance._pureStructValueOffset = Int(offset) - instance._isPureStructKeyPath = isPureStruct + if isPureStruct { + instance.assignOffsetToStorage(offset: Int(offset)) + } // Adopt the KVC string from the pattern. let kvcStringBase = patternPtr.advanced(by: 12) @@ -2625,6 +2706,9 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, kvcStringPtr.assumingMemoryBound(to: CChar.self) } + if instance._kvcKeyPathStringPtr == nil, isPureStruct { + instance.assignOffsetToStorage(offset: Int(offset)) + } // If we can cache this instance as a shared instance, do so. if let oncePtr = oncePtr { // Try to replace a null pointer in the cache variable with the instance @@ -2815,10 +2899,6 @@ internal protocol KeyPathPatternVisitor { mutating func visitIntermediateComponentType(metadataRef: MetadataReference) mutating func finish() - - // Offset for pure-struct keypaths. - var structOffset: UInt32 { get set } - var isPureStruct: [Bool] { get set } } internal func _resolveRelativeAddress(_ base: UnsafeRawPointer, @@ -3458,11 +3538,13 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { break } case .outOfLine(let offset): + isPureStruct.append(false) let header = RawKeyPathComponent.Header(storedWithOutOfLineOffset: kind, mutable: mutable) pushDest(header) pushDest(offset) case .unresolvedFieldOffset(let offsetOfOffset): + isPureStruct.append(false) // Look up offset in the type metadata. The value in the pattern is // the offset within the metadata object. 
let metadataPtr = unsafeBitCast(base, to: UnsafeRawPointer.self) @@ -3481,6 +3563,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { pushDest(header) pushDest(offset) case .unresolvedIndirectOffset(let pointerToOffset): + isPureStruct.append(false) // Look up offset in the indirectly-referenced variable we have a // pointer. _internalInvariant(pointerToOffset.pointee <= UInt32.max) @@ -3717,6 +3800,8 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { instantiateVisitor.visitStoredComponent(kind: kind, mutable: mutable, offset: offset) checkSizeConsistency() + structOffset = instantiateVisitor.structOffset + isPureStruct = instantiateVisitor.isPureStruct } mutating func visitComputedComponent(mutating: Bool, idKind: KeyPathComputedIDKind, From d2912dca75e159ea1a3ed7d3979ae79abb634c8e Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Thu, 29 Sep 2022 16:55:57 -0600 Subject: [PATCH 07/14] Fixed .unresolvedFieldOffset should count as an optimized offset. Restored optimization for KeyPath writes. --- stdlib/public/core/KeyPath.swift | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index a3af3cb9ec994..1b5957604eb15 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -416,6 +416,17 @@ public class WritableKeyPath: KeyPath { @usableFromInline internal func _projectMutableAddress(from base: UnsafePointer) -> (pointer: UnsafeMutablePointer, owner: AnyObject?) { + + // One performance improvement is to skip right to Value + // if this keypath traverses through structs only. + + // Don't declare "p" above this if-statement; it may slow things down. + if let offset = getOffsetFromStorage() + { + let p = UnsafeRawPointer(base).advanced(by: offset) + return (pointer: UnsafeMutablePointer( + mutating: p.assumingMemoryBound(to: Value.self)), owner: nil) + } var p = UnsafeRawPointer(base) var type: Any.Type = Root.self var keepAlive: AnyObject? @@ -3544,7 +3555,6 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { pushDest(header) pushDest(offset) case .unresolvedFieldOffset(let offsetOfOffset): - isPureStruct.append(false) // Look up offset in the type metadata. The value in the pattern is // the offset within the metadata object. let metadataPtr = unsafeBitCast(base, to: UnsafeRawPointer.self) @@ -3556,6 +3566,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { case .struct: offset = UInt32(metadataPtr.load(fromByteOffset: Int(offsetOfOffset), as: UInt32.self)) + structOffset += offset } let header = RawKeyPathComponent.Header(storedWithOutOfLineOffset: kind, From 4fb2f9e15e3e865e0c62c2f1ef8181c6c9c23746 Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Fri, 30 Sep 2022 12:33:03 -0600 Subject: [PATCH 08/14] Modified storage of valid offsets in assignOffsetToStorage() and getOffsetFromStorage(). Got rid of isTuple(). Moved determination of pure struct KeyPaths into _instantiateKeyPathBuffer. 
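The revised scheme still packs a valid pure-struct offset into the bits of _kvcKeyPathStringPtr rather than a separate field. A minimal standalone sketch of the round trip after this change, written with plain integers so it can be checked in isolation (the function names are illustrative, not the stdlib entry points):

    // 64-bit: store the offset as a negative bit pattern, which can never be a
    // valid string pointer. 32-bit: keep the pattern inside the reserved first
    // 4KB page, shifted by one so that offset 0 does not collide with nil.
    let maximumOffsetOn32BitArchitecture = 4094

    func encode(offset: Int, is64Bit: Bool) -> Int? {
      guard offset >= 0 else { return nil }
      if is64Bit { return -offset - 1 }
      return offset <= maximumOffsetOn32BitArchitecture ? offset + 1 : nil
    }

    func decode(bitPattern: Int, is64Bit: Bool) -> Int? {
      if is64Bit {
        let offset = -bitPattern - 1
        return offset >= 0 ? offset : nil  // otherwise: a real KVC string pointer
      }
      let offset = bitPattern - 1
      return (0...maximumOffsetOn32BitArchitecture).contains(offset) ? offset : nil
    }

    // Round trip: an offset of 12 survives encoding on both pointer widths.
    assert(decode(bitPattern: encode(offset: 12, is64Bit: true)!, is64Bit: true) == 12)
    assert(decode(bitPattern: encode(offset: 12, is64Bit: false)!, is64Bit: false) == 12)
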
--- stdlib/public/core/KeyPath.swift | 107 ++++++++++--------------------- 1 file changed, 35 insertions(+), 72 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 1b5957604eb15..115c84bbfb9f9 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -149,21 +149,17 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { let architectureSize = MemoryLayout.size if architectureSize == 8 { _kvcKeyPathStringPtr = UnsafePointer(bitPattern: -offset - 1) - } else { - if offset == 0 { - _kvcKeyPathStringPtr = - UnsafePointer(bitPattern: maximumOffsetOn32BitArchitecture + 1) - } else if offset < maximumOffsetOn32BitArchitecture { - _kvcKeyPathStringPtr = UnsafePointer(bitPattern: offset) - } else { + } + else { + if offset <= maximumOffsetOn32BitArchitecture { + _kvcKeyPathStringPtr = UnsafePointer(bitPattern: (offset + 1)) + } + else { _kvcKeyPathStringPtr = nil } } } - // TODO: See if this can be @inlinable since it gets called on each - // KeyPath access. It would mean _kvcKeyPathStringPtr would need - // to be @usableFromInline. What effect would that have on ABI stability? func getOffsetFromStorage() -> Int? { let maximumOffsetOn32BitArchitecture = 4094 guard let storage = _kvcKeyPathStringPtr else { @@ -182,14 +178,13 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { return nil } return storageDistance - } else { - let storageDistance = offsetBase!.distance(to: storage) + 1 - if storageDistance == maximumOffsetOn32BitArchitecture + 1 { - return 0 - } else if storageDistance > maximumOffsetOn32BitArchitecture { - return nil + } + else { + let storageDistance = offsetBase!.distance(to: storage) + if (storageDistance <= maximumOffsetOn32BitArchitecture) { + return storageDistance } - return storageDistance + return nil } } @@ -242,33 +237,6 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { let base = UnsafeRawPointer(Builtin.projectTailElems(self, Int32.self)) return try f(KeyPathBuffer(base: base)) } - - // TODO: Find a quicker way to see if this is a tuple. - internal func isTuple(_ item: Any) -> Bool { - // Unwraps type information if possible. - // Otherwise, everything the Mirror handling "item" - // sees would be "Optional". - func unwrapType(_ any: T) -> Any { - let mirror = Mirror(reflecting: any) - guard mirror.displayStyle == .optional, - let first = mirror.children.first - else { - return any - } - return first.value - } - - let mirror = Mirror(reflecting: unwrapType(item)) - let description = mirror.description - let offsetOfFirstBracketForMirrorDescriptionOfTuple = 11 - let idx = description.index( - description.startIndex, - offsetBy: offsetOfFirstBracketForMirrorDescriptionOfTuple) - if description[idx] == "(" { - return true - } - return false - } @usableFromInline // Exposed as public API by MemoryLayout.offset(of:) internal var _storedInlineOffset: Int? { @@ -2669,39 +2637,17 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, let (keyPathClass, rootType, size, _) = _getKeyPathClassAndInstanceSizeFromPattern(patternPtr, arguments) - // Used to process offsets for pure struct KeyPaths. - var offset = UInt32(0) - var isPureStruct = true - var keyPathBase: Any.Type? + var pureStructOffset: UInt32? = nil // Allocate the instance. let instance = keyPathClass._create(capacityInBytes: size) { instanceData in // Instantiate the pattern into the instance. 
- let returnValue = _instantiateKeyPathBuffer( + pureStructOffset = _instantiateKeyPathBuffer( patternPtr, instanceData, rootType, arguments ) - offset += returnValue.structOffset - - let componentStructList = returnValue.componentStructList - if componentStructList.count == 0 { - isPureStruct = false - } - for value in componentStructList { - isPureStruct = isPureStruct && value - } - keyPathBase = rootType - } - - // TODO: See if there's a better way to eliminate - // tuples from the current `isPureStruct` optimization. - if let keyPathBase = keyPathBase, instance.isTuple(keyPathBase) { - isPureStruct = false - } - if isPureStruct { - instance.assignOffsetToStorage(offset: Int(offset)) } // Adopt the KVC string from the pattern. @@ -2716,8 +2662,7 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, instance._kvcKeyPathStringPtr = kvcStringPtr.assumingMemoryBound(to: CChar.self) } - - if instance._kvcKeyPathStringPtr == nil, isPureStruct { + if instance._kvcKeyPathStringPtr == nil, let offset = pureStructOffset { instance.assignOffsetToStorage(offset: Int(offset)) } // If we can cache this instance as a shared instance, do so. @@ -3885,7 +3830,7 @@ internal func _instantiateKeyPathBuffer( _ origDestData: UnsafeMutableRawBufferPointer, _ rootType: Any.Type, _ arguments: UnsafeRawPointer -) -> (structOffset: UInt32, componentStructList: [Bool]) { +) -> UInt32? { let destHeaderPtr = origDestData.baseAddress.unsafelyUnwrapped var destData = UnsafeMutableRawBufferPointer( start: destHeaderPtr.advanced(by: MemoryLayout.size), @@ -3937,7 +3882,25 @@ internal func _instantiateKeyPathBuffer( endOfReferencePrefixComponent.storeBytes(of: componentHeader, as: RawKeyPathComponent.Header.self) } - return (walker.structOffset, walker.isPureStruct) + var isPureStruct = true + var offset: UInt32? = nil + + for value in walker.isPureStruct { + isPureStruct = isPureStruct && value + } + + // Disable the optimization in the general case of 0 components. + // Note that a KeyPath such as \SomeStruct.self would still technically + // have a valid offset of 0. + // TODO: Add the logic to distinguish pure struct + // 0-component KeyPaths (tuples too?) from others. + if walker.isPureStruct.count == 0 { + isPureStruct = false + } + if isPureStruct { + offset = walker.structOffset + } + return offset } #if SWIFT_ENABLE_REFLECTION From a6a2f509b304f9c1f747b4c495c146c8d7165947 Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Thu, 20 Oct 2022 14:20:10 -0600 Subject: [PATCH 09/14] Use Int(bitPattern) rather than distance(to) to compute the offset. --- stdlib/public/core/KeyPath.swift | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 115c84bbfb9f9..1a559ac4eeebb 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -162,27 +162,23 @@ public class AnyKeyPath: Hashable, _AppendKeyPath { func getOffsetFromStorage() -> Int? { let maximumOffsetOn32BitArchitecture = 4094 - guard let storage = _kvcKeyPathStringPtr else { + guard _kvcKeyPathStringPtr != nil else { return nil } - // TODO: Maybe we can get a pointer's raw bits instead of doing - // a distance calculation. Note: offsetBase can't be unwrapped - // forcefully if its bitPattern is 0x00. Hence the 0x01. - let offsetBase = UnsafePointer(bitPattern: 0x01) let architectureSize = MemoryLayout.size if architectureSize == 8 { - let storageDistance = storage.distance(to: offsetBase!) 
- 2 - guard storageDistance >= 0 else { + let offset = -Int(bitPattern: _kvcKeyPathStringPtr) - 1 + guard offset >= 0 else { // This happens to be an actual _kvcKeyPathStringPtr, not an offset, if we get here. return nil } - return storageDistance + return offset } else { - let storageDistance = offsetBase!.distance(to: storage) - if (storageDistance <= maximumOffsetOn32BitArchitecture) { - return storageDistance + let offset = Int(bitPattern: _kvcKeyPathStringPtr) - 1 + if (offset <= maximumOffsetOn32BitArchitecture) { + return offset } return nil } From 21f175ae3c8389e23d48dc757b26c0d6a3b5f33e Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Mon, 24 Oct 2022 17:43:00 -0600 Subject: [PATCH 10/14] Added missing instances of pureStruct information propagation. This means we no longer need to check for empty KeyPath Walker results. --- stdlib/public/core/KeyPath.swift | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 1a559ac4eeebb..7fe3abbc1381f 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -3753,7 +3753,7 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { offset: offset) checkSizeConsistency() structOffset = instantiateVisitor.structOffset - isPureStruct = instantiateVisitor.isPureStruct + isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) } mutating func visitComputedComponent(mutating: Bool, idKind: KeyPathComputedIDKind, @@ -3782,32 +3782,41 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { setter: setter, arguments: arguments, externalArgs: externalArgs) + // Note: For this function and the ones below, modification of structOffset + // is omitted since these types of KeyPaths won't have a pureStruct + // offset anyway. + isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) checkSizeConsistency() } mutating func visitOptionalChainComponent() { sizeVisitor.visitOptionalChainComponent() instantiateVisitor.visitOptionalChainComponent() + isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) checkSizeConsistency() } mutating func visitOptionalWrapComponent() { sizeVisitor.visitOptionalWrapComponent() instantiateVisitor.visitOptionalWrapComponent() + isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) checkSizeConsistency() } mutating func visitOptionalForceComponent() { sizeVisitor.visitOptionalForceComponent() instantiateVisitor.visitOptionalForceComponent() + isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) checkSizeConsistency() } mutating func visitIntermediateComponentType(metadataRef: MetadataReference) { sizeVisitor.visitIntermediateComponentType(metadataRef: metadataRef) instantiateVisitor.visitIntermediateComponentType(metadataRef: metadataRef) + isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) checkSizeConsistency() } mutating func finish() { sizeVisitor.finish() instantiateVisitor.finish() + isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) checkSizeConsistency() } @@ -3885,14 +3894,6 @@ internal func _instantiateKeyPathBuffer( isPureStruct = isPureStruct && value } - // Disable the optimization in the general case of 0 components. - // Note that a KeyPath such as \SomeStruct.self would still technically - // have a valid offset of 0. - // TODO: Add the logic to distinguish pure struct - // 0-component KeyPaths (tuples too?) from others. 
- if walker.isPureStruct.count == 0 { - isPureStruct = false - } if isPureStruct { offset = walker.structOffset } From 92d7cf57256065c986525fbe0ac68f02ccc945c0 Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Wed, 26 Oct 2022 11:05:02 -0600 Subject: [PATCH 11/14] Revised _tryToAppendKeyPaths(). Removed two instances of isPureStruct.append() that weren't needed. --- stdlib/public/core/KeyPath.swift | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 7fe3abbc1381f..55fe7b3a55674 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -2377,23 +2377,17 @@ internal func _tryToAppendKeyPaths( let typedRoot = unsafeDowncast(root, to: KeyPath.self) let typedLeaf = unsafeDowncast(leaf, to: KeyPath.self) - let result = _appendingKeyPaths(root: typedRoot, leaf: typedLeaf) + var result:AnyKeyPath = _appendingKeyPaths(root: typedRoot, + leaf: typedLeaf) + _processOffsetForAppendedKeyPath(appendedKeyPath: &result, + root: root, leaf: leaf) return unsafeDowncast(result, to: Result.self) } return _openExistential(leafValue, do: open3) } return _openExistential(rootValue, do: open2) } - var returnValue: AnyKeyPath = _openExistential(rootRoot, do: open) - _processOffsetForAppendedKeyPath( - appendedKeyPath: &returnValue, - root: root, - leaf: leaf - ) - if let returnValue = returnValue as? Result { - return returnValue - } - return nil + return _openExistential(rootRoot, do: open) } @usableFromInline @@ -3490,7 +3484,6 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { break } case .outOfLine(let offset): - isPureStruct.append(false) let header = RawKeyPathComponent.Header(storedWithOutOfLineOffset: kind, mutable: mutable) pushDest(header) @@ -3515,7 +3508,6 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { pushDest(header) pushDest(offset) case .unresolvedIndirectOffset(let pointerToOffset): - isPureStruct.append(false) // Look up offset in the indirectly-referenced variable we have a // pointer. _internalInvariant(pointerToOffset.pointee <= UInt32.max) From dd8fb4a7798e0fabbea765d87a21c1fac3ff39bb Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Fri, 28 Oct 2022 11:18:10 -0600 Subject: [PATCH 12/14] Added a benchmark for KeyPaths where trivially-typed memory is preceded by non-trivially-typed memory. 
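The shape the new benchmark measures, reduced to its essentials: a class at the root followed by a chain of plain structs, so the trivially-typed tail sits behind a reference-typed hop. The types below only mirror the benchmark's D1...D5; the names and values are illustrative:

    final class Root {
      var tail = Tail(a: Mid(b: Leaf(value: 7)))
    }
    struct Tail { var a: Mid }
    struct Mid { var b: Leaf }
    struct Leaf { var value: Int }

    let path: KeyPath<Root, Int> = \Root.tail.a.b.value

    // The class hop at the front keeps the whole key path off the pure-struct
    // fast path; the struct-only tail behind it is what the benchmark stresses.
    assert(Root()[keyPath: path] == 7)
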
--- .../KeyPathPerformanceTests.swift | 74 +++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/benchmark/single-source/KeyPathPerformanceTests.swift b/benchmark/single-source/KeyPathPerformanceTests.swift index f1d5c69c9347f..55067e14f5fbc 100644 --- a/benchmark/single-source/KeyPathPerformanceTests.swift +++ b/benchmark/single-source/KeyPathPerformanceTests.swift @@ -66,6 +66,12 @@ public let benchmarks = [ tags: [.validation, .api], setUpFunction: setupKeyPathNestedStructs ), + BenchmarkInfo( + name: "KeyPathClassStructs", + runFunction: run_KeyPathClassStructs, + tags: [.validation, .api], + setUpFunction: setupKeyPathNestedStructs + ), ] /** @@ -97,6 +103,7 @@ class FixedSizeArrayHolder { var fixedSizeArray100: FixedSizeArray100 var fixedSizeArray10: FixedSizeArray10 var mainArrayForNestedStructs: [A] + var mainArrayForClassStructs: [D1] var arrayForMutatingGetset: [MutatingGetsetNested1] var arrayForGet: [GetNested1] var arrayForOptionals: [Optional1] @@ -107,6 +114,7 @@ class FixedSizeArrayHolder { var keypathForOptional: KeyPath var keypathForNestedClasses: KeyPath var keypathForNonMutatingGetset: WritableKeyPath + var keypathForClassStructs: WritableKeyPath // Same order as in KeyPathWritePerformance var kp46: WritableKeyPath, ElementType> @@ -176,6 +184,7 @@ class FixedSizeArrayHolder { fixedSizeArray100 = initializeFixedSizeArray100() fixedSizeArray10 = initializeFixedSizeArray10() mainArrayForNestedStructs = [A]() + mainArrayForClassStructs = [D1]() arrayForMutatingGetset = [MutatingGetsetNested1]() arrayForGet = [GetNested1]() arrayForOptionals = [Optional1]() @@ -252,6 +261,7 @@ class FixedSizeArrayHolder { ._nestedItemStorage?!._nestedItemStorage?!._storage) keypathForNestedClasses = identity(\C1.r.r.r.r.a) keypathForNonMutatingGetset = identity(\M.n.o.p.q.q) + keypathForClassStructs = identity(\D1.b.c.d.e.e) } } @@ -262,6 +272,10 @@ public func setupKeyPathNestedStructs() { let instance = A(a: 0, b: B(b: 0, c: C(c: 0, d: D(d: 0, e: E(e: expectedIntForNestedItems))))) holder.mainArrayForNestedStructs.append(instance) + let classStructInstance = D1(b: D2(b: 0, c: D3(c: 0, d: D4(d: 0, + e: D5(e: expectedIntForNestedItems))))) + holder.mainArrayForClassStructs.append(classStructInstance) + var mutatingGetsetInstance = MutatingGetsetNested1() mutatingGetsetInstance.nestedItem.nestedItem.nestedItem.nestedItem .storage = expectedIntForNestedItems @@ -333,6 +347,36 @@ struct E { var e: Int } +// Used for run_KeyPathClassStruct(). +class D1 { + var a: Int + var b: D2 + init(b: D2) + { + a = 0 + self.b = b + } +} + +struct D2 { + var b: Int + var c: D3 +} + +struct D3 { + var c: Int + var d: D4 +} + +struct D4 { + var d: Int + var e: D5 +} + +struct D5 { + var e: Int +} + // Used for run_KeyPathNestedClasses() class C1 { let a: Int = 0 @@ -1326,6 +1370,36 @@ public func run_KeyPathNestedStructs(n: Int) { check(sum == iterationMultipier * n * expectedIntForNestedItems) } +// This measures the performance of keypath reads where a block of +// trivially-typed memory is preceded by something else (optionals, reference +// types, etc.) 
+@inline(never) +public func run_KeyPathClassStructs(n: Int) { + var sum = 0 + var index = 0 + let iterationMultipier = 2000 + + let singleHopKeyPath0: WritableKeyPath = \D1.b + let singleHopKeyPath1: WritableKeyPath = \D2.c + let singleHopKeyPath2: WritableKeyPath = \D3.d + let singleHopKeyPath3: WritableKeyPath = \D4.e + let singleHopKeyPath4: WritableKeyPath = \D5.e + + let appendedKeyPath = identity(singleHopKeyPath0.appending(path: singleHopKeyPath1) + .appending(path: singleHopKeyPath2).appending(path: singleHopKeyPath3) + .appending(path: singleHopKeyPath4)) + + let elementCount = FixedSizeArrayHolder.shared.mainArrayForClassStructs.count + for _ in 1 ... iterationMultipier * n { + let element = FixedSizeArrayHolder.shared.mainArrayForClassStructs[index] + sum += element[keyPath: appendedKeyPath] + index = (index + 1) % elementCount + } + check(sum == iterationMultipier * n * expectedIntForNestedItems) +} + + + // This is meant as a baseline, from a timing perspective, // for run_testKeyPathReadPerformance() and run_testKeyPathWritePerformance(). // It's currently set to ".skip", but is useful for comparing the performance between keypath operations and direct dot-notation. From 9171f79df2c6872210e357e1752d115f32c3e431 Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Fri, 28 Oct 2022 11:32:16 -0600 Subject: [PATCH 13/14] Reverted a change that should have gone into another branch. --- .../KeyPathPerformanceTests.swift | 74 ------------------- 1 file changed, 74 deletions(-) diff --git a/benchmark/single-source/KeyPathPerformanceTests.swift b/benchmark/single-source/KeyPathPerformanceTests.swift index 55067e14f5fbc..f1d5c69c9347f 100644 --- a/benchmark/single-source/KeyPathPerformanceTests.swift +++ b/benchmark/single-source/KeyPathPerformanceTests.swift @@ -66,12 +66,6 @@ public let benchmarks = [ tags: [.validation, .api], setUpFunction: setupKeyPathNestedStructs ), - BenchmarkInfo( - name: "KeyPathClassStructs", - runFunction: run_KeyPathClassStructs, - tags: [.validation, .api], - setUpFunction: setupKeyPathNestedStructs - ), ] /** @@ -103,7 +97,6 @@ class FixedSizeArrayHolder { var fixedSizeArray100: FixedSizeArray100 var fixedSizeArray10: FixedSizeArray10 var mainArrayForNestedStructs: [A] - var mainArrayForClassStructs: [D1] var arrayForMutatingGetset: [MutatingGetsetNested1] var arrayForGet: [GetNested1] var arrayForOptionals: [Optional1] @@ -114,7 +107,6 @@ class FixedSizeArrayHolder { var keypathForOptional: KeyPath var keypathForNestedClasses: KeyPath var keypathForNonMutatingGetset: WritableKeyPath - var keypathForClassStructs: WritableKeyPath // Same order as in KeyPathWritePerformance var kp46: WritableKeyPath, ElementType> @@ -184,7 +176,6 @@ class FixedSizeArrayHolder { fixedSizeArray100 = initializeFixedSizeArray100() fixedSizeArray10 = initializeFixedSizeArray10() mainArrayForNestedStructs = [A]() - mainArrayForClassStructs = [D1]() arrayForMutatingGetset = [MutatingGetsetNested1]() arrayForGet = [GetNested1]() arrayForOptionals = [Optional1]() @@ -261,7 +252,6 @@ class FixedSizeArrayHolder { ._nestedItemStorage?!._nestedItemStorage?!._storage) keypathForNestedClasses = identity(\C1.r.r.r.r.a) keypathForNonMutatingGetset = identity(\M.n.o.p.q.q) - keypathForClassStructs = identity(\D1.b.c.d.e.e) } } @@ -272,10 +262,6 @@ public func setupKeyPathNestedStructs() { let instance = A(a: 0, b: B(b: 0, c: C(c: 0, d: D(d: 0, e: E(e: expectedIntForNestedItems))))) holder.mainArrayForNestedStructs.append(instance) - let classStructInstance = D1(b: D2(b: 0, c: D3(c: 0, d: 
D4(d: 0, - e: D5(e: expectedIntForNestedItems))))) - holder.mainArrayForClassStructs.append(classStructInstance) - var mutatingGetsetInstance = MutatingGetsetNested1() mutatingGetsetInstance.nestedItem.nestedItem.nestedItem.nestedItem .storage = expectedIntForNestedItems @@ -347,36 +333,6 @@ struct E { var e: Int } -// Used for run_KeyPathClassStruct(). -class D1 { - var a: Int - var b: D2 - init(b: D2) - { - a = 0 - self.b = b - } -} - -struct D2 { - var b: Int - var c: D3 -} - -struct D3 { - var c: Int - var d: D4 -} - -struct D4 { - var d: Int - var e: D5 -} - -struct D5 { - var e: Int -} - // Used for run_KeyPathNestedClasses() class C1 { let a: Int = 0 @@ -1370,36 +1326,6 @@ public func run_KeyPathNestedStructs(n: Int) { check(sum == iterationMultipier * n * expectedIntForNestedItems) } -// This measures the performance of keypath reads where a block of -// trivially-typed memory is preceded by something else (optionals, reference -// types, etc.) -@inline(never) -public func run_KeyPathClassStructs(n: Int) { - var sum = 0 - var index = 0 - let iterationMultipier = 2000 - - let singleHopKeyPath0: WritableKeyPath = \D1.b - let singleHopKeyPath1: WritableKeyPath = \D2.c - let singleHopKeyPath2: WritableKeyPath = \D3.d - let singleHopKeyPath3: WritableKeyPath = \D4.e - let singleHopKeyPath4: WritableKeyPath = \D5.e - - let appendedKeyPath = identity(singleHopKeyPath0.appending(path: singleHopKeyPath1) - .appending(path: singleHopKeyPath2).appending(path: singleHopKeyPath3) - .appending(path: singleHopKeyPath4)) - - let elementCount = FixedSizeArrayHolder.shared.mainArrayForClassStructs.count - for _ in 1 ... iterationMultipier * n { - let element = FixedSizeArrayHolder.shared.mainArrayForClassStructs[index] - sum += element[keyPath: appendedKeyPath] - index = (index + 1) % elementCount - } - check(sum == iterationMultipier * n * expectedIntForNestedItems) -} - - - // This is meant as a baseline, from a timing perspective, // for run_testKeyPathReadPerformance() and run_testKeyPathWritePerformance(). // It's currently set to ".skip", but is useful for comparing the performance between keypath operations and direct dot-notation. From bc4b38d747cff206c3ef57e7dbd6e9c04261a7ef Mon Sep 17 00:00:00 2001 From: Martin Cwikla Date: Mon, 31 Oct 2022 14:43:44 -0600 Subject: [PATCH 14/14] Removed an unnecessary disabling of isPureStruct. This would have prevented explicitly specified KeyPaths through pure structs, e.g., \A.b.c, from taking the optimized path. --- stdlib/public/core/KeyPath.swift | 1 - 1 file changed, 1 deletion(-) diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 55fe7b3a55674..9d3ecd54cc24c 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -3688,7 +3688,6 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { } mutating func visitIntermediateComponentType(metadataRef: MetadataReference) { - isPureStruct.append(false) // Get the metadata for the intermediate type. let metadata = _resolveKeyPathMetadataReference( metadataRef,