// This source file is part of the Swift.org open source project
//
// Copyright (c) 2020 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See http://swift.org/LICENSE.txt for license information
// See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
import Dispatch
import Foundation

import NIO
import NIOConcurrencyHelpers

import TSCUtility
/// Run the given computations on a given array in batches, exercising
/// a specified amount of parallelism.
///
/// - Discussion:
///     For some blocking operations (such as file system accesses) executing
///     them on the NIO loops is very expensive since it blocks the event
///     processing machinery. Here we use extra threads for such operations.
public class LLBBatchingFutureOperationQueue {

    /// Threads capable of running futures.
    public let group: LLBFuturesDispatchGroup

    /// Whether the queue is suspended.
    @available(*, deprecated, message: "Property 'isSuspended' is deprecated.")
    public var isSuspended: Bool {
        // Cannot suspend a DispatchQueue; kept only for source compatibility.
        false
    }

    /// Maximum number of operations executed concurrently.
    ///
    /// Raising the limit may allow queued work items to start, so the setter
    /// goes through `scheduleMoreTasks` rather than a plain lock.
    public var maxOpCount: Int {
        get { lock.withLock { maxOpCount_ } }
        set { scheduleMoreTasks { maxOpCount_ = newValue } }
    }

    /// Return the number of operations currently queued.
    public var opCount: Int { lock.withLock { opCount_ } }

    /// Concurrent queue on which the scheduled work items actually run.
    private let dispatchQueue: DispatchQueue

    /// Lock protecting `maxOpCount_`, `opCount_`, and `workQueue`.
    private let lock = NIOConcurrencyHelpers.Lock()

    /// Backing storage for `maxOpCount`; guarded by `lock`.
    private var maxOpCount_: Int

    /// Number of operations currently dispatched; guarded by `lock`.
    private var opCount_: Int

    /// The queue of operations waiting to be dispatched; guarded by `lock`.
    private var workQueue = NIO.CircularBuffer<DispatchWorkItem>()

    /// Deprecated initializer mapping Foundation `QualityOfService` onto the
    /// equivalent `DispatchQoS`.
    @available(*, deprecated, message: "'qualityOfService' is deprecated: Use 'dispatchQoS'")
    public convenience init(name: String, group: LLBFuturesDispatchGroup, maxConcurrentOperationCount maxOpCount: Int, qualityOfService: QualityOfService) {
        let dispatchQoS: DispatchQoS

        switch qualityOfService {
        case .userInteractive:
            dispatchQoS = .userInteractive
        case .userInitiated:
            dispatchQoS = .userInitiated
        case .utility:
            dispatchQoS = .utility
        case .background:
            dispatchQoS = .background
        default:
            dispatchQoS = .default
        }

        self.init(name: name, group: group, maxConcurrentOperationCount: maxOpCount, dispatchQoS: dispatchQoS)
    }

    ///
    /// - Parameters:
    ///    - name: Unique string label, for logging.
    ///    - group: Threads capable of running futures.
    ///    - maxConcurrentOperationCount:
    ///         Operations to execute in parallel.
    public convenience init(name: String, group: LLBFuturesDispatchGroup, maxConcurrentOperationCount maxOpCount: Int) {
        self.init(name: name, group: group, maxConcurrentOperationCount: maxOpCount, dispatchQoS: .default)
    }

    /// Designated initializer.
    ///
    /// - Parameters:
    ///    - name: Unique string label, for logging; used as the dispatch queue label.
    ///    - group: Threads capable of running futures.
    ///    - maxConcurrentOperationCount: Operations to execute in parallel.
    ///    - dispatchQoS: Quality of service of the underlying dispatch queue.
    public init(name: String, group: LLBFuturesDispatchGroup, maxConcurrentOperationCount maxOpCnt: Int, dispatchQoS: DispatchQoS) {
        self.group = group
        self.dispatchQueue = DispatchQueue(label: name, qos: dispatchQoS, attributes: .concurrent)
        self.opCount_ = 0
        self.maxOpCount_ = maxOpCnt
    }

    /// Enqueue a throwing computation; the returned future is fulfilled with
    /// its result (or failure) once the queue gets around to running it.
    public func execute<T>(_ body: @escaping () throws -> T) -> LLBFuture<T> {
        let promise = group.any().makePromise(of: T.self)

        let workItem = DispatchWorkItem {
            promise.fulfill(body)
            // This operation is done; release its slot and pull in more work.
            self.scheduleMoreTasks {
                self.opCount_ -= 1
            }
        }

        self.scheduleMoreTasks {
            workQueue.append(workItem)
        }

        return promise.futureResult
    }

    /// Enqueue a future-producing computation. The operation's concurrency
    /// slot is held until the produced future completes, not merely until
    /// `body` returns.
    public func execute<T>(_ body: @escaping () -> LLBFuture<T>) -> LLBFuture<T> {
        let promise = group.any().makePromise(of: T.self)

        let workItem = DispatchWorkItem {
            let f = body()
            f.cascade(to: promise)

            // Blocking the dispatch thread (not a NIO loop) on purpose, so at
            // most `maxOpCount` futures are in flight. Errors propagate to the
            // promise via `cascade`, so the thrown value can be discarded here.
            _ = try? f.wait()

            self.scheduleMoreTasks {
                self.opCount_ -= 1
            }
        }

        self.scheduleMoreTasks {
            workQueue.append(workItem)
        }

        return promise.futureResult
    }

    /// Order-preserving parallel execution. Wait for everything to complete.
    @inlinable
    public func execute<A, T>(_ args: [A], minStride: Int = 1, _ body: @escaping (ArraySlice<A>) throws -> [T]) -> LLBFuture<[T]> {
        let futures: [LLBFuture<[T]>] = executeNoWait(args, minStride: minStride, body)
        // Flatten the per-batch results back into a single ordered array.
        let loop = futures.first?.eventLoop ?? group.next()
        return LLBFuture<[T]>.whenAllSucceed(futures, on: loop).map { $0.flatMap { $0 } }
    }

    /// Order-preserving parallel execution.
    /// Do not wait for all executions to complete, returning individual futures.
    @inlinable
    public func executeNoWait<A, T>(_ args: [A], minStride: Int = 1, maxStride: Int = Int.max, _ body: @escaping (ArraySlice<A>) throws -> [T]) -> [LLBFuture<[T]>] {
        // Pick a stride that spreads `args` across roughly `maxOpCount`
        // batches, clamped into [minStride, maxStride].
        let batches: [ArraySlice<A>] = args.tsc_sliceBy(maxStride: max(minStride, min(maxStride, args.count / maxOpCount)))
        return batches.map { arg in execute { try body(arg) } }
    }

    /// Mutate queue state under the lock, then dispatch as many queued work
    /// items as the current concurrency limit allows.
    ///
    /// - Parameter performUnderLock: State mutation to run while `lock` is held.
    private func scheduleMoreTasks(performUnderLock: () -> Void) {
        let toExecute: [DispatchWorkItem] = lock.withLock {
            performUnderLock()

            var scheduleItems: [DispatchWorkItem] = []

            while opCount_ < maxOpCount_ {
                // Schedule a new operation, if available.
                guard let op = workQueue.popFirst() else {
                    break
                }

                self.opCount_ += 1
                scheduleItems.append(op)
            }

            return scheduleItems
        }

        // Dispatch outside the lock to avoid holding it while enqueueing.
        for workItem in toExecute {
            dispatchQueue.async(execute: workItem)
        }
    }
}
0 commit comments