Reputation: 3079
I have multiple requests:
// Results of the three independent requests; each completion fills in its slot.
// All three must be non-nil before workWithData can run.
var data1: MyData?
var data2: MyData?
var data3: MyData?
/// Fires a GET request at `url` and reports the outcome through `completion`.
/// - Parameters:
///   - url: The endpoint to fetch.
///   - completion: Invoked once with `.Success` plus the parsed payload,
///     or `.Failure` plus `nil` when the request fails.
func makeRequest(url: String, completion: (result: ResponseResult, data: MyData?) -> Void){
    Alamofire.request(.GET, url).responseJSON { response in
        switch response.result {
        case .Success(let JSON):
            // Wrap the raw JSON in the model type before handing it back.
            // (Original was missing the `data:` label, which does not compile.)
            completion(result: .Success, data: MyData(JSON))
        case .Failure:
            // Original read `case. Failure` — stray dot on `case`, a syntax error.
            completion(result: .Failure, data: nil)
        }
    }
}
// Kick off all three downloads; each closure just stores its payload.
// The result flag is unused here, so it is discarded with `_`.
makeRequest(url1) { _, data in data1 = data }
makeRequest(url2) { _, data in data2 = data }
makeRequest(url3) { _, data in data3 = data }
And after all of the data has been received, I must call the following function:
workWithData(data1, data2: data2, data3: data3)
How to make barrier for that three requests in that situation?
Upvotes: 3
Views: 5305
Reputation: 116
You have to use DispatchGroup, and don't forget about deadlocks.
// Shared result slots written by the request completions below and read by
// workWithData once the dispatch group drains.
var data1: MyData?
var data2: MyData?
var data3: MyData?
/// Fires a GET request at `url` and reports the outcome through `completion`.
/// - Parameters:
///   - url: The endpoint to fetch.
///   - completion: Invoked exactly once — with `.Success` and the parsed
///     payload, or `.Failure` and `nil`. "Exactly once" matters: every call
///     must balance a `DispatchGroup.enter()` with a `leave()`.
func makeRequest(url: String, completion: (result: ResponseResult, data: MyData?) -> Void){
    Alamofire.request(.GET, url).responseJSON { response in
        switch response.result {
        case .Success(let JSON):
            // Original omitted the `data:` label, which does not compile.
            completion(result: .Success, data: MyData(JSON))
        case .Failure:
            // Original read `case. Failure` — stray dot on `case`, a syntax error.
            completion(result: .Failure, data: nil)
        }
    }
}
// Track the three in-flight requests with a dispatch group.
let downloadGroup = DispatchGroup()

// One enter() per request; each completion below must call leave() exactly once.
downloadGroup.enter()
downloadGroup.enter()
downloadGroup.enter()

makeRequest(url1) { result, data in
    data1 = data
    downloadGroup.leave()
}
makeRequest(url2) { result, data in
    data2 = data
    downloadGroup.leave()
}
makeRequest(url3) { result, data in
    data3 = data
    downloadGroup.leave()
}

// notify(queue:) is the non-blocking form of the barrier: instead of parking a
// background thread in wait() and then hopping to main, it schedules the
// closure on the main queue once the enter/leave count balances. Same effect,
// no blocked thread, and no deadlock risk from waiting on the wrong queue.
downloadGroup.notify(queue: .main) {
    workWithData(data1, data2: data2, data3: data3)
}
Upvotes: 9
Reputation: 2229
Semaphore should work for you. Consider this:
// Result slots for the three requests; filled by the semaphore-signalling
// completions below.
var data1: NSData?
var data2: NSData?
var data3: NSData?
/// Demo request helper that delivers its callback on a background (global)
/// queue rather than the main queue, keeping response handling off the main
/// thread.
/// - Parameters:
///   - url: The endpoint to fetch. (The original ignored this parameter and
///     always hit a hard-coded "https://google.com" — fixed so the argument
///     is actually used.)
///   - completion: Called with placeholder data once the response arrives.
func makeRequest(url: String, completion: (data: NSData?) -> Void){
    // The request handle isn't needed here, so don't bind it to an unused local.
    Alamofire.request(.GET, url).responseJSON(queue: dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)) { closureResponse in
        completion(data: NSData())
    }
}
// One counting semaphore, signalled once per completed request.
let sem = dispatch_semaphore_create(0)

makeRequest("1"){ data in
    data1 = data
    dispatch_semaphore_signal(sem)
}
makeRequest("2"){ data in
    data2 = data
    dispatch_semaphore_signal(sem)
}
// Fixed copy-paste bug: the third request was issued with "2" again,
// so data3 was being fetched from the wrong URL.
makeRequest("3"){ data in
    data3 = data
    dispatch_semaphore_signal(sem)
}

// Wait three times: the barrier opens only after all three signals arrive.
// NOTE(review): this must NOT run on the same queue that delivers the
// completions (here a global queue), or it deadlocks — the waits would block
// the very thread the signals need.
dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER)
dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER)
dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER)
print("123")
Per @James's comment, I have spent some time playing with Alamofire. What I found is that by default it delivers callbacks on the main queue. That's not ideal from my perspective — I prefer to minimise main-thread load — so I would recommend using a concurrent queue for callback delivery.
Upvotes: 1
Reputation: 13934
I think you should check whether all the data has arrived at the end of the Alamofire.request response handler:
/// Variant that checks for completeness inline: after each response is
/// processed, if all three result slots are populated, process them.
/// - Parameters:
///   - url: The endpoint to fetch.
///   - completion: Invoked with the outcome before the completeness check runs.
func makeRequest(url: String, completion: (result: ResponseResult, data: MyData?) -> Void){
    Alamofire.request(.GET, url).responseJSON { response in
        switch response.result {
        case .Success(let JSON):
            // Original omitted the `data:` label, which does not compile.
            completion(result: .Success, data: MyData(JSON))
        case .Failure:
            // Original read `case. Failure` — stray dot on `case`, a syntax error.
            completion(result: .Failure, data: nil)
        }
        // NOTE(review): this check races if callbacks ever arrive concurrently —
        // two responses could each observe all three slots non-nil and invoke
        // workWithData twice. It is only safe while responseJSON delivers on a
        // single (the main) queue — confirm before changing the callback queue.
        if data1 != nil && data2 != nil && data3 != nil {
            workWithData(data1, data2: data2, data3: data3)
        }
    }
}
Upvotes: 0