import Photos
import SwiftUI
import AVFoundation
import CoreLocation

func getAlbumAssets(albumName: String) async -> [AlbumAsset] {
    var albumAssets: [AlbumAsset] = []
    let fetchOptions = PHFetchOptions()
    fetchOptions.predicate = NSPredicate(format: "title = %@", albumName)
    let albumCollection = PHAssetCollection.fetchAssetCollections(with: .album, subtype: .any, options: fetchOptions)
    if let collection = albumCollection.firstObject {
        // Fetch the assets contained in the first matching album
        let albumAssetsResult = PHAsset.fetchAssets(in: collection, options: nil)
        let requestImageOptions = PHImageRequestOptions()
        requestImageOptions.version = .current
        requestImageOptions.resizeMode = .exact
        requestImageOptions.deliveryMode = .highQualityFormat
        requestImageOptions.isNetworkAccessAllowed = true
        requestImageOptions.isSynchronous = true
        requestImageOptions.progressHandler = { progress, error, stop, info in
            if progress == 1.0 {
                print("Downloaded Shared Image from iCloud")
            } else {
                print("\(progress)")
            }
        }
        let requestVideoOptions = PHVideoRequestOptions()
        requestVideoOptions.version = .current
        requestVideoOptions.deliveryMode = .highQualityFormat
        requestVideoOptions.isNetworkAccessAllowed = true
        requestVideoOptions.progressHandler = { progress, error, stop, info in
            if progress == 1.0 {
                print("Downloaded Shared Video from iCloud")
            } else {
                print("\(progress)")
            }
        }
        let imageManager = PHImageManager() // or PHCachingImageManager()
        let asset = albumAssetsResult.object(at: 1)
        let imageSize = CGSize(width: asset.pixelWidth / 2, height: asset.pixelHeight / 2)
        var converted = asset.mediaType == .image ? Image("iva") : Image("vai")
        let index = 0
        let album = albumName
        let name = asset.localIdentifier
        let cdate = asset.creationDate ?? Date()
        var mediaasset = ""
        var mediaduration: Double = 0
        let imageType = asset.mediaType
        let imageWidth = CGFloat(asset.pixelWidth)
        let imageHeight = CGFloat(asset.pixelHeight)
        var location = CLLocationCoordinate2D(latitude: 0.0, longitude: 0.0)
        if let coordinate = asset.location?.coordinate {
            location = coordinate
        }
        let cover = false
        if asset.mediaType == .image {
            // Runs synchronously because isSynchronous = true, so the handler fires before this call returns
            imageManager.requestImage(for: asset, targetSize: imageSize, contentMode: .aspectFill, options: requestImageOptions) { image, info in
                if let image = image {
                    converted = Image(uiImage: image)
                }
            }
        } else if imageType == .video {
            // Runs asynchronously: the handler fires later, once the AVAsset is available
            imageManager.requestAVAsset(forVideo: asset, options: requestVideoOptions) { avAsset, audioMix, info in
                if let urlAsset = avAsset as? AVURLAsset {
                    mediaasset = urlAsset.url.absoluteString
                    mediaduration = CMTimeGetSeconds(urlAsset.duration)
                }
            }
        }
        let newAsset = AlbumAsset(index: index, album: album, name: name, createDate: cdate, asset: mediaasset, duration: mediaduration, image: converted, imageType: imageType, imageWidth: imageWidth, imageHeight: imageHeight, location: location, cover: cover)
        albumAssets.append(newAsset)
    }
    return albumAssets
}
.requestImage works great for .image assets and returns only the first image in the requested album. However, .requestAVAsset takes a bit longer and returns nothing: I can see that return albumAssets runs before the video has even loaded.
How can I make the function wait until the video has finished loading before it appends to the albumAssets array and returns?
I'm new to Swift. I did try DispatchGroup(), but blocking with group.wait() inside an async function doesn't seem to be the right approach.
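I was also wondering whether wrapping the callback in withCheckedContinuation is the right direction. Below is a rough sketch of what I mean; requestVideoURL and the tuple return are just names I made up, and I haven't verified that this does what I want:

    // Sketch only: requestVideoURL is a hypothetical helper, not existing API.
    // It wraps the callback-based PhotoKit call so the caller can await the result.
    func requestVideoURL(for asset: PHAsset,
                         manager: PHImageManager,
                         options: PHVideoRequestOptions) async -> (urlString: String, duration: Double) {
        await withCheckedContinuation { continuation in
            manager.requestAVAsset(forVideo: asset, options: options) { avAsset, _, _ in
                if let urlAsset = avAsset as? AVURLAsset {
                    continuation.resume(returning: (urlAsset.url.absoluteString, CMTimeGetSeconds(urlAsset.duration)))
                } else {
                    // Resume exactly once even if the video could not be loaded
                    continuation.resume(returning: ("", 0))
                }
            }
        }
    }

The idea would be to call it from the video branch with (mediaasset, mediaduration) = await requestVideoURL(for: asset, manager: imageManager, options: requestVideoOptions). Is something like this the idiomatic way to wait for the video before appending to albumAssets, or is there a better pattern?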