Reputation: 71
I am working on an app that can detect dog breeds, and I would like to make it so that when users pull up their photo library, they only see images containing a dog. To do this I used Vision's built-in pet recognizer model, which is very basic; the issue is that it takes too long and uses too much memory when run on thousands of photos. I was wondering whether there are any solutions.
PhotoLibraryViewModel.swift
import Photos
import UIKit
import Vision
/// Loads the user's photo library and publishes only the images that the
/// Vision animal recognizer classifies as containing a dog.
class PhotoLibraryViewModel: ObservableObject {
    /// Thumbnails that were classified as containing a dog.
    /// NOTE(review): holding `UIImage` values in memory does not scale to
    /// thousands of photos — prefer storing `PHAsset` identifiers and
    /// requesting thumbnails on demand from the grid cells.
    @Published var uiImages = [UIImage]()

    /// Every image asset fetched from the library, newest first.
    /// `nil` until authorization succeeds.
    var allPhotos: PHFetchResult<PHAsset>? = nil

    /// Single serial background queue for all Vision work.
    /// The original code created a brand-new queue per image; one shared
    /// queue both serializes the requests and avoids that churn.
    /// `.userInitiated` (not `.userInteractive`) — bulk classification is
    /// not UI-interaction work and should not compete with the main thread.
    private let animalRecognitionWorkQueue = DispatchQueue(
        label: "PetClassifierRequest",
        qos: .userInitiated,
        attributes: [],
        autoreleaseFrequency: .workItem
    )

    init() {
        getPhotos()
    }

    /// Requests photo-library authorization and, on success, fetches every
    /// image asset sorted by creation date (newest first), then starts
    /// thumbnail loading/classification.
    private func getPhotos() {
        PHPhotoLibrary.requestAuthorization { [weak self] status in
            guard let self = self else { return }
            switch status {
            // `.limited` still grants access to the user-selected assets,
            // so treat it like `.authorized` instead of doing nothing.
            case .authorized, .limited:
                let fetchOptions = PHFetchOptions()
                fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
                self.allPhotos = PHAsset.fetchAssets(with: .image, options: fetchOptions)
                self.setCompositionalLayout()
            case .denied, .restricted:
                print("Not allowed")
            case .notDetermined:
                print("Not determined yet")
            @unknown default:
                // Don't crash the app when a future OS adds a new case.
                print("Unhandled authorization status: \(status)")
            }
        }
    }

    /// Iterates every fetched asset, requests a grid-cell-sized thumbnail,
    /// and forwards each thumbnail to the dog classifier.
    private func setCompositionalLayout() {
        guard let allPhotos = allPhotos else { return }
        print("all photos: \(allPhotos.count)")

        // Hoist loop-invariant request configuration out of the enumeration.
        let side = (screen.width - 60) / 3
        let thumbnailSize = CGSize(width: side, height: side)
        let options = PHImageRequestOptions()
        // `.current` (not `.original`) lets PhotoKit serve a cached thumbnail
        // near `targetSize` instead of decoding every full-resolution
        // original — this was the main memory/speed problem.
        options.version = .current
        // One high-quality callback per asset; `.opportunistic` would invoke
        // the handler twice and classify each image twice.
        options.deliveryMode = .highQualityFormat

        allPhotos.enumerateObjects { [weak self] asset, _, _ in
            PHImageManager.default().requestImage(
                for: asset,
                targetSize: thumbnailSize,
                contentMode: .aspectFit,
                options: options
            ) { image, _ in
                guard let image = image else {
                    print("failed to make finalUIImage")
                    return
                }
                self?.checkImage(uiImage: image)
            }
        }
    }

    /// Runs the Vision animal recognizer on `uiImage` off the main thread and
    /// appends the image to `uiImages` (on the main thread) if a dog is found.
    private func checkImage(uiImage: UIImage) {
        guard let cgImage = uiImage.cgImage else {
            print("failed to convert image")
            return
        }
        // Build the request WITH its completion handler before performing it.
        // The original performed a handler-less request on the queue and only
        // reassigned the real one afterwards — a race in which results could
        // be delivered to a request that had no handler and be lost.
        let request = VNRecognizeAnimalsRequest { [weak self] request, _ in
            guard let self = self,
                  let results = request.results as? [VNRecognizedObjectObservation] else { return }
            // Append at most once per image, even if several observations
            // (or several labels) identify a dog. The original's `continue`
            // allowed the same image to be appended repeatedly.
            let containsDog = results.contains { observation in
                observation.labels.contains { $0.identifier == VNAnimalIdentifier.dog.rawValue }
            }
            if containsDog {
                DispatchQueue.main.async {
                    self.uiImages.append(uiImage)
                    print("dog detected")
                }
            }
        }
        animalRecognitionWorkQueue.async {
            let requestHandler = VNImageRequestHandler(cgImage: cgImage, options: [:])
            do {
                try requestHandler.perform([request])
            } catch {
                print("error: \(error)")
            }
        }
    }
}
PhotoLibraryView.swift
import SwiftUI
/// Displays the dog-classified thumbnails from `PhotoLibraryViewModel`
/// in a three-column grid over a blue gradient background.
struct PhotoLibraryView: View {
    @StateObject var photoLibraryViewModel = PhotoLibraryViewModel()

    /// Side length of one square grid cell: screen width minus padding,
    /// split across three columns.
    private var cellSide: CGFloat { (screen.width - 60) / 3 }

    /// Three flexible columns with 15-point spacing.
    private var gridColumns: [GridItem] {
        Array(repeating: GridItem(.flexible(), spacing: 15), count: 3)
    }

    /// Top-to-bottom blue gradient used behind the grid.
    private var backgroundGradient: LinearGradient {
        LinearGradient(
            gradient: Gradient(colors: [
                Color(red: 0.2588235438, green: 0.7568627596, blue: 0.9686274529),
                Color(red: 0.2392156869, green: 0.6745098233, blue: 0.9686274529)
            ]),
            startPoint: .top,
            endPoint: .bottom
        )
    }

    var body: some View {
        NavigationView {
            ZStack {
                backgroundGradient
                    .edgesIgnoringSafeArea(.all)
                ScrollView {
                    LazyVGrid(columns: gridColumns) {
                        ForEach(0..<photoLibraryViewModel.uiImages.count, id: \.self) { index in
                            Image(uiImage: photoLibraryViewModel.uiImages[index])
                                .resizable()
                                .frame(width: cellSide, height: cellSide)
                                .cornerRadius(13)
                        }
                    }
                    .padding(.horizontal)
                }
            }
            .navigationTitle("Dog images")
        }
    }
}
Upvotes: 3
Views: 534
Reputation: 534893
@Published var uiImages = [UIImage]()
Red flag! The type `[UIImage]` is immediately suspect. Images (especially ones straight from the photo library) are big, and an array of actual images is a great way to run out of memory.
You can keep an array of references to images (for example, `PHAsset` local identifiers), but not an array of the images themselves.
Upvotes: 3