Replace brittle localized-string selectors and broken wait helpers with a robust, identifier-first UI test infrastructure. All 41 UI tests pass on iOS 26.2 simulator (iPhone 17).

Foundation:
- BaseUITestCase with deterministic launch helpers (launchClean, launchOffline)
- WaitHelpers (waitUntilHittable, waitUntilGone, tapWhenReady) replacing sleep()
- UITestID enum mirroring AccessibilityIdentifiers from the app target
- Screen objects: TabBarScreen, CameraScreen, CollectionScreen, TodayScreen, SettingsScreen, PlantDetailScreen

Key fixes:
- Tab navigation uses waitForExistence+tap instead of isHittable (unreliable in iOS 26 simulator)
- Tests handle real app state (empty collection, no camera permission)
- Increased timeouts for parallel clone execution
- Added NetworkMonitorProtocol and protocol-typed DI for testability
- Fixed actor-isolation issues in unit test mocks

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
82 lines · 2.5 KiB · Swift
//
//  IdentifyPlantOnDeviceUseCase.swift
//  PlantGuide
//
//  Created on 1/21/26.
//
|
import Foundation
import UIKit
|
// MARK: - IdentifyPlantUseCaseProtocol

/// Protocol for plant identification use cases (both on-device and remote).
///
/// Conformers are `Sendable`, so a use case can safely cross concurrency
/// domains (e.g. be injected into a main-actor view model and invoked
/// from a background task).
protocol IdentifyPlantUseCaseProtocol: Sendable {
    /// Identifies a plant from an image.
    /// - Parameter image: The UIImage containing the plant to identify.
    /// - Returns: An array of view-layer predictions.
    /// - Throws: If identification fails.
    func execute(image: UIImage) async throws -> [ViewPlantPrediction]
}
|
// MARK: - IdentifyPlantOnDeviceUseCase

/// Use case for identifying plants using on-device machine learning.
///
/// Pipeline: preprocess the image, run the on-device classifier, then map
/// the ML-layer predictions into view-layer `ViewPlantPrediction` values.
struct IdentifyPlantOnDeviceUseCase: IdentifyPlantUseCaseProtocol {

    // MARK: - Dependencies

    /// Prepares the raw image for classification (behavior defined by the
    /// injected `ImagePreprocessorProtocol` conformer).
    private let imagePreprocessor: any ImagePreprocessorProtocol

    /// Runs the on-device ML model over a preprocessed image.
    private let classificationService: any PlantClassificationServiceProtocol

    // MARK: - Initialization

    /// Creates the use case with injected, protocol-typed dependencies.
    /// - Parameters:
    ///   - imagePreprocessor: Transforms the input image into the form the
    ///     classifier expects.
    ///   - classificationService: Produces ML predictions for an image.
    init(
        imagePreprocessor: any ImagePreprocessorProtocol,
        classificationService: any PlantClassificationServiceProtocol
    ) {
        self.imagePreprocessor = imagePreprocessor
        self.classificationService = classificationService
    }

    // MARK: - IdentifyPlantUseCaseProtocol

    /// Identifies a plant and returns view-layer predictions.
    /// - Parameter image: The UIImage containing the plant to identify.
    /// - Returns: Predictions mapped into the view layer, in the order the
    ///   classifier produced them.
    /// - Throws: `IdentifyPlantOnDeviceUseCaseError.noMatchesFound` when the
    ///   classifier returns no predictions; rethrows any error raised by
    ///   preprocessing or classification.
    func execute(image: UIImage) async throws -> [ViewPlantPrediction] {
        // Preprocess the image for classification
        let preprocessedImage = try await imagePreprocessor.preprocess(image)

        // Run classification on the preprocessed image
        let predictions = try await classificationService.classify(image: preprocessedImage)

        // Surface an empty classifier result as a domain error rather than
        // silently returning an empty list.
        guard !predictions.isEmpty else {
            throw IdentifyPlantOnDeviceUseCaseError.noMatchesFound
        }

        // Map ML predictions to view-layer predictions
        return predictions.map { prediction in
            ViewPlantPrediction(
                id: prediction.id,
                speciesName: prediction.scientificName,
                commonName: prediction.commonNames.first,
                confidence: Double(prediction.confidence)
            )
        }
    }
}
|
// MARK: - IdentifyPlantOnDeviceUseCaseError

/// Errors that can occur during on-device plant identification.
enum IdentifyPlantOnDeviceUseCaseError: Error, LocalizedError {
    /// No matches were found for the provided image
    case noMatchesFound

    /// Human-readable message surfaced to callers through `LocalizedError`
    /// (and thus through `Error.localizedDescription`).
    var errorDescription: String? {
        switch self {
        case .noMatchesFound:
            return "No plant matches were found in the image"
        }
    }
}