Detect more than 100 images #254
Conversation
olexale left a comment:
Thanks for the contribution!
It has been a while since I last touched Swift, but I wanted to see if we could consolidate the logic. I've modified the proposal slightly so the same code path handles both scenarios, and extended support to tracking images.
A quick smoke test looks promising, but I haven't been able to test this against 100+ images. Could you take a look at the changes and let me know if they work for your use case?
Referenced diff lines:
self.sendToFlutter("onInitialized", arguments: nil)
} else {
logPluginError("Failed to create ARConfiguration", toChannel: self.channel)
let allImages = arguments["detectionImages"] as? [[String: Any]] ?? []
The updates are in ios/Classes/FlutterArkitView+Initialization.swift:
import ARKit
extension FlutterArkitView {
func initalize(_ arguments: [String: Any], _: FlutterResult) {
if let showStatistics = arguments["showStatistics"] as? Bool {
sceneView.showsStatistics = showStatistics
}
if let autoenablesDefaultLighting = arguments["autoenablesDefaultLighting"] as? Bool {
sceneView.autoenablesDefaultLighting = autoenablesDefaultLighting
}
if let forceUserTapOnCenter = arguments["forceUserTapOnCenter"] as? Bool {
forceTapOnCenter = forceUserTapOnCenter
}
initalizeGesutreRecognizers(arguments)
sceneView.debugOptions = parseDebugOptions(arguments)
// Check for large sets of images to detect (World Tracking) or track (Image Tracking)
let detectionImages = arguments["detectionImages"] as? [[String: Any]] ?? []
let trackingImages = arguments["trackingImages"] as? [[String: Any]] ?? []
let (allImages, key) = !detectionImages.isEmpty
? (detectionImages, "detectionImages")
: (trackingImages, "trackingImages")
if allImages.count > 100 {
let imageBatches = stride(from: 0, to: allImages.count, by: 100).map {
Array(allImages[$0..<min($0 + 100, allImages.count)])
}
runImageDetectionBatches(
baseArguments: arguments,
imageKey: key,
imageBatches: imageBatches,
sendInitialized: true
)
} else {
runConfiguration(arguments, sendInitialized: true)
}
}
private func runConfiguration(_ arguments: [String: Any], sendInitialized: Bool) {
guard !isDisposed else { return }
configuration = parseConfiguration(arguments)
guard let config = configuration else {
logPluginError("Failed to create ARConfiguration", toChannel: channel)
return
}
// Do NOT use .removeExistingAnchors to preserve the world state
sceneView.session.run(config)
if sendInitialized {
sendToFlutter("onInitialized", arguments: nil)
}
}
private func runImageDetectionBatches(
baseArguments: [String: Any],
imageKey: String,
imageBatches: [[Any]],
batchIndex: Int = 0,
sendInitialized: Bool = false
) {
guard !isDisposed else { return }
var arguments = baseArguments
arguments[imageKey] = imageBatches[batchIndex]
runConfiguration(arguments, sendInitialized: sendInitialized)
// Schedule next batch rotation
let nextIndex = (batchIndex + 1) % imageBatches.count
DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) { [weak self] in
self?.runImageDetectionBatches(
baseArguments: baseArguments,
imageKey: imageKey,
imageBatches: imageBatches,
batchIndex: nextIndex
)
}
}
func parseDebugOptions(_ arguments: [String: Any]) -> SCNDebugOptions {
var options = ARSCNDebugOptions().rawValue
if arguments["showFeaturePoints"] as? Bool == true {
options |= ARSCNDebugOptions.showFeaturePoints.rawValue
}
if arguments["showWorldOrigin"] as? Bool == true {
options |= ARSCNDebugOptions.showWorldOrigin.rawValue
}
return ARSCNDebugOptions(rawValue: options)
}
func parseConfiguration(_ arguments: [String: Any]) -> ARConfiguration? {
let configurationType = arguments["configuration"] as! Int
var configuration: ARConfiguration?
switch configurationType {
case 0:
configuration = createWorldTrackingConfiguration(arguments)
case 1:
#if !DISABLE_TRUEDEPTH_API
configuration = createFaceTrackingConfiguration(arguments)
#else
logPluginError("TRUEDEPTH_API disabled", toChannel: channel)
#endif
case 2:
if #available(iOS 12.0, *) {
configuration = createImageTrackingConfiguration(arguments)
} else {
logPluginError("configuration is not supported on this device", toChannel: channel)
}
case 3:
if #available(iOS 13.0, *) {
configuration = createBodyTrackingConfiguration(arguments)
} else {
logPluginError("configuration is not supported on this device", toChannel: channel)
}
case 4:
if #available(iOS 14.0, *) {
configuration = createDepthTrackingConfiguration(arguments)
} else {
logPluginError("configuration is not supported on this device", toChannel: channel)
}
default:
break
}
configuration?.worldAlignment = parseWorldAlignment(arguments)
return configuration
}
func parseWorldAlignment(_ arguments: [String: Any]) -> ARConfiguration.WorldAlignment {
switch arguments["worldAlignment"] as? Int {
case 0: return .gravity
case 1: return .gravityAndHeading
default: return .camera
}
}
}
Add a flag in ios/Classes/FlutterArkitView.swift:
class FlutterArkitView: NSObject, FlutterPlatformView {
let sceneView: ARSCNView
let channel: FlutterMethodChannel
var forceTapOnCenter: Bool = false
var configuration: ARConfiguration? = nil
var isDisposed: Bool = false // <- THIS ONE
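For context, the flag exists so that the batch-rotation callback scheduled in runImageDetectionBatches stops once the view is torn down (the guard !isDisposed check bails out). Below is a minimal sketch of how the flag might be set on disposal, assuming the plugin pauses the session when the platform view is disposed; the method name and body are illustrative, not the plugin's actual code:
// Illustrative sketch only: flip the flag before pausing the session so any
// pending asyncAfter batch rotation exits early instead of re-running the config.
func onDispose(_ result: FlutterResult) {
    isDisposed = true
    sceneView.session.pause()
    result(nil)
}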
I’ll test it later with a large image set and get back to you once I’ve verified it against my use case.
Hi @olexale
This pull request introduces logic to handle a large number of detection images during ARKit view initialization by batching them to avoid overloading the AR session. The main change is the addition of a batching mechanism that processes detection images in groups of up to 100, running each batch sequentially with a delay, which improves stability and reliability when initializing with many images.
ARKit image detection batching:
- Detects when more than 100 images are provided during FlutterArkitView initialization; if so, splits them into batches of 100 and processes the batches sequentially to prevent ARKit session overload.
- Adds a private runImageDetectionBatches method that runs each batch, updates the configuration and AR session for it, and schedules the next batch with a delay to comply with Apple's recommended behavior.
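As a standalone illustration of the batching arithmetic (sample numbers only, not plugin code): with 250 images and a batch size of 100, the stride-based chunking yields batches of 100, 100, and 50, and the round-robin index wraps back to the first batch after the last one.
// Illustrative only: stride-based chunking plus round-robin rotation.
let imageCount = 250
let batchSize = 100
let batches = stride(from: 0, to: imageCount, by: batchSize).map { start in
    Array(start..<min(start + batchSize, imageCount))
}
print(batches.map { $0.count }) // [100, 100, 50]

var index = 0
for _ in 0..<4 {
    print("running batch \(index + 1) of \(batches.count)")
    index = (index + 1) % batches.count // 0 -> 1 -> 2 -> 0 -> ...
}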