@@ -139,6 +139,7 @@ - (void)setCaptureSessionPreset:(NSString *)resolutionPreset {
 - (void)captureOutput:(AVCaptureOutput *)output
     didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
            fromConnection:(AVCaptureConnection *)connection {
+  CVImageBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
   if (output == _captureVideoOutput) {
     CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
     CFRetain(newBuffer);
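The retain in this hunk follows Core Foundation's Get rule: `CMSampleBufferGetImageBuffer` returns a buffer the delegate callback does not own, so a frame that outlives the callback has to be retained here and released later by whoever consumes it. A minimal sketch of that pairing, with an illustrative consumer that is not part of this diff:

    // Sketch only: retain/release pairing for a frame that escapes the
    // captureOutput: callback. handOffFrame() is illustrative, not from the PR.
    CVImageBufferRef frame = CMSampleBufferGetImageBuffer(sampleBuffer);
    CFRetain(frame);      // the Get rule: the callback does not own the buffer until it retains it
    handOffFrame(frame);  // e.g. copy into a Flutter texture or run a detector
    CFRelease(frame);     // balance the retain once the consumer is done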
@@ -210,19 +211,22 @@ @implementation FLTFirebaseMlVisionPlugin {
 }

 static NSMutableDictionary<NSNumber *, id<Detector>> *detectors;
+FlutterEventSink resultSink;

 + (void)handleError:(NSError *)error result:(FlutterResult)result {
   result(getFlutterError(error));
 }

 + (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
   detectors = [NSMutableDictionary new];
+  FlutterEventChannel *results = [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/firebase_mlvision_results" binaryMessenger:[registrar messenger]];
   FlutterMethodChannel *channel =
       [FlutterMethodChannel methodChannelWithName:@"plugins.flutter.io/firebase_mlvision"
                                   binaryMessenger:[registrar messenger]];
   FLTFirebaseMlVisionPlugin *instance = [[FLTFirebaseMlVisionPlugin alloc] initWithRegistry:[registrar textures]
                                                                                   messenger:[registrar messenger]];
   [registrar addMethodCallDelegate:instance channel:channel];
+  [results setStreamHandler:instance];

   SEL sel = NSSelectorFromString(@"registerLibrary:withVersion:");
   if ([FIRApp respondsToSelector:sel]) {
@@ -258,6 +262,8 @@ - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result

 - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)result {
   NSString *modelName = call.arguments[@"model"];
+  NSDictionary *options = call.arguments[@"options"];
+  NSNumber *handle = call.arguments[@"handle"];
   if ([@"ModelManager#setupLocalModel" isEqualToString:call.method]) {
     [SetupLocalModel modelName:modelName result:result];
   } else if ([@"ModelManager#setupRemoteModel" isEqualToString:call.method]) {
@@ -307,7 +313,7 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
     int64_t textureId = [_registry registerTexture:cam];
     _camera = cam;
     cam.onFrameAvailable = ^{
-      [_registry textureFrameAvailable:textureId];
+      [self->_registry textureFrameAvailable:textureId];
     };
     FlutterEventChannel *eventChannel = [FlutterEventChannel
         eventChannelWithName:[NSString
@@ -323,6 +329,12 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
     });
     [cam start];
   }
+  } else if ([@"BarcodeDetector#startDetection" isEqualToString:call.method]) {
+    id<Detector> detector = detectors[handle];
+    if (!detector) {
+      detector = [[BarcodeDetector alloc] initWithVision:[FIRVision vision] options:options];
+      [FLTFirebaseMlVisionPlugin addDetector:handle detector:detector];
+    }
   } else if ([@"BarcodeDetector#detectInImage" isEqualToString:call.method] ||
              [@"FaceDetector#processImage" isEqualToString:call.method] ||
              [@"ImageLabeler#processImage" isEqualToString:call.method] ||
@@ -521,4 +533,14 @@ + (void)addDetector:(NSNumber *)handle detector:(id<Detector>)detector {
   detectors[handle] = detector;
 }

+- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
+  resultSink = nil;
+  return nil;
+}
+
+- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments eventSink:(nonnull FlutterEventSink)events {
+  resultSink = events;
+  return nil;
+}
+
 @end
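The two methods added above make the plugin instance its own stream handler for the results channel registered in `registerWithRegistrar:`: `onListenWithArguments:eventSink:` captures the sink in `resultSink`, and `onCancelWithArguments:` clears it. A minimal sketch of how a detection payload could then be forwarded to the Dart side; the helper name and the `@"handle"`/`@"data"` payload shape are assumptions for illustration, not defined by this PR:

    // Sketch only: forward a detection result through the results EventChannel.
    // resultSink is the file-level FlutterEventSink set in onListenWithArguments:.
    static void FLTSendDetectionResult(NSNumber *handle, id _Nullable data) {
      if (resultSink == nil) {
        return;  // no Dart listener attached, or onCancelWithArguments: cleared the sink
      }
      resultSink(@{@"handle" : handle, @"data" : data ?: [NSNull null]});
    }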