
Commit 92e8161

updating
1 parent bafa737 commit 92e8161

9 files changed: +89 -78 lines changed

ios/Classes/BarcodeDetector.m  (+1 -1)

@@ -13,7 +13,7 @@ - (instancetype)initWithVision:(FIRVision *)vision options:(NSDictionary *)optio
   return self;
 }
 
-- (void)handleDetection:(FIRVisionImage *)image result:(FlutterResult)result {
+- (void)handleDetection:(FIRVisionImage *)image result:(FlutterEventSink)result {
   [_detector detectInImage:image
                 completion:^(NSArray<FIRVisionBarcode *> *barcodes, NSError *error) {
                   if (error) {

ios/Classes/FaceDetector.m  (+1 -1)

@@ -13,7 +13,7 @@ - (instancetype)initWithVision:(FIRVision *)vision options:(NSDictionary *)optio
   return self;
 }
 
-- (void)handleDetection:(FIRVisionImage *)image result:(FlutterResult)result {
+- (void)handleDetection:(FIRVisionImage *)image result:(FlutterEventSink)result {
   [_detector
       processImage:image
         completion:^(NSArray<FIRVisionFace *> *_Nullable faces, NSError *_Nullable error) {

ios/Classes/FirebaseMlVisionPlugin.m  (+80 -69)

@@ -13,8 +13,11 @@ @interface FirebaseCam : NSObject <FlutterTexture,
                                    AVCaptureVideoDataOutputSampleBufferDelegate,
                                    AVCaptureAudioDataOutputSampleBufferDelegate,
                                    FlutterStreamHandler>
+@property(assign, atomic) BOOL isRecognizing;
 @property(readonly, nonatomic) int64_t textureId;
 @property(nonatomic, copy) void (^onFrameAvailable)();
+@property(nonatomic) id<Detector> activeDetector;
+@property(nonatomic) FlutterEventSink resultSink;
 @property(nonatomic) FlutterEventChannel *eventChannel;
 @property(nonatomic) FlutterEventSink eventSink;
 @property(readonly, nonatomic) AVCaptureSession *captureSession;
@@ -140,8 +143,17 @@ - (void)captureOutput:(AVCaptureOutput *)output
     didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
            fromConnection:(AVCaptureConnection *)connection {
   CVImageBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
-  if (output == _captureVideoOutput) {
-    CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+  if (newBuffer) {
+    if (!_isRecognizing) {
+      _isRecognizing = YES;
+      FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithBuffer:sampleBuffer];
+      FIRVisionImageMetadata *metadata = [[FIRVisionImageMetadata alloc] init];
+      FIRVisionDetectorImageOrientation visionOrientation = FIRVisionDetectorImageOrientationTopLeft;
+
+      metadata.orientation = visionOrientation;
+      visionImage.metadata = metadata;
+      [_activeDetector handleDetection:visionImage result:_resultSink];
+    }
     CFRetain(newBuffer);
     CVPixelBufferRef old = _latestPixelBuffer;
     while (!OSAtomicCompareAndSwapPtrBarrier(old, newBuffer, (void **)&_latestPixelBuffer)) {
@@ -156,9 +168,9 @@ - (void)captureOutput:(AVCaptureOutput *)output
   }
   if (!CMSampleBufferDataIsReady(sampleBuffer)) {
     _eventSink(@{
-      @"event" : @"error",
-      @"errorDescription" : @"sample buffer is not ready. Skipping sample"
-    });
+        @"event" : @"error",
+        @"errorDescription" : @"sample buffer is not ready. Skipping sample"
+      });
     return;
   }
 }
@@ -227,7 +239,7 @@ + (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
                                   messenger:[registrar messenger]];
   [registrar addMethodCallDelegate:instance channel:channel];
   [results setStreamHandler:instance];
-
+  
   SEL sel = NSSelectorFromString(@"registerLibrary:withVersion:");
   if ([FIRApp respondsToSelector:sel]) {
     [FIRApp performSelector:sel withObject:LIBRARY_NAME withObject:LIBRARY_VERSION];
@@ -264,11 +276,11 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
   NSString *modelName = call.arguments[@"model"];
   NSDictionary *options = call.arguments[@"options"];
   NSNumber *handle = call.arguments[@"handle"];
-  if ([@"ModelManager#setupLocalModel" isEqualToString:call.method]) {
-    [SetupLocalModel modelName:modelName result:result];
-  } else if ([@"ModelManager#setupRemoteModel" isEqualToString:call.method]) {
-    [SetupRemoteModel modelName:modelName result:result];
-  } else if ([@"camerasAvailable" isEqualToString:call.method]){
+    if ([@"ModelManager#setupLocalModel" isEqualToString:call.method]) {
+      [SetupLocalModel modelName:modelName result:result];
+    } else if ([@"ModelManager#setupRemoteModel" isEqualToString:call.method]) {
+      [SetupRemoteModel modelName:modelName result:result];
+    } else if ([@"camerasAvailable" isEqualToString:call.method]){
     AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
         discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
                               mediaType:AVMediaTypeVideo
@@ -290,20 +302,20 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
           break;
       }
       [reply addObject:@{
-        @"name" : [device uniqueID],
-        @"lensFacing" : lensFacing,
-        @"sensorOrientation" : @90,
-      }];
+          @"name" : [device uniqueID],
+          @"lensFacing" : lensFacing,
+          @"sensorOrientation" : @90,
+        }];
     }
     result(reply);
   } else if ([@"initialize" isEqualToString:call.method]) {
     NSString *cameraName = call.arguments[@"cameraName"];
     NSString *resolutionPreset = call.arguments[@"resolutionPreset"];
     NSError *error;
     FirebaseCam *cam = [[FirebaseCam alloc] initWithCameraName:cameraName
-                                               resolutionPreset:resolutionPreset
-                                                  dispatchQueue:_dispatchQueue
-                                                          error:&error];
+                                                 resolutionPreset:resolutionPreset
+                                                    dispatchQueue:_dispatchQueue
+                                                            error:&error];
     if (error) {
       result(getFlutterError(error));
     } else {
@@ -323,10 +335,10 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
     [eventChannel setStreamHandler:cam];
     cam.eventChannel = eventChannel;
     result(@{
-      @"textureId" : @(textureId),
-      @"previewWidth" : @(cam.previewSize.width),
-      @"previewHeight" : @(cam.previewSize.height),
-    });
+        @"textureId" : @(textureId),
+        @"previewWidth" : @(cam.previewSize.width),
+        @"previewHeight" : @(cam.previewSize.height),
+      });
     [cam start];
   }
 } else if ([@"BarcodeDetector#startDetection" isEqualToString:call.method]){
@@ -335,52 +347,51 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call result:(FlutterResult)re
       detector = [[BarcodeDetector alloc] initWithVision:[FIRVision vision] options:options];
       [FLTFirebaseMlVisionPlugin addDetector:handle detector:detector];
     }
+    _camera.activeDetector = detectors[handle];
   } else if ([@"BarcodeDetector#detectInImage" isEqualToString:call.method] ||
-             [@"FaceDetector#processImage" isEqualToString:call.method] ||
-             [@"ImageLabeler#processImage" isEqualToString:call.method] ||
-             [@"TextRecognizer#processImage" isEqualToString:call.method] ||
-             [@"VisionEdgeImageLabeler#processLocalImage" isEqualToString:call.method] ||
-             [@"VisionEdgeImageLabeler#processRemoteImage" isEqualToString:call.method]) {
-    [self handleDetection:call result:result];
-  } else if ([@"BarcodeDetector#close" isEqualToString:call.method] ||
-             [@"FaceDetector#close" isEqualToString:call.method] ||
-             [@"ImageLabeler#close" isEqualToString:call.method] ||
-             [@"TextRecognizer#close" isEqualToString:call.method] ||
-             [@"VisionEdgeImageLabeler#close" isEqualToString:call.method]) {
-    NSNumber *handle = call.arguments[@"handle"];
-    [detectors removeObjectForKey:handle];
-    result(nil);
-  } else {
-    result(FlutterMethodNotImplemented);
-  }
+      [@"FaceDetector#processImage" isEqualToString:call.method] ||
+      [@"ImageLabeler#processImage" isEqualToString:call.method] ||
+      [@"TextRecognizer#processImage" isEqualToString:call.method] ||
+      [@"VisionEdgeImageLabeler#processLocalImage" isEqualToString:call.method] ||
+      [@"VisionEdgeImageLabeler#processRemoteImage" isEqualToString:call.method]) {
+      [self handleDetection:call result:result];
+    } else if ([@"BarcodeDetector#close" isEqualToString:call.method] ||
+      [@"FaceDetector#close" isEqualToString:call.method] ||
+      [@"ImageLabeler#close" isEqualToString:call.method] ||
+      [@"TextRecognizer#close" isEqualToString:call.method] ||
+      [@"VisionEdgeImageLabeler#close" isEqualToString:call.method]) {
+      NSNumber *handle = call.arguments[@"handle"];
+      [detectors removeObjectForKey:handle];
+      result(nil);
+    } else {
+      result(FlutterMethodNotImplemented);
+    }
 }
 
 - (void)handleDetection:(FlutterMethodCall *)call result:(FlutterResult)result {
-  FIRVisionImage *image = [self dataToVisionImage:call.arguments];
-  NSDictionary *options = call.arguments[@"options"];
-
-  NSNumber *handle = call.arguments[@"handle"];
-  id<Detector> detector = detectors[handle];
-  if (!detector) {
-    if ([call.method hasPrefix:@"BarcodeDetector"]) {
-      detector = [[BarcodeDetector alloc] initWithVision:[FIRVision vision] options:options];
-    } else if ([call.method hasPrefix:@"FaceDetector"]) {
-      detector = [[FaceDetector alloc] initWithVision:[FIRVision vision] options:options];
-    } else if ([call.method hasPrefix:@"ImageLabeler"]) {
-      detector = [[ImageLabeler alloc] initWithVision:[FIRVision vision] options:options];
-    } else if ([call.method hasPrefix:@"TextRecognizer"]) {
-      detector = [[TextRecognizer alloc] initWithVision:[FIRVision vision] options:options];
-    } else if ([call.method isEqualToString:@"VisionEdgeImageLabeler#processLocalImage"]) {
-      detector = [[LocalVisionEdgeDetector alloc] initWithVision:[FIRVision vision]
-                                                         options:options];
-    } else if ([call.method isEqualToString:@"VisionEdgeImageLabeler#processRemoteImage"]) {
-      detector = [[RemoteVisionEdgeDetector alloc] initWithVision:[FIRVision vision]
-                                                          options:options];
+    FIRVisionImage *image = [self dataToVisionImage:call.arguments];
+    NSDictionary *options = call.arguments[@"options"];
+    NSNumber *handle = call.arguments[@"handle"];
+    id<Detector> detector = detectors[handle];
+    if (!detector) {
+      if ([call.method hasPrefix:@"BarcodeDetector"]) {
+        detector = [[BarcodeDetector alloc] initWithVision:[FIRVision vision] options:options];
+      } else if ([call.method hasPrefix:@"FaceDetector"]) {
+        detector = [[FaceDetector alloc] initWithVision:[FIRVision vision] options:options];
+      } else if ([call.method hasPrefix:@"ImageLabeler"]) {
+        detector = [[ImageLabeler alloc] initWithVision:[FIRVision vision] options:options];
+      } else if ([call.method hasPrefix:@"TextRecognizer"]) {
+        detector = [[TextRecognizer alloc] initWithVision:[FIRVision vision] options:options];
+      } else if ([call.method isEqualToString:@"VisionEdgeImageLabeler#processLocalImage"]) {
+        detector = [[LocalVisionEdgeDetector alloc] initWithVision:[FIRVision vision]
+                                                            options:options];
+      } else if ([call.method isEqualToString:@"VisionEdgeImageLabeler#processRemoteImage"]) {
+        detector = [[RemoteVisionEdgeDetector alloc] initWithVision:[FIRVision vision]
+                                                             options:options];
+      }
+      [FLTFirebaseMlVisionPlugin addDetector:handle detector:detector];
     }
-    [FLTFirebaseMlVisionPlugin addDetector:handle detector:detector];
-  }
-
-  [detectors[handle] handleDetection:image result:result];
+    [detectors[handle] handleDetection:image result:result];
 }
 
 - (FIRVisionImage *)dataToVisionImage:(NSDictionary *)imageData {
@@ -524,13 +535,13 @@ - (FIRVisionImage *)pixelBufferToVisionImage:(CVPixelBufferRef)pixelBufferRef {
 }
 
 + (void)addDetector:(NSNumber *)handle detector:(id<Detector>)detector {
-  if (detectors[handle]) {
-    NSString *reason =
+    if (detectors[handle]) {
+      NSString *reason =
         [[NSString alloc] initWithFormat:@"Object for handle already exists: %d", handle.intValue];
-    @throw [[NSException alloc] initWithName:NSInvalidArgumentException reason:reason userInfo:nil];
-  }
-
-  detectors[handle] = detector;
+      @throw [[NSException alloc] initWithName:NSInvalidArgumentException reason:reason userInfo:nil];
+    }
+  
+    detectors[handle] = detector;
 }
 
 - (FlutterError * _Nullable)onCancelWithArguments:(id _Nullable)arguments {

ios/Classes/ImageLabeler.m  (+1 -1)

@@ -23,7 +23,7 @@ - (instancetype)initWithVision:(FIRVision *)vision options:(NSDictionary *)optio
   return self;
 }
 
-- (void)handleDetection:(FIRVisionImage *)image result:(FlutterResult)result {
+- (void)handleDetection:(FIRVisionImage *)image result:(FlutterEventSink)result {
   [_labeler
       processImage:image
         completion:^(NSArray<FIRVisionImageLabel *> *_Nullable labels, NSError *_Nullable error) {

ios/Classes/LocalVisionEdgeDetector.m  (+1 -1)

@@ -31,7 +31,7 @@ - (instancetype)initWithVision:(FIRVision *)vision options:(NSDictionary *)optio
   return self;
 }
 
-- (void)handleDetection:(FIRVisionImage *)image result:(FlutterResult)result {
+- (void)handleDetection:(FIRVisionImage *)image result:(FlutterEventSink)result {
   [_labeler
       processImage:image
         completion:^(NSArray<FIRVisionImageLabel *> *_Nullable labels, NSError *_Nullable error) {

ios/Classes/RemoteVisionEdgeDetector.m  (+1 -1)

@@ -41,7 +41,7 @@ - (instancetype)initWithVision:(FIRVision *)vision options:(NSDictionary *)optio
  return self;
 }
 
-- (void)handleDetection:(FIRVisionImage *)image result:(FlutterResult)result {
+- (void)handleDetection:(FIRVisionImage *)image result:(FlutterEventSink)result {
   [_labeler
       processImage:image
         completion:^(NSArray<FIRVisionImageLabel *> *_Nullable labels, NSError *_Nullable error) {

ios/Classes/TextRecognizer.m  (+1 -1)

@@ -23,7 +23,7 @@ - (instancetype)initWithVision:(FIRVision *)vision options:(NSDictionary *)optio
   return self;
 }
 
-- (void)handleDetection:(FIRVisionImage *)image result:(FlutterResult)result {
+- (void)handleDetection:(FIRVisionImage *)image result:(FlutterEventSink)result {
   [_recognizer processImage:image
                  completion:^(FIRVisionText *_Nullable visionText, NSError *_Nullable error) {
                    if (error) {

lib/src/barcode_detector.dart  (+2 -2)

@@ -192,7 +192,7 @@ class BarcodeDetector {
   bool _isClosed = false;
 
   /// Detects barcodes in the input image.
-  Stream<List<Barcode>> startDetection() {
+  Stream<Barcode> startDetection() {
     assert(!_isClosed);
 
     _hasBeenOpened = true;
@@ -207,7 +207,7 @@
       },
     ).then((onValue){
      const EventChannel resultsChannel = EventChannel('plugins.flutter.io/firebase_mlvision_results');
-     data = resultsChannel.receiveBroadcastStream();
+     data = resultsChannel.receiveBroadcastStream().map((convert) => Barcode._(convert));
    });
    return data;
  }
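
A minimal usage sketch of the streaming API above, assuming a BarcodeDetector has already been obtained through this plugin's FirebaseVision API; only the Stream<Barcode> return type and the Barcode mapping come from the hunk, while the field name and helper below are illustrative.

// Sketch only, not part of this commit. How the detector is obtained is assumed;
// startDetection() returning Stream<Barcode> is what the change above introduces.
import 'dart:async';

StreamSubscription<Barcode> listenForBarcodes(BarcodeDetector detector) {
  // Each event is now a single Barcode decoded from the live camera feed,
  // rather than a List<Barcode> returned per still image.
  final Stream<Barcode> barcodes = detector.startDetection();
  return barcodes.listen((Barcode barcode) {
    // rawValue follows the upstream firebase_ml_vision Barcode API; adjust if
    // this fork exposes a different field.
    print('Detected barcode: ${barcode.rawValue}');
  });
}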

lib/src/firebase_vision.dart  (+1 -1)

@@ -284,7 +284,7 @@ class FirebaseVision extends ValueNotifier<FirebaseCameraValue> {
   }
 
   /// Creates an instance of [BarcodeDetector].
-  Stream<List<Barcode>> addBarcodeDetector([BarcodeDetectorOptions options]) {
+  Stream<Barcode> addBarcodeDetector([BarcodeDetectorOptions options]) {
     BarcodeDetector detector = BarcodeDetector._(options ?? const BarcodeDetectorOptions(),
       nextHandle++,
     );
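
Similarly, a hedged sketch of consuming addBarcodeDetector once its return type is Stream<Barcode>; the camera setup around the FirebaseVision instance is assumed from the rest of the plugin and is not shown by this hunk, and the Barcode field name follows the upstream firebase_ml_vision API.

// Sketch only. Creation and initialization of the FirebaseVision instance are
// assumed; the addBarcodeDetector signature and its Stream<Barcode> return type
// come from the change above.
Future<void> startBarcodeStream(FirebaseVision vision) async {
  final Stream<Barcode> barcodes =
      vision.addBarcodeDetector(const BarcodeDetectorOptions());
  await for (final Barcode barcode in barcodes) {
    // One Barcode per event, pushed over the results EventChannel.
    print('Barcode: ${barcode.displayValue}');
  }
}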
