@@ -22,31 +22,31 @@ @interface FLTSavePhotoDelegate : NSObject <AVCapturePhotoCaptureDelegate>
 
 @interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
 // The queue on which `eventSink` property should be accessed
-@property(nonatomic, strong) dispatch_queue_t dispatchQueue;
-// `eventSink` property should be accessed on `dispatchQueue`.
+@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
+// `eventSink` property should be accessed on `captureSessionQueue`.
 // The block itself should be invoked on the main queue.
 @property FlutterEventSink eventSink;
 @end
 
 @implementation FLTImageStreamHandler
 
-- (instancetype)initWithDispatchQueue:(dispatch_queue_t)dispatchQueue {
+- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
   self = [super init];
   NSAssert(self, @"super init cannot be nil");
-  _dispatchQueue = dispatchQueue;
+  _captureSessionQueue = captureSessionQueue;
   return self;
 }
 
 - (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
-  dispatch_async(self.dispatchQueue, ^{
+  dispatch_async(self.captureSessionQueue, ^{
     self.eventSink = nil;
   });
   return nil;
 }
 
 - (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
                                        eventSink:(nonnull FlutterEventSink)events {
-  dispatch_async(self.dispatchQueue, ^{
+  dispatch_async(self.captureSessionQueue, ^{
     self.eventSink = events;
   });
   return nil;
@@ -360,7 +360,9 @@ @interface FLTCam : NSObject <FlutterTexture,
 @end
 
 @implementation FLTCam {
-  dispatch_queue_t _dispatchQueue;
+  // All FLTCam's state access and capture session related operations should be run on this
+  // queue.
+  dispatch_queue_t _captureSessionQueue;
   UIDeviceOrientation _deviceOrientation;
 }
 // Format used for video and image streaming.
@@ -371,7 +373,7 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
                       resolutionPreset:(NSString *)resolutionPreset
                            enableAudio:(BOOL)enableAudio
                            orientation:(UIDeviceOrientation)orientation
-                         dispatchQueue:(dispatch_queue_t)dispatchQueue
+                   captureSessionQueue:(dispatch_queue_t)captureSessionQueue
                                  error:(NSError **)error {
   self = [super init];
   NSAssert(self, @"super init cannot be nil");
@@ -381,7 +383,7 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
     *error = e;
   }
   _enableAudio = enableAudio;
-  _dispatchQueue = dispatchQueue;
+  _captureSessionQueue = captureSessionQueue;
   _captureSession = [[AVCaptureSession alloc] init];
   _captureDevice = [AVCaptureDevice deviceWithUniqueID:cameraName];
   _flashMode = _captureDevice.hasFlash ? FlashModeAuto : FlashModeOff;
@@ -1141,10 +1143,11 @@ - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messen
     FLTThreadSafeEventChannel *threadSafeEventChannel =
         [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];
 
-    _imageStreamHandler = [[FLTImageStreamHandler alloc] initWithDispatchQueue:_dispatchQueue];
+    _imageStreamHandler =
+        [[FLTImageStreamHandler alloc] initWithCaptureSessionQueue:_captureSessionQueue];
     [threadSafeEventChannel setStreamHandler:_imageStreamHandler
                                   completion:^{
-                                    dispatch_async(self->_dispatchQueue, ^{
+                                    dispatch_async(self->_captureSessionQueue, ^{
                                       self.isStreamingImages = YES;
                                     });
                                   }];
@@ -1268,7 +1271,7 @@ - (BOOL)setupWriterForPath:(NSString *)path {
     _audioWriterInput.expectsMediaDataInRealTime = YES;
 
     [_videoWriter addInput:_audioWriterInput];
-    [_audioOutput setSampleBufferDelegate:self queue:_dispatchQueue];
+    [_audioOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
   }
 
   if (_flashMode == FlashModeTorch) {
@@ -1279,7 +1282,7 @@ - (BOOL)setupWriterForPath:(NSString *)path {
 
   [_videoWriter addInput:_videoWriterInput];
 
-  [_captureVideoOutput setSampleBufferDelegate:self queue:_dispatchQueue];
+  [_captureVideoOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
 
   return YES;
 }
@@ -1320,7 +1323,7 @@ @interface CameraPlugin ()
 @end
 
 @implementation CameraPlugin {
-  dispatch_queue_t _dispatchQueue;
+  dispatch_queue_t _captureSessionQueue;
 }
 
 + (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar> *)registrar {
@@ -1382,12 +1385,12 @@ - (void)sendDeviceOrientation:(UIDeviceOrientation)orientation {
 }
 
 - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
-  if (_dispatchQueue == nil) {
-    _dispatchQueue = dispatch_queue_create("io.flutter.camera.dispatchqueue", NULL);
+  if (_captureSessionQueue == nil) {
+    _captureSessionQueue = dispatch_queue_create("io.flutter.camera.dispatchqueue", NULL);
   }
 
   // Invoke the plugin on another dispatch queue to avoid blocking the UI.
-  dispatch_async(_dispatchQueue, ^{
+  dispatch_async(_captureSessionQueue, ^{
     FLTThreadSafeFlutterResult *threadSafeResult =
         [[FLTThreadSafeFlutterResult alloc] initWithResult:result];
 
@@ -1438,7 +1441,7 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call
                                         resolutionPreset:resolutionPreset
                                              enableAudio:[enableAudio boolValue]
                                              orientation:[[UIDevice currentDevice] orientation]
-                                           dispatchQueue:_dispatchQueue
+                                     captureSessionQueue:_captureSessionQueue
                                                    error:&error];
 
     if (error) {
@@ -1504,7 +1507,7 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call
   } else if ([@"dispose" isEqualToString:call.method]) {
     [_registry unregisterTexture:cameraId];
     [_camera close];
-    _dispatchQueue = nil;
+    _captureSessionQueue = nil;
     [result sendSuccess];
   } else if ([@"prepareForVideoRecording" isEqualToString:call.method]) {
     [_camera setUpCaptureSessionForAudio];
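The diff above is a mechanical rename, but it encodes a design rule: one serial dispatch queue owns the capture session, and every read or write of session state hops onto that queue instead of running on the platform-channel or main thread. A minimal sketch of that pattern, independent of the plugin (the CameraController class, its members, and the queue label below are illustrative assumptions, not the plugin's API):

#import <AVFoundation/AVFoundation.h>

@interface CameraController : NSObject
- (void)start;
@end

@implementation CameraController {
  // Serial queue that owns all capture-session state (hypothetical example).
  dispatch_queue_t _captureSessionQueue;
  AVCaptureSession *_captureSession;
}

- (instancetype)init {
  self = [super init];
  if (self) {
    // dispatch_queue_create with NULL attributes yields a serial queue.
    _captureSessionQueue = dispatch_queue_create("example.camera.captureSessionQueue", NULL);
    _captureSession = [[AVCaptureSession alloc] init];
  }
  return self;
}

- (void)start {
  // -startRunning can block, so it must not run on the main thread; dispatching
  // onto the owning serial queue also serializes it against any other session mutation.
  dispatch_async(_captureSessionQueue, ^{
    [self->_captureSession startRunning];
  });
}

@end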