IPC Broadcast upload extension screenshare (#981)

* IPC Broadcast Extension impl

* React-native-webrtc attribution

* default to in-app capture; require deviceId == 'broadcast' to select the broadcast extension for now

* update flutter version in build action

Fixes an error reported by flutter analyze.
This commit is contained in:
davidliu
2022-06-21 19:02:34 +09:00
committed by GitHub
parent e4d4428443
commit ef4da102e8
10 changed files with 589 additions and 5 deletions

View File

@ -21,7 +21,7 @@ jobs:
java-version: '12.x'
- uses: subosito/flutter-action@v1
with:
flutter-version: '2.5.3'
flutter-version: '3.0.2'
channel: 'stable'
- run: flutter packages get
- run: flutter format lib/ test/ --set-exit-if-changed

27
NOTICE
View File

@ -22,3 +22,30 @@ See the License for the specific language governing permissions and
limitations under the License.
#####################################################################################
react-native-webrtc
https://github.com/react-native-webrtc/react-native-webrtc
The MIT License (MIT)
Copyright (c) 2015 Howard Yang
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
#####################################################################################

View File

@ -8,7 +8,9 @@
#import "AudioUtils.h"
#if TARGET_OS_IPHONE
#import <ReplayKit/ReplayKit.h>
#import "FlutterRPScreenRecorder.h"
#import "FlutterBroadcastScreenCapturer.h"
#endif
@implementation AVCaptureDevice (Flutter)
@ -488,12 +490,38 @@ typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream);
result:(FlutterResult)result {
NSString *mediaStreamId = [[NSUUID UUID] UUIDString];
RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
RTCVideoSource *videoSource = [self.peerConnectionFactory videoSource];
FlutterRPScreenRecorder *screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource];
BOOL useBroadcastExtension = false;
id videoConstraints = constraints[@"video"];
if ([videoConstraints isKindOfClass:[NSDictionary class]]) {
// constraints.video.deviceId
useBroadcastExtension = [((NSDictionary *)videoConstraints)[@"deviceId"] isEqualToString:@"broadcast"];
}
id screenCapturer;
if(useBroadcastExtension){
screenCapturer = [[FlutterBroadcastScreenCapturer alloc] initWithDelegate:videoSource];
} else {
screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource];
}
[screenCapturer startCapture];
if(useBroadcastExtension) {
NSString *extension = [[[NSBundle mainBundle] infoDictionary] valueForKey: kRTCScreenSharingExtension];
if(extension) {
RPSystemBroadcastPickerView *picker = [[RPSystemBroadcastPickerView alloc] init];
picker.preferredExtension = extension;
picker.showsMicrophoneButton = false;
SEL selector = NSSelectorFromString(@"buttonPressed:");
if([picker respondsToSelector:selector]) {
[picker performSelector:selector withObject:nil];
}
}
}
//TODO:
self.videoCapturer = screenCapturer;

View File

@ -0,0 +1,25 @@
//
// FlutterBroadcastScreenCapturer.h
// RCTWebRTC
//
// Created by Alex-Dan Bumbu on 06/01/2021.
//
#import <Foundation/Foundation.h>
#import <WebRTC/WebRTC.h>
NS_ASSUME_NONNULL_BEGIN
/// Name of the Unix-domain socket file shared with the broadcast upload extension.
extern NSString* const kRTCScreensharingSocketFD;
/// Info.plist key whose value is the app-group identifier shared with the extension.
extern NSString* const kRTCAppGroupIdentifier;
/// Info.plist key whose value is the broadcast upload extension's bundle identifier.
extern NSString* const kRTCScreenSharingExtension;
@class FlutterSocketConnectionFrameReader;
/// Video capturer that receives screen frames from a ReplayKit broadcast upload
/// extension over a local Unix-domain socket, instead of capturing in-process.
@interface FlutterBroadcastScreenCapturer : RTCVideoCapturer
/// Opens the shared socket and starts delivering frames to the capturer delegate.
/// Does nothing when the app-group identifier is missing from the Info.plist.
- (void)startCapture;
/// Stops frame delivery and closes the socket connection.
- (void)stopCapture;
/// Stops capture, then synchronously invokes the handler if it is non-nil.
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,66 @@
//
// FlutterBroadcastScreenCapturer.m
// RCTWebRTC
//
// Created by Alex-Dan Bumbu on 06/01/2021.
//
#import "FlutterBroadcastScreenCapturer.h"
#import "FlutterSocketConnectionFrameReader.h"
#import "FlutterSocketConnection.h"
/// Socket file name created inside the shared app-group container.
NSString* const kRTCScreensharingSocketFD = @"rtc_SSFD";
/// Info.plist key holding the app-group identifier shared with the extension.
NSString* const kRTCAppGroupIdentifier = @"RTCAppGroupIdentifier";
/// Info.plist key holding the broadcast upload extension's bundle identifier.
NSString* const kRTCScreenSharingExtension = @"RTCScreenSharingExtension";
@interface FlutterBroadcastScreenCapturer ()
// Frame reader that parses frames arriving on the socket and forwards them
// to this capturer's delegate.
@property (nonatomic, retain) FlutterSocketConnectionFrameReader *capturer;
@end
// NOTE(review): a property declared in a named category gets no synthesized
// ivar; this relies on the manually implemented -appGroupIdentifier below.
@interface FlutterBroadcastScreenCapturer (Private)
@property (nonatomic, readonly) NSString *appGroupIdentifier;
@end
@implementation FlutterBroadcastScreenCapturer
// Builds the socket path in the shared container and starts the frame reader.
// Silently returns when no app-group identifier is configured.
- (void)startCapture {
if (!self.appGroupIdentifier) {
return;
}
NSString *socketFilePath = [self filePathForApplicationGroupIdentifier:self.appGroupIdentifier];
FlutterSocketConnectionFrameReader *frameReader = [[FlutterSocketConnectionFrameReader alloc] initWithDelegate:self.delegate];
FlutterSocketConnection *connection = [[FlutterSocketConnection alloc] initWithFilePath:socketFilePath];
self.capturer = frameReader;
[self.capturer startCaptureWithConnection:connection];
}
// Stops the frame reader, which closes the socket connection.
- (void)stopCapture {
[self.capturer stopCapture];
}
// Stops capture, then invokes the handler synchronously if provided.
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler
{
[self stopCapture];
if(completionHandler != nil) {
completionHandler();
}
}
// MARK: Private Methods
// App-group identifier read from the Info.plist; nil when not configured.
- (NSString *)appGroupIdentifier {
NSDictionary *infoDictionary = [[NSBundle mainBundle] infoDictionary];
return infoDictionary[kRTCAppGroupIdentifier];
}
// Path of the socket file inside the app group's shared container.
- (NSString *)filePathForApplicationGroupIdentifier:(nonnull NSString *)identifier {
NSURL *sharedContainer = [[NSFileManager defaultManager] containerURLForSecurityApplicationGroupIdentifier:identifier];
NSString *socketFilePath = [[sharedContainer URLByAppendingPathComponent:kRTCScreensharingSocketFD] path];
return socketFilePath;
}
@end

View File

@ -0,0 +1,20 @@
//
// FlutterSocketConnection.h
// RCTWebRTC
//
// Created by Alex-Dan Bumbu on 08/01/2021.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/// Server side of a Unix-domain socket used to receive frames from the
/// broadcast upload extension.
@interface FlutterSocketConnection : NSObject
/// Creates the listening socket bound to filePath; returns nil when socket
/// creation or binding fails.
- (instancetype)initWithFilePath:(nonnull NSString *)filePath;
/// Starts listening and, once a client connects, opens paired input/output
/// streams whose events are delivered to streamDelegate.
- (void)openWithStreamDelegate:(id <NSStreamDelegate>)streamDelegate;
/// Tears down the streams, network thread, dispatch source, and server socket.
- (void)close;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,144 @@
//
// FlutterSocketConnection.m
// RCTWebRTC
//
// Created by Alex-Dan Bumbu on 08/01/2021.
//
#include <sys/socket.h>
#include <sys/un.h>
#import "FlutterSocketConnection.h"
@interface FlutterSocketConnection ()
/// Listening (server) end of the Unix-domain socket.
@property (nonatomic, assign) int serverSocket;
/// Dispatch source that fires when a client is ready to be accepted.
@property (nonatomic, strong) dispatch_source_t listeningSource;
/// Dedicated thread whose run loop services the accepted socket streams.
@property (nonatomic, strong) NSThread *networkThread;
@property (nonatomic, strong) NSInputStream *inputStream;
@property (nonatomic, strong) NSOutputStream *outputStream;
@end

@implementation FlutterSocketConnection

/// Creates the server socket and binds it to filePath.
/// Returns nil when socket creation or binding fails.
- (instancetype)initWithFilePath:(nonnull NSString *)filePath {
    self = [super init];
    [self setupNetworkThread];

    self.serverSocket = socket(AF_UNIX, SOCK_STREAM, 0);
    if (self.serverSocket < 0) {
        NSLog(@"failure creating socket");
        return nil;
    }

    if (![self setupSocketWithFileAtPath: filePath]) {
        close(self.serverSocket);
        return nil;
    }

    return self;
}

/// Starts listening and, once a client connects, bridges the accepted socket
/// into an NSInputStream/NSOutputStream pair scheduled on the network thread.
- (void)openWithStreamDelegate:(id <NSStreamDelegate>)streamDelegate {
    int status = listen(self.serverSocket, 10);
    if (status < 0) {
        NSLog(@"failure: socket listening");
        return;
    }

    dispatch_source_t listeningSource = dispatch_source_create(DISPATCH_SOURCE_TYPE_READ, self.serverSocket, 0, NULL);
    dispatch_source_set_event_handler(listeningSource, ^ {
        int clientSocket = accept(self.serverSocket, NULL, NULL);
        if (clientSocket < 0) {
            NSLog(@"failure accepting connection");
            return;
        }

        CFReadStreamRef readStream;
        CFWriteStreamRef writeStream;
        CFStreamCreatePairWithSocket(kCFAllocatorDefault, clientSocket, &readStream, &writeStream);

        self.inputStream = (__bridge_transfer NSInputStream *)readStream;
        self.inputStream.delegate = streamDelegate;
        // Fix: the original passed the literal strings @"kCFBooleanTrue" /
        // @"kCFStreamPropertyShouldCloseNativeSocket". CFStream expects the
        // kCFBooleanTrue constant as the value; with a string value the native
        // socket is not closed when the stream closes and the fd leaks.
        [self.inputStream setProperty:(__bridge id)kCFBooleanTrue
                               forKey:(__bridge NSString *)kCFStreamPropertyShouldCloseNativeSocket];

        self.outputStream = (__bridge_transfer NSOutputStream *)writeStream;
        [self.outputStream setProperty:(__bridge id)kCFBooleanTrue
                                forKey:(__bridge NSString *)kCFStreamPropertyShouldCloseNativeSocket];

        [self.networkThread start];
        // Streams must be scheduled from the network thread itself.
        [self performSelector:@selector(scheduleStreams) onThread:self.networkThread withObject:nil waitUntilDone:true];

        [self.inputStream open];
        [self.outputStream open];
    });

    self.listeningSource = listeningSource;
    dispatch_resume(listeningSource);
}

/// Tears down streams, run-loop scheduling, the network thread, the dispatch
/// source, and the server socket.
- (void)close {
    [self performSelector:@selector(unscheduleStreams) onThread:self.networkThread withObject:nil waitUntilDone:true];

    self.inputStream.delegate = nil;
    self.outputStream.delegate = nil;

    [self.inputStream close];
    [self.outputStream close];

    [self.networkThread cancel];

    dispatch_source_cancel(self.listeningSource);
    close(self.serverSocket);
}

// MARK: - Private Methods

/// Creates (but does not start) the thread that runs the stream run loop
/// until the thread is cancelled.
- (void)setupNetworkThread {
    self.networkThread = [[NSThread alloc] initWithBlock:^{
        do {
            @autoreleasepool {
                [[NSRunLoop currentRunLoop] run];
            }
        } while (![NSThread currentThread].isCancelled);
    }];
    self.networkThread.qualityOfService = NSQualityOfServiceUserInitiated;
}

/// Binds the server socket to the Unix-domain path, replacing any stale
/// socket file left behind by a previous run.
- (BOOL)setupSocketWithFileAtPath:(NSString *)filePath {
    struct sockaddr_un addr;
    memset(&addr, 0, sizeof(addr));

    addr.sun_family = AF_UNIX;

    // Fix: was `>`. sun_path must also hold the NUL terminator, and strncpy
    // below copies at most sizeof(addr.sun_path) - 1 bytes, so a path of
    // exactly sizeof(addr.sun_path) characters would be silently truncated
    // and the socket bound to the wrong path.
    if (filePath.length >= sizeof(addr.sun_path)) {
        NSLog(@"failure: path too long");
        return false;
    }

    // Remove a stale socket file so bind() does not fail with EADDRINUSE.
    unlink(filePath.UTF8String);
    strncpy(addr.sun_path, filePath.UTF8String, sizeof(addr.sun_path) - 1);

    int status = bind(self.serverSocket, (struct sockaddr *)&addr, sizeof(addr));
    if (status < 0) {
        NSLog(@"failure: socket binding");
        return false;
    }

    return true;
}

/// Schedules both streams on the current (network) thread's run loop.
- (void)scheduleStreams {
    [self.inputStream scheduleInRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
    [self.outputStream scheduleInRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
}

/// Removes both streams from the current (network) thread's run loop.
- (void)unscheduleStreams {
    [self.inputStream removeFromRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
    [self.outputStream removeFromRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
}

@end

View File

@ -0,0 +1,23 @@
//
// FlutterSocketConnectionFrameReader.h
// RCTWebRTC
//
// Created by Alex-Dan Bumbu on 06/01/2021.
//
#import <AVFoundation/AVFoundation.h>
#import <WebRTC/RTCVideoCapturer.h>
NS_ASSUME_NONNULL_BEGIN
@class FlutterSocketConnection;
/// Capturer that reads framed screen frames off a socket connection, decodes
/// them into pixel buffers, and delivers them as RTCVideoFrames.
@interface FlutterSocketConnectionFrameReader: RTCVideoCapturer
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate;
/// Opens the connection with self as the stream delegate and resets frame timing.
- (void)startCaptureWithConnection:(nonnull FlutterSocketConnection *)connection;
/// Closes the underlying socket connection.
- (void)stopCapture;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,251 @@
//
// FlutterSocketConnectionFrameReader.m
// RCTWebRTC
//
// Created by Alex-Dan Bumbu on 06/01/2021.
//
#include <mach/mach_time.h>
#import <WebRTC/RTCCVPixelBuffer.h>
#import <WebRTC/RTCVideoFrameBuffer.h>
#import <ReplayKit/ReplayKit.h>
#import "FlutterSocketConnectionFrameReader.h"
#import "FlutterSocketConnection.h"
/// Upper bound on the number of bytes requested from the stream per read.
const NSUInteger kMaxReadLength = 10 * 1024;

/// One CFHTTPMessage-framed frame sent by the broadcast upload extension:
/// headers carry the buffer dimensions and orientation, the body carries the
/// encoded image data.
@interface Message: NSObject
/// Decoded pixel buffer; populated once the message body has been unwrapped.
@property (nonatomic, assign, readonly) CVImageBufferRef imageBuffer;
/// Invoked when the full message has arrived and decoding was attempted.
/// (Fix: renamed the `succes` parameter typo to `success`.)
@property (nonatomic, copy, nullable) void (^didComplete)(BOOL success, Message *message);
/// Feeds received bytes into the framed message; see implementation for the
/// return-value contract.
- (NSInteger)appendBytes: (UInt8 *)buffer length:(NSUInteger)length;
@end

@interface Message ()
@property (nonatomic, assign) CVImageBufferRef imageBuffer;
@property (nonatomic, assign) int imageOrientation;
@property (nonatomic, assign) CFHTTPMessageRef framedMessage;
@end

@implementation Message

- (instancetype)init {
    self = [super init];
    if (self) {
        self.imageBuffer = NULL;
    }
    return self;
}

- (void)dealloc {
    // CVPixelBufferRelease is NULL-safe, so no guard is needed.
    CVPixelBufferRelease(_imageBuffer);
}

/** Returns the amount of missing bytes to complete the message, or -1 when not enough bytes were provided to compute the message length */
- (NSInteger)appendBytes: (UInt8 *)buffer length:(NSUInteger)length {
    if (!_framedMessage) {
        _framedMessage = CFHTTPMessageCreateEmpty(kCFAllocatorDefault, false);
    }

    CFHTTPMessageAppendBytes(_framedMessage, buffer, length);
    if (!CFHTTPMessageIsHeaderComplete(_framedMessage)) {
        return -1;
    }

    NSInteger contentLength = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_framedMessage, (__bridge CFStringRef)@"Content-Length")) integerValue];
    NSInteger bodyLength = (NSInteger)[CFBridgingRelease(CFHTTPMessageCopyBody(_framedMessage)) length];

    NSInteger missingBytesCount = contentLength - bodyLength;
    if (missingBytesCount == 0) {
        BOOL success = [self unwrapMessage:self.framedMessage];
        // Fix: didComplete is declared nullable; calling a nil block is
        // undefined behavior (crash), so guard before invoking.
        if (self.didComplete) {
            self.didComplete(success, self);
        }

        CFRelease(self.framedMessage);
        self.framedMessage = NULL;
    }

    return missingBytesCount;
}

// MARK: Private Methods

/// Shared CIContext used to render decoded images into pixel buffers.
- (CIContext *)imageContext {
    // Initializing a CIContext object is costly, so we use a singleton instead
    static CIContext *imageContext = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        imageContext = [[CIContext alloc] initWithOptions:nil];
    });

    return imageContext;
}

/// Extracts the Buffer-Width/Height/Orientation headers and renders the body
/// image data into a freshly created 32BGRA pixel buffer.
/// NOTE(review): reads the _framedMessage ivar rather than the parameter; the
/// only caller passes self.framedMessage, so both refer to the same message.
- (BOOL)unwrapMessage:(CFHTTPMessageRef)framedMessage {
    size_t width = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_framedMessage, (__bridge CFStringRef)@"Buffer-Width")) integerValue];
    size_t height = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_framedMessage, (__bridge CFStringRef)@"Buffer-Height")) integerValue];
    _imageOrientation = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(_framedMessage, (__bridge CFStringRef)@"Buffer-Orientation")) intValue];

    NSData *messageData = CFBridgingRelease(CFHTTPMessageCopyBody(_framedMessage));

    // Copy the pixel buffer
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, NULL, &_imageBuffer);
    if (status != kCVReturnSuccess) {
        NSLog(@"CVPixelBufferCreate failed");
        return false;
    }

    [self copyImageData:messageData toPixelBuffer:&_imageBuffer];

    return true;
}

/// Decodes data (assumed to be in a format CIImage can parse — TODO confirm
/// the encoding produced by the extension) and renders it into pixelBuffer.
- (void)copyImageData:(NSData *)data toPixelBuffer:(CVPixelBufferRef*)pixelBuffer {
    CVPixelBufferLockBaseAddress(*pixelBuffer, 0);

    CIImage *image = [CIImage imageWithData:data];
    [self.imageContext render:image toCVPixelBuffer:*pixelBuffer];

    CVPixelBufferUnlockBaseAddress(*pixelBuffer, 0);
}

@end
// MARK: -
@interface FlutterSocketConnectionFrameReader () <NSStreamDelegate>
// Socket connection delivering framed messages from the broadcast extension.
@property (nonatomic, strong) FlutterSocketConnection *connection;
// Message currently being accumulated; nil between messages.
@property (nonatomic, strong) Message *message;
@end
@implementation FlutterSocketConnectionFrameReader {
// Conversion factors from mach_absolute_time ticks to nanoseconds.
mach_timebase_info_data_t _timebaseInfo;
// Number of bytes to request from the stream on the next read.
NSInteger _readLength;
// Timestamp (ns) of the first captured frame; -1 until the first frame.
int64_t _startTimeStampNs;
}
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
self = [super initWithDelegate:delegate];
if (self) {
mach_timebase_info(&_timebaseInfo);
}
return self;
}
// Resets frame timing and opens the connection with self as stream delegate.
- (void)startCaptureWithConnection:(FlutterSocketConnection *)connection {
_startTimeStampNs = -1;
self.connection = connection;
self.message = nil;
[self.connection openWithStreamDelegate:self];
}
// Closes the underlying socket connection.
- (void)stopCapture {
[self.connection close];
}
// MARK: Private Methods
// Pulls available bytes off the stream and feeds them into the current
// message; when a message completes, its frame is forwarded to the delegate
// and the message slot is cleared (via the didComplete handler below).
- (void)readBytesFromStream:(NSInputStream *)stream {
if (!stream.hasBytesAvailable) {
return;
}
if (!self.message) {
// Start a new message and read up to the cap until its header tells us
// how many body bytes remain.
self.message = [[Message alloc] init];
_readLength = kMaxReadLength;
__weak __typeof__(self) weakSelf = self;
self.message.didComplete = ^(BOOL success, Message *message) {
if (success) {
[weakSelf didCaptureVideoFrame:message.imageBuffer withOrientation:message.imageOrientation];
}
weakSelf.message = nil;
};
}
uint8_t buffer[_readLength];
NSInteger numberOfBytesRead = [stream read:buffer maxLength:_readLength];
if (numberOfBytesRead < 0) {
NSLog(@"error reading bytes from stream");
return;
}
// appendBytes returns the missing byte count (-1 if the header is still
// incomplete); clamp the next read size back to the cap in either case.
_readLength = [self.message appendBytes:buffer length:numberOfBytesRead];
if (_readLength == -1 || _readLength > kMaxReadLength) {
_readLength = kMaxReadLength;
}
}
// Wraps the decoded pixel buffer in an RTCVideoFrame with a timestamp
// relative to the first frame and a rotation derived from the orientation,
// then hands it to the capturer delegate.
- (void)didCaptureVideoFrame:(CVPixelBufferRef)pixelBuffer
withOrientation:(CGImagePropertyOrientation) orientation {
int64_t currentTime = mach_absolute_time();
// Convert mach ticks to nanoseconds using the cached timebase.
int64_t currentTimeStampNs = currentTime * _timebaseInfo.numer / _timebaseInfo.denom;
if (_startTimeStampNs < 0) {
_startTimeStampNs = currentTimeStampNs;
}
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer: pixelBuffer];
int64_t frameTimeStampNs = currentTimeStampNs - _startTimeStampNs;
RTCVideoRotation rotation;
switch (orientation) {
case kCGImagePropertyOrientationLeft:
rotation = RTCVideoRotation_90;
break;
case kCGImagePropertyOrientationDown:
rotation = RTCVideoRotation_180;
break;
case kCGImagePropertyOrientationRight:
rotation = RTCVideoRotation_270;
break;
default:
rotation = RTCVideoRotation_0;
break;
}
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer: rtcPixelBuffer
rotation: rotation
timeStampNs: frameTimeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
@end
// Stream-event handling for the socket's input stream.
@implementation FlutterSocketConnectionFrameReader (NSStreamDelegate)
- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode {
switch (eventCode) {
case NSStreamEventOpenCompleted:
NSLog(@"server stream open completed");
break;
case NSStreamEventHasBytesAvailable:
[self readBytesFromStream: (NSInputStream *)aStream];
break;
case NSStreamEventEndEncountered:
// Peer (the broadcast extension) closed the stream; stop capturing.
NSLog(@"server stream end encountered");
[self stopCapture];
break;
case NSStreamEventErrorOccurred:
NSLog(@"server stream error encountered: %@", aStream.streamError.localizedDescription);
break;
default:
break;
}
}
@end

View File

@ -3,7 +3,7 @@
#
Pod::Spec.new do |s|
s.name = 'flutter_webrtc'
s.version = '0.7.1'
s.version = '0.8.0'
s.summary = 'Flutter WebRTC plugin for iOS.'
s.description = <<-DESC
A new flutter plugin project.