Code format (#1198)

* Code format and add format.sh

* Update format command

Only format files ending with .mm
xiaowei guan
2022-12-08 14:30:00 +08:00
committed by GitHub
parent c23220a709
commit 8026ccfbaf
50 changed files with 4338 additions and 4161 deletions
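format.sh itself is not part of this excerpt, so the following is only a minimal sketch of what such a script might contain, assuming the reformatting below is produced by clang-format reading a repository-level .clang-format file; the search root and file pattern are illustrative assumptions, not taken from the commit.

#!/usr/bin/env bash
# Hypothetical sketch of format.sh -- the real script is not shown in this
# diff. Assumes clang-format is on PATH and that --style=file picks up a
# .clang-format file at the repository root.
set -euo pipefail

# Per the commit note above, the updated command restricts formatting to
# Objective-C++ (.mm) sources. "common/darwin" appears in this diff; any
# other search roots would be assumptions.
find common/darwin -name '*.mm' -print0 | xargs -0 clang-format -i --style=file

Run from the repository root, a script like this would rewrite each matching file in place, producing wrapped call sites like the ones in the hunks below.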

@@ -52,7 +52,8 @@ class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin {
std::unique_ptr<MethodResult> result) {
// handle method call and forward to webrtc native sdk.
auto method_call_proxy = MethodCallProxy::Create(method_call);
webrtc_->HandleMethodCall(*method_call_proxy.get(), MethodResultProxy::Create(std::move(result)));
webrtc_->HandleMethodCall(*method_call_proxy.get(),
MethodResultProxy::Create(std::move(result)));
}
private:
@@ -65,10 +66,13 @@ class FlutterWebRTCPluginImpl : public FlutterWebRTCPlugin {
} // namespace flutter_webrtc_plugin
#if defined(_WINDOWS)
void FlutterWebRTCPluginRegisterWithRegistrar( FlutterDesktopPluginRegistrarRef registrar){
void FlutterWebRTCPluginRegisterWithRegistrar(
FlutterDesktopPluginRegistrarRef registrar) {
#else
void flutter_web_r_t_c_plugin_register_with_registrar(FlPluginRegistrar* registrar) {
void flutter_web_r_t_c_plugin_register_with_registrar(
FlPluginRegistrar* registrar) {
#endif
static auto *plugin_registrar = new flutter::PluginRegistrar(registrar);
flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar(plugin_registrar);
static auto* plugin_registrar = new flutter::PluginRegistrar(registrar);
flutter_webrtc_plugin::FlutterWebRTCPluginImpl::RegisterWithRegistrar(
plugin_registrar);
}

@@ -10,9 +10,9 @@
#include <flutter/standard_method_codec.h>
#include <flutter/texture_registrar.h>
#include <string>
#include <memory>
#include <list>
#include <memory>
#include <string>
typedef flutter::EncodableValue EncodableValue;
typedef flutter::EncodableMap EncodableMap;

@@ -131,9 +131,10 @@ class FlutterPeerConnection {
std::string rtpTransceiverId,
std::unique_ptr<MethodResultProxy> resulte);
void RtpTransceiverGetCurrentDirection(RTCPeerConnection* pc,
std::string rtpTransceiverId,
std::unique_ptr<MethodResultProxy> resulte);
void RtpTransceiverGetCurrentDirection(
RTCPeerConnection* pc,
std::string rtpTransceiverId,
std::unique_ptr<MethodResultProxy> resulte);
void SetConfiguration(RTCPeerConnection* pc,
const EncodableMap& configuration,
@@ -151,7 +152,8 @@ class FlutterPeerConnection {
std::string direction,
std::unique_ptr<MethodResultProxy> resulte);
void GetSenders(RTCPeerConnection* pc, std::unique_ptr<MethodResultProxy> resulte);
void GetSenders(RTCPeerConnection* pc,
std::unique_ptr<MethodResultProxy> resulte);
void AddIceCandidate(RTCIceCandidate* candidate,
RTCPeerConnection* pc,

@@ -39,7 +39,8 @@ typedef struct {
FLUTTER_PLUGIN_EXPORT GType flutter_webrtc_plugin_get_type();
FLUTTER_PLUGIN_EXPORT void flutter_web_r_t_c_plugin_register_with_registrar(FlPluginRegistrar* registrar);
FLUTTER_PLUGIN_EXPORT void flutter_web_r_t_c_plugin_register_with_registrar(
FlPluginRegistrar* registrar);
G_END_DECLS

@@ -13,12 +13,12 @@
#include "rtc_audio_device.h"
#include "rtc_desktop_device.h"
#include "rtc_dtmf_sender.h"
#include "rtc_media_stream.h"
#include "rtc_media_track.h"
#include "rtc_mediaconstraints.h"
#include "rtc_peerconnection.h"
#include "rtc_peerconnection_factory.h"
#include "rtc_dtmf_sender.h"
#include "rtc_video_device.h"
#include "uuidxx.h"
@@ -60,8 +60,9 @@ class FlutterWebRTCBase {
void RemovePeerConnectionObserversForId(const std::string& id);
scoped_refptr<RTCMediaStream> MediaStreamForId(const std::string& id,
std::string peerConnectionId = std::string());
scoped_refptr<RTCMediaStream> MediaStreamForId(
const std::string& id,
std::string peerConnectionId = std::string());
void RemoveStreamForId(const std::string& id);

@@ -68,10 +68,11 @@ void FlutterDataChannel::CreateDataChannel(
result->Success(EncodableValue(params));
}
void FlutterDataChannel::DataChannelSend(RTCDataChannel* data_channel,
const std::string& type,
const EncodableValue& data,
std::unique_ptr<MethodResultProxy> result) {
void FlutterDataChannel::DataChannelSend(
RTCDataChannel* data_channel,
const std::string& type,
const EncodableValue& data,
std::unique_ptr<MethodResultProxy> result) {
bool is_binary = type == "binary";
if (is_binary && TypeIs<std::vector<uint8_t>>(data)) {
std::vector<uint8_t> buffer = GetValue<std::vector<uint8_t>>(data);

@@ -14,8 +14,9 @@ FlutterMediaStream::FlutterMediaStream(FlutterWebRTCBase* base) : base_(base) {
});
}
void FlutterMediaStream::GetUserMedia(const EncodableMap& constraints,
std::unique_ptr<MethodResultProxy> result) {
void FlutterMediaStream::GetUserMedia(
const EncodableMap& constraints,
std::unique_ptr<MethodResultProxy> result) {
std::string uuid = base_->GenerateUUID();
scoped_refptr<RTCMediaStream> stream =
base_->factory_->CreateStream(uuid.c_str());

@@ -271,9 +271,10 @@ void FlutterPeerConnection::RTCPeerConnectionDispose(
result->Success();
}
void FlutterPeerConnection::CreateOffer(const EncodableMap& constraintsMap,
RTCPeerConnection* pc,
std::unique_ptr<MethodResultProxy> result) {
void FlutterPeerConnection::CreateOffer(
const EncodableMap& constraintsMap,
RTCPeerConnection* pc,
std::unique_ptr<MethodResultProxy> result) {
scoped_refptr<RTCMediaConstraints> constraints =
base_->ParseMediaConstraints(constraintsMap);
std::shared_ptr<MethodResultProxy> result_ptr(result.release());
@@ -290,9 +291,10 @@ void FlutterPeerConnection::CreateOffer(const EncodableMap& constraintsMap,
constraints);
}
void FlutterPeerConnection::CreateAnswer(const EncodableMap& constraintsMap,
RTCPeerConnection* pc,
std::unique_ptr<MethodResultProxy> result) {
void FlutterPeerConnection::CreateAnswer(
const EncodableMap& constraintsMap,
RTCPeerConnection* pc,
std::unique_ptr<MethodResultProxy> result) {
scoped_refptr<RTCMediaConstraints> constraints =
base_->ParseMediaConstraints(constraintsMap);
std::shared_ptr<MethodResultProxy> result_ptr(result.release());
@@ -725,8 +727,9 @@ void FlutterPeerConnection::RtpTransceiverSetDirection(
}
}
void FlutterPeerConnection::GetSenders(RTCPeerConnection* pc,
std::unique_ptr<MethodResultProxy> result) {
void FlutterPeerConnection::GetSenders(
RTCPeerConnection* pc,
std::unique_ptr<MethodResultProxy> result) {
std::shared_ptr<MethodResultProxy> result_ptr(result.release());
EncodableMap map;
@@ -752,57 +755,58 @@ void FlutterPeerConnection::AddIceCandidate(
EncodableMap statsToMap(const scoped_refptr<MediaRTCStats>& stats) {
EncodableMap report_map;
report_map[EncodableValue("id")] = EncodableValue(stats->id().std_string());
report_map[EncodableValue("type")] = EncodableValue(stats->type().std_string());
report_map[EncodableValue("type")] =
EncodableValue(stats->type().std_string());
report_map[EncodableValue("timestamp")] =
EncodableValue(double(stats->timestamp_us()));
EncodableMap values;
auto members = stats->Members();
for (int i = 0; i < members.size(); i++) {
auto member = members[i];
if(!member->IsDefined()) {
if (!member->IsDefined()) {
continue;
}
switch (member->GetType())
{
case RTCStatsMember::Type::kBool:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue(member->ValueBool());
break;
case RTCStatsMember::Type::kInt32:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue(member->ValueInt32());
break;
case RTCStatsMember::Type::kUint32:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue((int64_t)member->ValueUint32());
break;
case RTCStatsMember::Type::kInt64:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue(member->ValueInt64());
break;
case RTCStatsMember::Type::kUint64:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue((int64_t)member->ValueUint64());
break;
case RTCStatsMember::Type::kDouble:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue(member->ValueDouble());
break;
case RTCStatsMember::Type::kString:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue(member->ValueString().std_string());
break;
default:
break;
switch (member->GetType()) {
case RTCStatsMember::Type::kBool:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue(member->ValueBool());
break;
case RTCStatsMember::Type::kInt32:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue(member->ValueInt32());
break;
case RTCStatsMember::Type::kUint32:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue((int64_t)member->ValueUint32());
break;
case RTCStatsMember::Type::kInt64:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue(member->ValueInt64());
break;
case RTCStatsMember::Type::kUint64:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue((int64_t)member->ValueUint64());
break;
case RTCStatsMember::Type::kDouble:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue(member->ValueDouble());
break;
case RTCStatsMember::Type::kString:
values[EncodableValue(member->GetName().std_string())] =
EncodableValue(member->ValueString().std_string());
break;
default:
break;
}
}
report_map[EncodableValue("values")] = EncodableValue(values);
return report_map;
}
void FlutterPeerConnection::GetStats(const std::string& track_id,
RTCPeerConnection* pc,
std::unique_ptr<MethodResultProxy> result) {
void FlutterPeerConnection::GetStats(
const std::string& track_id,
RTCPeerConnection* pc,
std::unique_ptr<MethodResultProxy> result) {
std::shared_ptr<MethodResultProxy> result_ptr(result.release());
scoped_refptr<RTCMediaTrack> track = base_->MediaTracksForId(track_id);
if (track != nullptr) {
@@ -849,23 +853,23 @@ void FlutterPeerConnection::GetStats(const std::string& track_id,
return;
}
}
if(!found) {
if (!found) {
result_ptr->Error("GetStats", "Track not found");
}
} else {
pc->GetStats(
[result_ptr](const vector<scoped_refptr<MediaRTCStats>> reports) {
std::vector<EncodableValue> list;
for (int i = 0; i < reports.size(); i++) {
list.push_back(EncodableValue(statsToMap(reports[i])));
}
EncodableMap params;
params[EncodableValue("stats")] = EncodableValue(list);
result_ptr->Success(EncodableValue(params));
},
std::vector<EncodableValue> list;
for (int i = 0; i < reports.size(); i++) {
list.push_back(EncodableValue(statsToMap(reports[i])));
}
EncodableMap params;
params[EncodableValue("stats")] = EncodableValue(list);
result_ptr->Success(EncodableValue(params));
},
[result_ptr](const char* error) {
result_ptr->Error("GetStats", error);
});
});
}
}
@@ -899,10 +903,11 @@ void FlutterPeerConnection::MediaStreamRemoveTrack(
result_ptr->Success();
}
void FlutterPeerConnection::AddTrack(RTCPeerConnection* pc,
scoped_refptr<RTCMediaTrack> track,
std::list<std::string> streamIds,
std::unique_ptr<MethodResultProxy> result) {
void FlutterPeerConnection::AddTrack(
RTCPeerConnection* pc,
scoped_refptr<RTCMediaTrack> track,
std::list<std::string> streamIds,
std::unique_ptr<MethodResultProxy> result) {
std::shared_ptr<MethodResultProxy> result_ptr(result.release());
std::string kind = track->kind().std_string();
std::vector<string> streamids;
@@ -938,9 +943,10 @@ FlutterPeerConnection::GetRtpSenderById(RTCPeerConnection* pc, std::string id) {
return result;
}
void FlutterPeerConnection::RemoveTrack(RTCPeerConnection* pc,
std::string senderId,
std::unique_ptr<MethodResultProxy> result) {
void FlutterPeerConnection::RemoveTrack(
RTCPeerConnection* pc,
std::string senderId,
std::unique_ptr<MethodResultProxy> result) {
auto sender = GetRtpSenderById(pc, senderId);
if (nullptr == sender.get()) {
result->Error("RemoveTrack", "not find RtpSender ");

@@ -75,7 +75,7 @@ void FlutterScreenCapture::UpdateDesktopSources(
return;
}
auto map = EncodableMap();
map[EncodableValue("result")] = true;
map[EncodableValue("result")] = true;
result->Success(EncodableValue(map));
}
@@ -135,23 +135,23 @@ void FlutterScreenCapture::OnMediaSourceThumbnailChanged(
}
void FlutterScreenCapture::OnStart(scoped_refptr<RTCDesktopCapturer> capturer) {
//std::cout << " OnStart: " << capturer->source()->id().std_string()
// std::cout << " OnStart: " << capturer->source()->id().std_string()
// << std::endl;
}
void FlutterScreenCapture::OnPaused(
scoped_refptr<RTCDesktopCapturer> capturer) {
//std::cout << " OnPaused: " << capturer->source()->id().std_string()
// std::cout << " OnPaused: " << capturer->source()->id().std_string()
// << std::endl;
}
void FlutterScreenCapture::OnStop(scoped_refptr<RTCDesktopCapturer> capturer) {
//std::cout << " OnStop: " << capturer->source()->id().std_string()
// std::cout << " OnStop: " << capturer->source()->id().std_string()
// << std::endl;
}
void FlutterScreenCapture::OnError(scoped_refptr<RTCDesktopCapturer> capturer) {
//std::cout << " OnError: " << capturer->source()->id().std_string()
// std::cout << " OnError: " << capturer->source()->id().std_string()
// << std::endl;
}
@@ -180,7 +180,7 @@ void FlutterScreenCapture::GetDisplayMedia(
const EncodableMap& constraints,
std::unique_ptr<MethodResultProxy> result) {
std::string source_id = "0";
//DesktopType source_type = kScreen;
// DesktopType source_type = kScreen;
double fps = 30.0;
const EncodableMap video = findMap(constraints, "video");
@@ -193,7 +193,7 @@ void FlutterScreenCapture::GetDisplayMedia(
return;
}
if (source_id != "0") {
//source_type = DesktopType::kWindow;
// source_type = DesktopType::kWindow;
}
}
const EncodableMap mandatory = findMap(video, "mandatory");

@@ -123,9 +123,10 @@ void FlutterVideoRendererManager::CreateVideoRendererTexture(
result->Success(EncodableValue(params));
}
void FlutterVideoRendererManager::SetMediaStream(int64_t texture_id,
const std::string& stream_id,
const std::string& peerConnectionId) {
void FlutterVideoRendererManager::SetMediaStream(
int64_t texture_id,
const std::string& stream_id,
const std::string& peerConnectionId) {
scoped_refptr<RTCMediaStream> stream =
base_->MediaStreamForId(stream_id, peerConnectionId);

@@ -15,8 +15,9 @@ FlutterWebRTC::FlutterWebRTC(FlutterWebRTCPlugin* plugin)
FlutterWebRTC::~FlutterWebRTC() {}
void FlutterWebRTC::HandleMethodCall(const MethodCallProxy& method_call,
std::unique_ptr<MethodResultProxy> result) {
void FlutterWebRTC::HandleMethodCall(
const MethodCallProxy& method_call,
std::unique_ptr<MethodResultProxy> result) {
if (method_call.method_name().compare("createPeerConnection") == 0) {
if (!method_call.arguments()) {
result->Error("Bad Arguments", "Null arguments received");
@@ -260,10 +261,10 @@ void FlutterWebRTC::HandleMethodCall(const MethodCallProxy& method_call,
SdpParseError error;
int sdpMLineIndex = findInt(constraints, "sdpMLineIndex");
scoped_refptr<RTCIceCandidate> rtc_candidate =
RTCIceCandidate::Create(findString(constraints, "candidate").c_str(),
findString(constraints, "sdpMid").c_str(),
sdpMLineIndex == -1 ? 0 : sdpMLineIndex, &error);
scoped_refptr<RTCIceCandidate> rtc_candidate = RTCIceCandidate::Create(
findString(constraints, "candidate").c_str(),
findString(constraints, "sdpMid").c_str(),
sdpMLineIndex == -1 ? 0 : sdpMLineIndex, &error);
AddIceCandidate(rtc_candidate.get(), pc, std::move(result));
} else if (method_call.method_name().compare("getStats") == 0) {
@@ -274,11 +275,10 @@ void FlutterWebRTC::HandleMethodCall(const MethodCallProxy& method_call,
const EncodableMap params =
GetValue<EncodableMap>(*method_call.arguments());
const std::string peerConnectionId = findString(params, "peerConnectionId");
const std::string track_id = findString(params, "trackId");
const std::string track_id = findString(params, "trackId");
RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId);
if (pc == nullptr) {
result->Error("getStatsFailed",
"getStats() peerConnection is null");
result->Error("getStatsFailed", "getStats() peerConnection is null");
return;
}
GetStats(track_id, pc, std::move(result));
@@ -961,16 +961,14 @@ void FlutterWebRTC::HandleMethodCall(const MethodCallProxy& method_call,
RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId);
if (pc == nullptr) {
result->Error("canInsertDtmf",
"canInsertDtmf() peerConnection is null");
result->Error("canInsertDtmf", "canInsertDtmf() peerConnection is null");
return;
}
auto rtpSender = GetRtpSenderById(pc, rtpSenderId);
if (rtpSender == nullptr) {
result->Error("sendDtmf",
"sendDtmf() rtpSender is null");
result->Error("sendDtmf", "sendDtmf() rtpSender is null");
return;
}
auto dtmfSender = rtpSender->dtmf_sender();
@@ -989,19 +987,17 @@ void FlutterWebRTC::HandleMethodCall(const MethodCallProxy& method_call,
const std::string tone = findString(params, "tone");
int duration = findInt(params, "duration");
int gap = findInt(params, "gap");
RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId);
if (pc == nullptr) {
result->Error("sendDtmf",
"sendDtmf() peerConnection is null");
result->Error("sendDtmf", "sendDtmf() peerConnection is null");
return;
}
auto rtpSender = GetRtpSenderById(pc, rtpSenderId);
if (rtpSender == nullptr) {
result->Error("sendDtmf",
"sendDtmf() rtpSender is null");
result->Error("sendDtmf", "sendDtmf() rtpSender is null");
return;
}

@@ -87,7 +87,7 @@ void FlutterWebRTCBase::RemovePeerConnectionObserversForId(
scoped_refptr<RTCMediaStream> FlutterWebRTCBase::MediaStreamForId(
const std::string& id,
std::string peerConnectionId/* = std::string()*/) {
std::string peerConnectionId /* = std::string()*/) {
auto it = local_streams_.find(id);
if (it != local_streams_.end()) {
return (*it).second;

@@ -5,100 +5,110 @@
@implementation AudioUtils
+ (void)ensureAudioSessionWithRecording:(BOOL)recording {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
// we also need to set default WebRTC audio configuration, since it may be activated after
// this method is called
RTCAudioSessionConfiguration *config = [RTCAudioSessionConfiguration webRTCConfiguration];
// require audio session to be either PlayAndRecord or MultiRoute
if (recording && session.category != AVAudioSessionCategoryPlayAndRecord &&
session.category != AVAudioSessionCategoryMultiRoute) {
config.category = AVAudioSessionCategoryPlayAndRecord;
config.categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth |
AVAudioSessionCategoryOptionAllowBluetoothA2DP;
config.mode = AVAudioSessionModeVoiceChat;
[session lockForConfiguration];
[session setCategory:config.category
withOptions:config.categoryOptions
error:nil];
[session setMode:config.mode error:nil];
[session unlockForConfiguration];
} else if (!recording || (session.category == AVAudioSessionCategoryAmbient
|| session.category == AVAudioSessionCategorySoloAmbient)) {
config.category = AVAudioSessionCategoryPlayback;
config.categoryOptions = 0;
config.mode = AVAudioSessionModeDefault;
// upgrade from ambient if needed
[session lockForConfiguration];
[session setCategory:config.category
withOptions:config.categoryOptions
error:nil];
[session setMode:config.mode error:nil];
[session unlockForConfiguration];
}
RTCAudioSession* session = [RTCAudioSession sharedInstance];
// we also need to set default WebRTC audio configuration, since it may be activated after
// this method is called
RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration];
// require audio session to be either PlayAndRecord or MultiRoute
if (recording && session.category != AVAudioSessionCategoryPlayAndRecord &&
session.category != AVAudioSessionCategoryMultiRoute) {
config.category = AVAudioSessionCategoryPlayAndRecord;
config.categoryOptions =
AVAudioSessionCategoryOptionAllowBluetooth | AVAudioSessionCategoryOptionAllowBluetoothA2DP;
config.mode = AVAudioSessionModeVoiceChat;
[session lockForConfiguration];
[session setCategory:config.category withOptions:config.categoryOptions error:nil];
[session setMode:config.mode error:nil];
[session unlockForConfiguration];
} else if (!recording || (session.category == AVAudioSessionCategoryAmbient ||
session.category == AVAudioSessionCategorySoloAmbient)) {
config.category = AVAudioSessionCategoryPlayback;
config.categoryOptions = 0;
config.mode = AVAudioSessionModeDefault;
// upgrade from ambient if needed
[session lockForConfiguration];
[session setCategory:config.category withOptions:config.categoryOptions error:nil];
[session setMode:config.mode error:nil];
[session unlockForConfiguration];
}
}
+ (BOOL)selectAudioInput:(AVAudioSessionPort)type {
RTCAudioSession *rtcSession = [RTCAudioSession sharedInstance];
AVAudioSessionPortDescription *inputPort = nil;
for (AVAudioSessionPortDescription *port in rtcSession.session.availableInputs) {
if ([port.portType isEqualToString:type]) {
inputPort = port;
break;
}
RTCAudioSession* rtcSession = [RTCAudioSession sharedInstance];
AVAudioSessionPortDescription* inputPort = nil;
for (AVAudioSessionPortDescription* port in rtcSession.session.availableInputs) {
if ([port.portType isEqualToString:type]) {
inputPort = port;
break;
}
if (inputPort != nil) {
NSError *errOut = nil;
[rtcSession lockForConfiguration];
[rtcSession setPreferredInput:inputPort error:&errOut];
[rtcSession unlockForConfiguration];
if(errOut != nil) {
return NO;
}
return YES;
}
if (inputPort != nil) {
NSError* errOut = nil;
[rtcSession lockForConfiguration];
[rtcSession setPreferredInput:inputPort error:&errOut];
[rtcSession unlockForConfiguration];
if (errOut != nil) {
return NO;
}
return NO;
return YES;
}
return NO;
}
+ (void)setSpeakerphoneOn:(BOOL)enable {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTCAudioSessionConfiguration *config = [RTCAudioSessionConfiguration webRTCConfiguration];
[session lockForConfiguration];
NSError *error = nil;
if(!enable) {
[session setMode:config.mode error:&error];
BOOL success = [session setCategory:config.category
withOptions:AVAudioSessionCategoryOptionAllowAirPlay|AVAudioSessionCategoryOptionAllowBluetoothA2DP|AVAudioSessionCategoryOptionAllowBluetooth
error:&error];
RTCAudioSession* session = [RTCAudioSession sharedInstance];
RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration];
[session lockForConfiguration];
NSError* error = nil;
if (!enable) {
[session setMode:config.mode error:&error];
BOOL success = [session setCategory:config.category
withOptions:AVAudioSessionCategoryOptionAllowAirPlay |
AVAudioSessionCategoryOptionAllowBluetoothA2DP |
AVAudioSessionCategoryOptionAllowBluetooth
error:&error];
success = [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_None error:&error];
if (!success) NSLog(@"Port override failed due to: %@", error);
success = [session.session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_None
error:&error];
if (!success)
NSLog(@"Port override failed due to: %@", error);
success = [session setActive:YES error:&error];
if (!success) NSLog(@"Audio session override failed: %@", error);
else NSLog(@"AudioSession override via Earpiece/Headset is successful ");
success = [session setActive:YES error:&error];
if (!success)
NSLog(@"Audio session override failed: %@", error);
else
NSLog(@"AudioSession override via Earpiece/Headset is successful ");
} else {
[session setMode:config.mode error:&error];
BOOL success = [session setCategory:config.category
withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker|AVAudioSessionCategoryOptionAllowAirPlay|AVAudioSessionCategoryOptionAllowBluetoothA2DP|AVAudioSessionCategoryOptionAllowBluetooth
error:&error];
success = [session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_Speaker error:&error];
if (!success) NSLog(@"Port override failed due to: %@", error);
} else {
[session setMode:config.mode error:&error];
BOOL success = [session setCategory:config.category
withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker |
AVAudioSessionCategoryOptionAllowAirPlay |
AVAudioSessionCategoryOptionAllowBluetoothA2DP |
AVAudioSessionCategoryOptionAllowBluetooth
error:&error];
success = [session setActive:YES error:&error];
if (!success) NSLog(@"Audio session override failed: %@", error);
else NSLog(@"AudioSession override via Loudspeaker is successful ");
}
[session unlockForConfiguration];
success = [session overrideOutputAudioPort:kAudioSessionOverrideAudioRoute_Speaker
error:&error];
if (!success)
NSLog(@"Port override failed due to: %@", error);
success = [session setActive:YES error:&error];
if (!success)
NSLog(@"Audio session override failed: %@", error);
else
NSLog(@"AudioSession override via Loudspeaker is successful ");
}
[session unlockForConfiguration];
}
+ (void)deactiveRtcAudioSession {
NSError *error = nil;
[[AVAudioSession sharedInstance] setActive:NO withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:&error];
NSError* error = nil;
[[AVAudioSession sharedInstance] setActive:NO
withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
error:&error];
}
@end

@@ -2,12 +2,12 @@
#if TARGET_OS_IPHONE
@interface FlutterRPScreenRecorder : RTCVideoCapturer
-(void)startCapture;
- (void)startCapture;
// Stops the capture session asynchronously and notifies callback on completion.
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler;
-(void)stopCapture;
- (void)stopCapture;
@end
#endif

@@ -2,90 +2,92 @@
#if TARGET_OS_IPHONE
#import <ReplayKit/ReplayKit.h>
//See: https://developer.apple.com/videos/play/wwdc2017/606/
// See: https://developer.apple.com/videos/play/wwdc2017/606/
@implementation FlutterRPScreenRecorder {
RPScreenRecorder *screenRecorder;
RTCVideoSource *source;
RPScreenRecorder* screenRecorder;
RTCVideoSource* source;
}
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
source = delegate;
return [super initWithDelegate:delegate];
source = delegate;
return [super initWithDelegate:delegate];
}
-(void)startCapture
{
if(screenRecorder == NULL)
screenRecorder = [RPScreenRecorder sharedRecorder];
[screenRecorder setMicrophoneEnabled:NO];
- (void)startCapture {
if (screenRecorder == NULL)
screenRecorder = [RPScreenRecorder sharedRecorder];
if (![screenRecorder isAvailable]) {
NSLog(@"FlutterRPScreenRecorder.startCapture: Screen recorder is not available!");
return;
}
if (@available(iOS 11.0, *)) {
[screenRecorder startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {
if (bufferType == RPSampleBufferTypeVideo) {// We want video only now
[self handleSourceBuffer:sampleBuffer sampleType:bufferType];
}
} completionHandler:^(NSError * _Nullable error) {
if (error != nil)
NSLog(@"!!! startCaptureWithHandler/completionHandler %@ !!!", error);
[screenRecorder setMicrophoneEnabled:NO];
if (![screenRecorder isAvailable]) {
NSLog(@"FlutterRPScreenRecorder.startCapture: Screen recorder is not available!");
return;
}
if (@available(iOS 11.0, *)) {
[screenRecorder
startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer,
RPSampleBufferType bufferType, NSError* _Nullable error) {
if (bufferType == RPSampleBufferTypeVideo) { // We want video only now
[self handleSourceBuffer:sampleBuffer sampleType:bufferType];
}
}
completionHandler:^(NSError* _Nullable error) {
if (error != nil)
NSLog(@"!!! startCaptureWithHandler/completionHandler %@ !!!", error);
}];
} else {
// Fallback on earlier versions
NSLog(@"FlutterRPScreenRecorder.startCapture: Screen recorder is not available in versions lower than iOS 11 !");
}
} else {
// Fallback on earlier versions
NSLog(@"FlutterRPScreenRecorder.startCapture: Screen recorder is not available in versions "
@"lower than iOS 11 !");
}
}
-(void)stopCapture
{
if (@available(iOS 11.0, *)) {
[screenRecorder stopCaptureWithHandler:^(NSError * _Nullable error) {
if (error != nil)
NSLog(@"!!! stopCaptureWithHandler/completionHandler %@ !!!", error);
}];
} else {
// Fallback on earlier versions
NSLog(@"FlutterRPScreenRecorder.stopCapture: Screen recorder is not available in versions lower than iOS 11 !");
}
- (void)stopCapture {
if (@available(iOS 11.0, *)) {
[screenRecorder stopCaptureWithHandler:^(NSError* _Nullable error) {
if (error != nil)
NSLog(@"!!! stopCaptureWithHandler/completionHandler %@ !!!", error);
}];
} else {
// Fallback on earlier versions
NSLog(@"FlutterRPScreenRecorder.stopCapture: Screen recorder is not available in versions "
@"lower than iOS 11 !");
}
}
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler
{
[self stopCapture];
if(completionHandler != nil) {
completionHandler();
}
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
[self stopCapture];
if (completionHandler != nil) {
completionHandler();
}
}
-(void)handleSourceBuffer:(CMSampleBufferRef)sampleBuffer sampleType:(RPSampleBufferType)sampleType
{
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
return;
}
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
- (void)handleSourceBuffer:(CMSampleBufferRef)sampleBuffer
sampleType:(RPSampleBufferType)sampleType {
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
[source adaptOutputFormatToWidth:(int)(width/2) height:(int)(height/2) fps:8];
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs =
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC;
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:RTCVideoRotation_0
timeStampNs:timeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
return;
}
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
[source adaptOutputFormatToWidth:(int)(width / 2) height:(int)(height / 2) fps:8];
RTCCVPixelBuffer* rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs =
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC;
RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:RTCVideoRotation_0
timeStampNs:timeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
@end

common/darwin/Classes/FlutterRTCDataChannel.h Executable file → Normal file

@@ -1,30 +1,28 @@
#import "FlutterWebRTCPlugin.h"
#import <WebRTC/RTCDataChannel.h>
#import "FlutterWebRTCPlugin.h"
@interface RTCDataChannel (Flutter) <FlutterStreamHandler>
@property (nonatomic, strong, nonnull) NSString *peerConnectionId;
@property (nonatomic, strong, nonnull) NSString *flutterChannelId;
@property (nonatomic, strong, nullable) FlutterEventSink eventSink;
@property (nonatomic, strong, nullable) FlutterEventChannel *eventChannel;
@property (nonatomic, strong, nullable) NSArray<id> *eventQueue;
@property(nonatomic, strong, nonnull) NSString* peerConnectionId;
@property(nonatomic, strong, nonnull) NSString* flutterChannelId;
@property(nonatomic, strong, nullable) FlutterEventSink eventSink;
@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel;
@property(nonatomic, strong, nullable) NSArray<id>* eventQueue;
@end
@interface FlutterWebRTCPlugin (RTCDataChannel) <RTCDataChannelDelegate>
- (void)createDataChannel:(nonnull NSString*)peerConnectionId
label:(nonnull NSString*)label
config:(nonnull RTCDataChannelConfiguration*)config
messenger:(nonnull NSObject<FlutterBinaryMessenger>*)messenger
result:(nonnull FlutterResult)result;
-(void)createDataChannel:(nonnull NSString *)peerConnectionId
label:(nonnull NSString *)label
config:(nonnull RTCDataChannelConfiguration *)config
messenger:(nonnull NSObject<FlutterBinaryMessenger> *)messenger
result:(nonnull FlutterResult)result;
- (void)dataChannelClose:(nonnull NSString*)peerConnectionId
dataChannelId:(nonnull NSString*)dataChannelId;
-(void)dataChannelClose:(nonnull NSString *)peerConnectionId
dataChannelId:(nonnull NSString *)dataChannelId;
-(void)dataChannelSend:(nonnull NSString *)peerConnectionId
dataChannelId:(nonnull NSString *)dataChannelId
data:(nonnull NSString *)data
type:(nonnull NSString *)type;
- (void)dataChannelSend:(nonnull NSString*)peerConnectionId
dataChannelId:(nonnull NSString*)dataChannelId
data:(nonnull NSString*)data
type:(nonnull NSString*)type;
@end

common/darwin/Classes/FlutterRTCDataChannel.m Executable file → Normal file

@@ -1,191 +1,194 @@
#import <objc/runtime.h>
#import "FlutterRTCDataChannel.h"
#import "FlutterRTCPeerConnection.h"
#import <WebRTC/RTCDataChannelConfiguration.h>
#import <objc/runtime.h>
#import "FlutterRTCPeerConnection.h"
@implementation RTCDataChannel (Flutter)
- (NSString *)peerConnectionId
{
return objc_getAssociatedObject(self, _cmd);
- (NSString*)peerConnectionId {
return objc_getAssociatedObject(self, _cmd);
}
- (void)setPeerConnectionId:(NSString *)peerConnectionId
{
objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
- (void)setPeerConnectionId:(NSString*)peerConnectionId {
objc_setAssociatedObject(self, @selector(peerConnectionId), peerConnectionId,
OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
- (FlutterEventSink )eventSink
{
return objc_getAssociatedObject(self, _cmd);
- (FlutterEventSink)eventSink {
return objc_getAssociatedObject(self, _cmd);
}
- (void)setEventSink:(FlutterEventSink)eventSink
{
objc_setAssociatedObject(self, @selector(eventSink), eventSink, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
- (void)setEventSink:(FlutterEventSink)eventSink {
objc_setAssociatedObject(self, @selector(eventSink), eventSink,
OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
- (NSArray<id> *)eventQueue
{
return objc_getAssociatedObject(self, _cmd);
- (NSArray<id>*)eventQueue {
return objc_getAssociatedObject(self, _cmd);
}
- (void)setEventQueue:(NSArray<id> *)eventQueue
{
objc_setAssociatedObject(self, @selector(eventQueue), eventQueue, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
- (void)setEventQueue:(NSArray<id>*)eventQueue {
objc_setAssociatedObject(self, @selector(eventQueue), eventQueue,
OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
- (NSNumber *)flutterChannelId
{
return objc_getAssociatedObject(self, _cmd);
- (NSNumber*)flutterChannelId {
return objc_getAssociatedObject(self, _cmd);
}
- (void)setFlutterChannelId:(NSNumber *)flutterChannelId
{
objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
- (void)setFlutterChannelId:(NSNumber*)flutterChannelId {
objc_setAssociatedObject(self, @selector(flutterChannelId), flutterChannelId,
OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
- (FlutterEventChannel *)eventChannel
{
return objc_getAssociatedObject(self, _cmd);
- (FlutterEventChannel*)eventChannel {
return objc_getAssociatedObject(self, _cmd);
}
- (void)setEventChannel:(FlutterEventChannel *)eventChannel
{
objc_setAssociatedObject(self, @selector(eventChannel), eventChannel, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
- (void)setEventChannel:(FlutterEventChannel*)eventChannel {
objc_setAssociatedObject(self, @selector(eventChannel), eventChannel,
OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
#pragma mark - FlutterStreamHandler methods
- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments {
self.eventSink = nil;
return nil;
self.eventSink = nil;
return nil;
}
- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments
eventSink:(nonnull FlutterEventSink)sink {
self.eventSink = sink;
NSEnumerator *enumerator = [self.eventQueue objectEnumerator];
id event;
while ((event = enumerator.nextObject) != nil) {
self.eventSink(event);
};
self.eventQueue = nil;
return nil;
self.eventSink = sink;
NSEnumerator* enumerator = [self.eventQueue objectEnumerator];
id event;
while ((event = enumerator.nextObject) != nil) {
self.eventSink(event);
};
self.eventQueue = nil;
return nil;
}
@end
@implementation FlutterWebRTCPlugin (RTCDataChannel)
-(void)createDataChannel:(nonnull NSString *)peerConnectionId
label:(NSString *)label
config:(RTCDataChannelConfiguration *)config
messenger:(NSObject<FlutterBinaryMessenger>*)messenger
result:(nonnull FlutterResult)result
{
RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId];
RTCDataChannel *dataChannel = [peerConnection dataChannelForLabel:label configuration:config];
if (nil != dataChannel) {
dataChannel.peerConnectionId = peerConnectionId;
NSString *flutterId = [[NSUUID UUID] UUIDString];
peerConnection.dataChannels[flutterId] = dataChannel;
dataChannel.flutterChannelId = flutterId;
dataChannel.delegate = self;
dataChannel.eventQueue = nil;
FlutterEventChannel *eventChannel = [FlutterEventChannel
eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$@", peerConnectionId, flutterId]
binaryMessenger:messenger];
dataChannel.eventChannel = eventChannel;
[eventChannel setStreamHandler:dataChannel];
result(@{@"label": label, @"id": [NSNumber numberWithInt:dataChannel.channelId], @"flutterId": flutterId});
}
- (void)createDataChannel:(nonnull NSString*)peerConnectionId
label:(NSString*)label
config:(RTCDataChannelConfiguration*)config
messenger:(NSObject<FlutterBinaryMessenger>*)messenger
result:(nonnull FlutterResult)result {
RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId];
RTCDataChannel* dataChannel = [peerConnection dataChannelForLabel:label configuration:config];
if (nil != dataChannel) {
dataChannel.peerConnectionId = peerConnectionId;
NSString* flutterId = [[NSUUID UUID] UUIDString];
peerConnection.dataChannels[flutterId] = dataChannel;
dataChannel.flutterChannelId = flutterId;
dataChannel.delegate = self;
dataChannel.eventQueue = nil;
FlutterEventChannel* eventChannel = [FlutterEventChannel
eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/dataChannelEvent%1$@%2$@",
peerConnectionId, flutterId]
binaryMessenger:messenger];
dataChannel.eventChannel = eventChannel;
[eventChannel setStreamHandler:dataChannel];
result(@{
@"label" : label,
@"id" : [NSNumber numberWithInt:dataChannel.channelId],
@"flutterId" : flutterId
});
}
}
-(void)dataChannelClose:(nonnull NSString *)peerConnectionId
dataChannelId:(nonnull NSString *)dataChannelId
{
RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId];
NSMutableDictionary *dataChannels = peerConnection.dataChannels;
RTCDataChannel *dataChannel = dataChannels[dataChannelId];
if(dataChannel) {
FlutterEventChannel *eventChannel = dataChannel.eventChannel;
[dataChannel close];
[dataChannels removeObjectForKey:dataChannelId];
[eventChannel setStreamHandler:nil];
dataChannel.eventChannel = nil;
}
- (void)dataChannelClose:(nonnull NSString*)peerConnectionId
dataChannelId:(nonnull NSString*)dataChannelId {
RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId];
NSMutableDictionary* dataChannels = peerConnection.dataChannels;
RTCDataChannel* dataChannel = dataChannels[dataChannelId];
if (dataChannel) {
FlutterEventChannel* eventChannel = dataChannel.eventChannel;
[dataChannel close];
[dataChannels removeObjectForKey:dataChannelId];
[eventChannel setStreamHandler:nil];
dataChannel.eventChannel = nil;
}
}
-(void)dataChannelSend:(nonnull NSString *)peerConnectionId
dataChannelId:(nonnull NSString *)dataChannelId
data:(id)data
type:(NSString *)type
{
RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId];
RTCDataChannel *dataChannel = peerConnection.dataChannels[dataChannelId];
NSData *bytes = [type isEqualToString:@"binary"] ?
((FlutterStandardTypedData*)data).data :
[data dataUsingEncoding:NSUTF8StringEncoding];
RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:[type isEqualToString:@"binary"]];
[dataChannel sendData:buffer];
- (void)dataChannelSend:(nonnull NSString*)peerConnectionId
dataChannelId:(nonnull NSString*)dataChannelId
data:(id)data
type:(NSString*)type {
RTCPeerConnection* peerConnection = self.peerConnections[peerConnectionId];
RTCDataChannel* dataChannel = peerConnection.dataChannels[dataChannelId];
NSData* bytes = [type isEqualToString:@"binary"] ? ((FlutterStandardTypedData*)data).data
: [data dataUsingEncoding:NSUTF8StringEncoding];
RTCDataBuffer* buffer = [[RTCDataBuffer alloc] initWithData:bytes
isBinary:[type isEqualToString:@"binary"]];
[dataChannel sendData:buffer];
}
- (NSString *)stringForDataChannelState:(RTCDataChannelState)state
{
- (NSString*)stringForDataChannelState:(RTCDataChannelState)state {
switch (state) {
case RTCDataChannelStateConnecting: return @"connecting";
case RTCDataChannelStateOpen: return @"open";
case RTCDataChannelStateClosing: return @"closing";
case RTCDataChannelStateClosed: return @"closed";
case RTCDataChannelStateConnecting:
return @"connecting";
case RTCDataChannelStateOpen:
return @"open";
case RTCDataChannelStateClosing:
return @"closing";
case RTCDataChannelStateClosed:
return @"closed";
}
return nil;
}
- (void) sendEvent:(id)event withChannel:(RTCDataChannel *)channel {
if(channel.eventSink) {
channel.eventSink(event);
} else {
if(!channel.eventQueue) {
channel.eventQueue = [NSMutableArray array];
}
channel.eventQueue = [channel.eventQueue arrayByAddingObject:event];
- (void)sendEvent:(id)event withChannel:(RTCDataChannel*)channel {
if (channel.eventSink) {
channel.eventSink(event);
} else {
if (!channel.eventQueue) {
channel.eventQueue = [NSMutableArray array];
}
channel.eventQueue = [channel.eventQueue arrayByAddingObject:event];
}
}
#pragma mark - RTCDataChannelDelegate methods
// Called when the data channel state has changed.
- (void)dataChannelDidChangeState:(RTCDataChannel*)channel
{
[self sendEvent:@{ @"event" : @"dataChannelStateChanged",
@"id": [NSNumber numberWithInt:channel.channelId],
@"state": [self stringForDataChannelState:channel.readyState]} withChannel:channel];
- (void)dataChannelDidChangeState:(RTCDataChannel*)channel {
[self sendEvent:@{
@"event" : @"dataChannelStateChanged",
@"id" : [NSNumber numberWithInt:channel.channelId],
@"state" : [self stringForDataChannelState:channel.readyState]
}
withChannel:channel];
}
// Called when a data buffer was successfully received.
- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer
{
NSString *type;
id data;
if (buffer.isBinary) {
type = @"binary";
data = [FlutterStandardTypedData typedDataWithBytes:buffer.data];
} else {
type = @"text";
data = [[NSString alloc] initWithData:buffer.data
encoding:NSUTF8StringEncoding];
}
- (void)dataChannel:(RTCDataChannel*)channel didReceiveMessageWithBuffer:(RTCDataBuffer*)buffer {
NSString* type;
id data;
if (buffer.isBinary) {
type = @"binary";
data = [FlutterStandardTypedData typedDataWithBytes:buffer.data];
} else {
type = @"text";
data = [[NSString alloc] initWithData:buffer.data encoding:NSUTF8StringEncoding];
}
[self sendEvent:@{ @"event" : @"dataChannelReceiveMessage",
@"id": [NSNumber numberWithInt:channel.channelId],
@"type": type,
@"data": (data ? data : [NSNull null])} withChannel:channel];
[self sendEvent:@{
@"event" : @"dataChannelReceiveMessage",
@"id" : [NSNumber numberWithInt:channel.channelId],
@"type" : type,
@"data" : (data ? data : [NSNull null])
}
withChannel:channel];
}
@end

@@ -10,16 +10,13 @@
@interface FlutterWebRTCPlugin (DesktopCapturer)
-(void)getDisplayMedia:(nonnull NSDictionary *)constraints
result:(nonnull FlutterResult)result;
- (void)getDisplayMedia:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result;
-(void)getDesktopSources:(nonnull NSDictionary *)argsMap
result:(nonnull FlutterResult)result;
- (void)getDesktopSources:(nonnull NSDictionary*)argsMap result:(nonnull FlutterResult)result;
-(void)updateDesktopSources:(nonnull NSDictionary *)argsMap
result:(nonnull FlutterResult)result;
- (void)updateDesktopSources:(nonnull NSDictionary*)argsMap result:(nonnull FlutterResult)result;
-(void)getDesktopSourceThumbnail:(nonnull NSDictionary *)argsMap
result:(nonnull FlutterResult)result;
- (void)getDesktopSourceThumbnail:(nonnull NSDictionary*)argsMap
result:(nonnull FlutterResult)result;
@end

@@ -4,405 +4,410 @@
#if TARGET_OS_IPHONE
#import <ReplayKit/ReplayKit.h>
#import "FlutterRPScreenRecorder.h"
#import "FlutterBroadcastScreenCapturer.h"
#import "FlutterRPScreenRecorder.h"
#endif
#if TARGET_OS_OSX
RTCDesktopMediaList *_screen = nil;
RTCDesktopMediaList *_window = nil;
NSArray<RTCDesktopSource *>* _captureSources;
RTCDesktopMediaList* _screen = nil;
RTCDesktopMediaList* _window = nil;
NSArray<RTCDesktopSource*>* _captureSources;
#endif
@implementation FlutterWebRTCPlugin (DesktopCapturer)
-(void)getDisplayMedia:(NSDictionary *)constraints
result:(FlutterResult)result {
NSString *mediaStreamId = [[NSUUID UUID] UUIDString];
RTCMediaStream *mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
RTCVideoSource *videoSource = [self.peerConnectionFactory videoSourceForScreenCast:YES];
NSString *trackUUID = [[NSUUID UUID] UUIDString];
- (void)getDisplayMedia:(NSDictionary*)constraints result:(FlutterResult)result {
NSString* mediaStreamId = [[NSUUID UUID] UUIDString];
RTCMediaStream* mediaStream = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];
RTCVideoSource* videoSource = [self.peerConnectionFactory videoSourceForScreenCast:YES];
NSString* trackUUID = [[NSUUID UUID] UUIDString];
#if TARGET_OS_IPHONE
BOOL useBroadcastExtension = false;
id videoConstraints = constraints[@"video"];
if ([videoConstraints isKindOfClass:[NSDictionary class]]) {
// constraints.video.deviceId
useBroadcastExtension = [((NSDictionary *)videoConstraints)[@"deviceId"] isEqualToString:@"broadcast"];
}
id screenCapturer;
if(useBroadcastExtension){
screenCapturer = [[FlutterBroadcastScreenCapturer alloc] initWithDelegate:videoSource];
} else {
screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource];
}
[screenCapturer startCapture];
NSLog(@"start %@ capture", useBroadcastExtension ? @"broadcast" : @"replykit");
self.videoCapturerStopHandlers[trackUUID] = ^(CompletionHandler handler) {
NSLog(@"stop %@ capture, trackID %@", useBroadcastExtension ? @"broadcast" : @"replykit", trackUUID);
[screenCapturer stopCaptureWithCompletionHandler:handler];
};
BOOL useBroadcastExtension = false;
id videoConstraints = constraints[@"video"];
if ([videoConstraints isKindOfClass:[NSDictionary class]]) {
// constraints.video.deviceId
useBroadcastExtension =
[((NSDictionary*)videoConstraints)[@"deviceId"] isEqualToString:@"broadcast"];
}
if(useBroadcastExtension) {
NSString *extension = [[[NSBundle mainBundle] infoDictionary] valueForKey: kRTCScreenSharingExtension];
if(extension) {
RPSystemBroadcastPickerView *picker = [[RPSystemBroadcastPickerView alloc] init];
picker.preferredExtension = extension;
picker.showsMicrophoneButton = false;
SEL selector = NSSelectorFromString(@"buttonPressed:");
if([picker respondsToSelector:selector]) {
[picker performSelector:selector withObject:nil];
}
}
id screenCapturer;
if (useBroadcastExtension) {
screenCapturer = [[FlutterBroadcastScreenCapturer alloc] initWithDelegate:videoSource];
} else {
screenCapturer = [[FlutterRPScreenRecorder alloc] initWithDelegate:videoSource];
}
[screenCapturer startCapture];
NSLog(@"start %@ capture", useBroadcastExtension ? @"broadcast" : @"replykit");
self.videoCapturerStopHandlers[trackUUID] = ^(CompletionHandler handler) {
NSLog(@"stop %@ capture, trackID %@", useBroadcastExtension ? @"broadcast" : @"replykit",
trackUUID);
[screenCapturer stopCaptureWithCompletionHandler:handler];
};
if (useBroadcastExtension) {
NSString* extension =
[[[NSBundle mainBundle] infoDictionary] valueForKey:kRTCScreenSharingExtension];
if (extension) {
RPSystemBroadcastPickerView* picker = [[RPSystemBroadcastPickerView alloc] init];
picker.preferredExtension = extension;
picker.showsMicrophoneButton = false;
SEL selector = NSSelectorFromString(@"buttonPressed:");
if ([picker respondsToSelector:selector]) {
[picker performSelector:selector withObject:nil];
}
}
}
#endif
#if TARGET_OS_OSX
/* example for constraints:
{
'audio': false,
'video": {
'deviceId': {'exact': sourceId},
'mandatory': {
'frameRate': 30.0
},
/* example for constraints:
{
'audio': false,
'video": {
'deviceId': {'exact': sourceId},
'mandatory': {
'frameRate': 30.0
},
}
}
*/
NSString* sourceId = nil;
BOOL useDefaultScreen = NO;
NSInteger fps = 30;
id videoConstraints = constraints[@"video"];
if ([videoConstraints isKindOfClass:[NSNumber class]] && [videoConstraints boolValue] == YES) {
useDefaultScreen = YES;
} else if ([videoConstraints isKindOfClass:[NSDictionary class]]) {
NSDictionary* deviceId = videoConstraints[@"deviceId"];
if (deviceId != nil && [deviceId isKindOfClass:[NSDictionary class]]) {
if (deviceId[@"exact"] != nil) {
sourceId = deviceId[@"exact"];
if (sourceId == nil) {
result(@{@"error" : @"No deviceId.exact found"});
return;
}
}
*/
NSString *sourceId = nil;
BOOL useDefaultScreen = NO;
NSInteger fps = 30;
id videoConstraints = constraints[@"video"];
if([videoConstraints isKindOfClass:[NSNumber class]] && [videoConstraints boolValue] == YES) {
useDefaultScreen = YES;
} else if ([videoConstraints isKindOfClass:[NSDictionary class]]) {
NSDictionary *deviceId = videoConstraints[@"deviceId"];
if (deviceId != nil && [deviceId isKindOfClass:[NSDictionary class]]) {
if(deviceId[@"exact"] != nil) {
sourceId = deviceId[@"exact"];
if(sourceId == nil) {
result(@{@"error": @"No deviceId.exact found"});
return;
}
}
} else {
// fall back to default screen if no deviceId is specified
useDefaultScreen = YES;
}
id mandatory = videoConstraints[@"mandatory"];
if (mandatory != nil && [mandatory isKindOfClass:[NSDictionary class]]) {
id frameRate = mandatory[@"frameRate"];
if (frameRate != nil && [frameRate isKindOfClass:[NSNumber class]]) {
fps = [frameRate integerValue];
}
}
}
RTCDesktopCapturer *desktopCapturer;
RTCDesktopSource *source = nil;
if(useDefaultScreen){
desktopCapturer = [[RTCDesktopCapturer alloc] initWithDefaultScreen:self captureDelegate:videoSource];
}
} else {
source = [self getSourceById:sourceId];
if(source == nil) {
result(@{@"error": [NSString stringWithFormat:@"No source found for id: %@",sourceId]});
return;
}
desktopCapturer = [[RTCDesktopCapturer alloc] initWithSource:source delegate:self captureDelegate:videoSource];
// fall back to default screen if no deviceId is specified
useDefaultScreen = YES;
}
[desktopCapturer startCaptureWithFPS:fps];
NSLog(@"start desktop capture: sourceId: %@, type: %@, fps: %lu", sourceId, source.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window", fps);
id mandatory = videoConstraints[@"mandatory"];
if (mandatory != nil && [mandatory isKindOfClass:[NSDictionary class]]) {
id frameRate = mandatory[@"frameRate"];
if (frameRate != nil && [frameRate isKindOfClass:[NSNumber class]]) {
fps = [frameRate integerValue];
}
}
}
RTCDesktopCapturer* desktopCapturer;
RTCDesktopSource* source = nil;
if (useDefaultScreen) {
desktopCapturer = [[RTCDesktopCapturer alloc] initWithDefaultScreen:self
captureDelegate:videoSource];
} else {
source = [self getSourceById:sourceId];
if (source == nil) {
result(@{@"error" : [NSString stringWithFormat:@"No source found for id: %@", sourceId]});
return;
}
desktopCapturer = [[RTCDesktopCapturer alloc] initWithSource:source
delegate:self
captureDelegate:videoSource];
}
[desktopCapturer startCaptureWithFPS:fps];
NSLog(@"start desktop capture: sourceId: %@, type: %@, fps: %lu", sourceId,
source.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window", fps);
self.videoCapturerStopHandlers[trackUUID] = ^(CompletionHandler handler) {
NSLog(@"stop desktop capture: sourceId: %@, type: %@, trackID %@", sourceId, source.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window", trackUUID);
[desktopCapturer stopCapture];
handler();
};
self.videoCapturerStopHandlers[trackUUID] = ^(CompletionHandler handler) {
NSLog(@"stop desktop capture: sourceId: %@, type: %@, trackID %@", sourceId,
source.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window", trackUUID);
[desktopCapturer stopCapture];
handler();
};
#endif
RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];
[mediaStream addVideoTrack:videoTrack];
RTCVideoTrack* videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource
trackId:trackUUID];
[mediaStream addVideoTrack:videoTrack];
[self.localTracks setObject:videoTrack forKey:trackUUID];
[self.localTracks setObject:videoTrack forKey:trackUUID];
NSMutableArray *audioTracks = [NSMutableArray array];
NSMutableArray *videoTracks = [NSMutableArray array];
NSMutableArray* audioTracks = [NSMutableArray array];
NSMutableArray* videoTracks = [NSMutableArray array];
for (RTCVideoTrack *track in mediaStream.videoTracks) {
[videoTracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}];
}
for (RTCVideoTrack* track in mediaStream.videoTracks) {
[videoTracks addObject:@{
@"id" : track.trackId,
@"kind" : track.kind,
@"label" : track.trackId,
@"enabled" : @(track.isEnabled),
@"remote" : @(YES),
@"readyState" : @"live"
}];
}
self.localStreams[mediaStreamId] = mediaStream;
result(@{@"streamId": mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks });
self.localStreams[mediaStreamId] = mediaStream;
result(
@{@"streamId" : mediaStreamId, @"audioTracks" : audioTracks, @"videoTracks" : videoTracks});
}
-(void)getDesktopSources:(NSDictionary *)argsMap
result:(FlutterResult)result {
- (void)getDesktopSources:(NSDictionary*)argsMap result:(FlutterResult)result {
#if TARGET_OS_OSX
NSLog(@"getDesktopSources");
NSLog(@"getDesktopSources");
NSArray *types = [argsMap objectForKey:@"types"];
if (types == nil) {
result([FlutterError errorWithCode:@"ERROR"
message:@"types is required"
details:nil]);
return;
}
NSArray* types = [argsMap objectForKey:@"types"];
if (types == nil) {
result([FlutterError errorWithCode:@"ERROR" message:@"types is required" details:nil]);
return;
}
if(![self buildDesktopSourcesListWithTypes:types forceReload:YES result:result]) {
NSLog(@"getDesktopSources failed.");
return;
}
if (![self buildDesktopSourcesListWithTypes:types forceReload:YES result:result]) {
NSLog(@"getDesktopSources failed.");
return;
}
NSMutableArray *sources = [NSMutableArray array];
NSEnumerator *enumerator = [_captureSources objectEnumerator];
RTCDesktopSource *object;
while ((object = enumerator.nextObject) != nil) {
/*NSData *data = nil;
if([object thumbnail]) {
data = [[NSData alloc] init];
NSImage *resizedImg = [self resizeImage:[object thumbnail] forSize:NSMakeSize(320, 180)];
data = [resizedImg TIFFRepresentation];
}*/
[sources addObject:@{
@"id": object.sourceId,
@"name": object.name,
@"thumbnailSize": @{@"width": @0, @"height": @0},
@"type": object.sourceType == RTCDesktopSourceTypeScreen? @"screen" : @"window",
//@"thumbnail": data,
}];
}
result(@{@"sources": sources});
NSMutableArray* sources = [NSMutableArray array];
NSEnumerator* enumerator = [_captureSources objectEnumerator];
RTCDesktopSource* object;
while ((object = enumerator.nextObject) != nil) {
/*NSData *data = nil;
if([object thumbnail]) {
data = [[NSData alloc] init];
NSImage *resizedImg = [self resizeImage:[object thumbnail] forSize:NSMakeSize(320, 180)];
data = [resizedImg TIFFRepresentation];
}*/
[sources addObject:@{
@"id" : object.sourceId,
@"name" : object.name,
@"thumbnailSize" : @{@"width" : @0, @"height" : @0},
@"type" : object.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window",
//@"thumbnail": data,
}];
}
result(@{@"sources" : sources});
#else
result([FlutterError errorWithCode:@"ERROR"
message:@"Not supported on iOS"
details:nil]);
result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]);
#endif
}
-(void)getDesktopSourceThumbnail:(NSDictionary *)argsMap
result:(FlutterResult)result {
- (void)getDesktopSourceThumbnail:(NSDictionary*)argsMap result:(FlutterResult)result {
#if TARGET_OS_OSX
NSLog(@"getDesktopSourceThumbnail");
NSString* sourceId = argsMap[@"sourceId"];
RTCDesktopSource *object = [self getSourceById:sourceId];
if(object == nil) {
result(@{@"error": @"No source found"});
return;
}
NSImage *image = [object UpdateThumbnail];
if(image != nil) {
NSImage *resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)];
NSData *data = [resizedImg TIFFRepresentation];
result(data);
} else {
result(@{@"error": @"No thumbnail found"});
}
NSLog(@"getDesktopSourceThumbnail");
NSString* sourceId = argsMap[@"sourceId"];
RTCDesktopSource* object = [self getSourceById:sourceId];
if (object == nil) {
result(@{@"error" : @"No source found"});
return;
}
NSImage* image = [object UpdateThumbnail];
if (image != nil) {
NSImage* resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)];
NSData* data = [resizedImg TIFFRepresentation];
result(data);
} else {
result(@{@"error" : @"No thumbnail found"});
}
#else
  result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]);
#endif
}
- (void)updateDesktopSources:(NSDictionary*)argsMap result:(FlutterResult)result {
#if TARGET_OS_OSX
NSLog(@"updateDesktopSources");
NSArray *types = [argsMap objectForKey:@"types"];
if (types == nil) {
result([FlutterError errorWithCode:@"ERROR"
message:@"types is required"
details:nil]);
return;
}
if(![self buildDesktopSourcesListWithTypes:types forceReload:NO result:result]) {
NSLog(@"updateDesktopSources failed.");
return;
}
result(@{@"result": @YES});
NSLog(@"updateDesktopSources");
NSArray* types = [argsMap objectForKey:@"types"];
if (types == nil) {
result([FlutterError errorWithCode:@"ERROR" message:@"types is required" details:nil]);
return;
}
if (![self buildDesktopSourcesListWithTypes:types forceReload:NO result:result]) {
NSLog(@"updateDesktopSources failed.");
return;
}
result(@{@"result" : @YES});
#else
  result([FlutterError errorWithCode:@"ERROR" message:@"Not supported on iOS" details:nil]);
#endif
}
#if TARGET_OS_OSX
- (NSImage*)resizeImage:(NSImage*)sourceImage forSize:(CGSize)targetSize {
  CGSize imageSize = sourceImage.size;
  CGFloat width = imageSize.width;
  CGFloat height = imageSize.height;
  CGFloat targetWidth = targetSize.width;
  CGFloat targetHeight = targetSize.height;
  CGFloat scaleFactor = 0.0;
  CGFloat scaledWidth = targetWidth;
  CGFloat scaledHeight = targetHeight;
  CGPoint thumbnailPoint = CGPointMake(0.0, 0.0);
  if (CGSizeEqualToSize(imageSize, targetSize) == NO) {
    CGFloat widthFactor = targetWidth / width;
    CGFloat heightFactor = targetHeight / height;
    // scale to fit the longer
    scaleFactor = (widthFactor > heightFactor) ? widthFactor : heightFactor;
    scaledWidth = ceil(width * scaleFactor);
    scaledHeight = ceil(height * scaleFactor);
    // center the image
    if (widthFactor > heightFactor) {
      thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5;
    } else if (widthFactor < heightFactor) {
      thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5;
    }
}
  NSImage* newImage = [[NSImage alloc] initWithSize:NSMakeSize(scaledWidth, scaledHeight)];
  CGRect thumbnailRect = {thumbnailPoint, {scaledWidth, scaledHeight}};
  NSRect imageRect = NSMakeRect(0.0, 0.0, width, height);
  [newImage lockFocus];
  [sourceImage drawInRect:thumbnailRect fromRect:imageRect operation:NSCompositeCopy fraction:1.0];
  [newImage unlockFocus];
  return newImage;
}
- (RTCDesktopSource*)getSourceById:(NSString*)sourceId {
  NSEnumerator* enumerator = [_captureSources objectEnumerator];
  RTCDesktopSource* object;
  while ((object = enumerator.nextObject) != nil) {
    if ([sourceId isEqualToString:object.sourceId]) {
      return object;
    }
  }
  return nil;
}
- (BOOL)buildDesktopSourcesListWithTypes:(NSArray*)types
                             forceReload:(BOOL)forceReload
                                  result:(FlutterResult)result {
  BOOL captureWindow = NO;
  BOOL captureScreen = NO;
  _captureSources = [NSMutableArray array];
  NSEnumerator* typesEnumerator = [types objectEnumerator];
  NSString* type;
  while ((type = typesEnumerator.nextObject) != nil) {
    if ([type isEqualToString:@"screen"]) {
      captureScreen = YES;
    } else if ([type isEqualToString:@"window"]) {
      captureWindow = YES;
    } else {
      result([FlutterError errorWithCode:@"ERROR" message:@"Invalid type" details:nil]);
      return NO;
    }
  }
  if (!captureWindow && !captureScreen) {
    result([FlutterError errorWithCode:@"ERROR"
                               message:@"At least one type is required"
                               details:nil]);
    return NO;
  }
  if (captureWindow) {
    if (!_window)
      _window = [[RTCDesktopMediaList alloc] initWithType:RTCDesktopSourceTypeWindow delegate:self];
    [_window UpdateSourceList:forceReload updateAllThumbnails:YES];
    NSArray<RTCDesktopSource*>* sources = [_window getSources];
    _captureSources = [_captureSources arrayByAddingObjectsFromArray:sources];
  }
  if (captureScreen) {
    if (!_screen)
      _screen = [[RTCDesktopMediaList alloc] initWithType:RTCDesktopSourceTypeScreen delegate:self];
    [_screen UpdateSourceList:forceReload updateAllThumbnails:YES];
    NSArray<RTCDesktopSource*>* sources = [_screen getSources];
    _captureSources = [_captureSources arrayByAddingObjectsFromArray:sources];
  }
  NSLog(@"captureSources: %lu", [_captureSources count]);
  return YES;
}
#pragma mark - RTCDesktopMediaListDelegate delegate
#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation"
- (void)didDesktopSourceAdded:(RTC_OBJC_TYPE(RTCDesktopSource) *)source {
//NSLog(@"didDesktopSourceAdded: %@, id %@", source.name, source.sourceId);
if(self.eventSink) {
NSImage *image = [source UpdateThumbnail];
NSData *data = [[NSData alloc] init];
if(image != nil) {
NSImage *resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)];
data = [resizedImg TIFFRepresentation];
}
self.eventSink(@{
@"event": @"desktopSourceAdded",
@"id": source.sourceId,
@"name": source.name,
@"thumbnailSize": @{@"width": @0, @"height": @0},
@"type": source.sourceType == RTCDesktopSourceTypeScreen? @"screen" : @"window",
@"thumbnail": data
});
// NSLog(@"didDesktopSourceAdded: %@, id %@", source.name, source.sourceId);
if (self.eventSink) {
NSImage* image = [source UpdateThumbnail];
NSData* data = [[NSData alloc] init];
if (image != nil) {
NSImage* resizedImg = [self resizeImage:image forSize:NSMakeSize(320, 180)];
data = [resizedImg TIFFRepresentation];
}
self.eventSink(@{
@"event" : @"desktopSourceAdded",
@"id" : source.sourceId,
@"name" : source.name,
@"thumbnailSize" : @{@"width" : @0, @"height" : @0},
@"type" : source.sourceType == RTCDesktopSourceTypeScreen ? @"screen" : @"window",
@"thumbnail" : data
});
}
}
#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation"
- (void)didDesktopSourceRemoved:(RTC_OBJC_TYPE(RTCDesktopSource) *)source {
  // NSLog(@"didDesktopSourceRemoved: %@, id %@", source.name, source.sourceId);
  if (self.eventSink) {
    self.eventSink(@{
      @"event" : @"desktopSourceRemoved",
      @"id" : source.sourceId,
    });
  }
}
#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation"
- (void)didDesktopSourceNameChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *)source {
  // NSLog(@"didDesktopSourceNameChanged: %@, id %@", source.name, source.sourceId);
  if (self.eventSink) {
    self.eventSink(@{
      @"event" : @"desktopSourceNameChanged",
      @"id" : source.sourceId,
      @"name" : source.name,
    });
  }
}
#pragma clang diagnostic ignored "-Wobjc-protocol-method-implementation"
- (void)didDesktopSourceThumbnailChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *)source {
  // NSLog(@"didDesktopSourceThumbnailChanged: %@, id %@", source.name, source.sourceId);
  if (self.eventSink) {
    NSImage* resizedImg = [self resizeImage:[source thumbnail] forSize:NSMakeSize(320, 180)];
    NSData* data = [resizedImg TIFFRepresentation];
    self.eventSink(@{
      @"event" : @"desktopSourceThumbnailChanged",
      @"id" : source.sourceId,
      @"thumbnail" : data
    });
  }
}
#pragma mark - RTCDesktopCapturerDelegate delegate
- (void)didSourceCaptureStart:(RTCDesktopCapturer*)capturer {
  NSLog(@"didSourceCaptureStart");
}
- (void)didSourceCapturePaused:(RTCDesktopCapturer*)capturer {
  NSLog(@"didSourceCapturePaused");
}
- (void)didSourceCaptureStop:(RTCDesktopCapturer*)capturer {
  NSLog(@"didSourceCaptureStop");
}
- (void)didSourceCaptureError:(RTCDesktopCapturer*)capturer {
  NSLog(@"didSourceCaptureError");
}
#endif


@@ -5,8 +5,10 @@
#endif
#import <WebRTC/WebRTC.h>
@interface FlutterRTCFrameCapturer : NSObject <RTCVideoRenderer>
- (instancetype)initWithTrack:(RTCVideoTrack*)track
                       toPath:(NSString*)path
                       result:(FlutterResult)result;
@end


@@ -10,171 +10,166 @@
@import CoreVideo;
@implementation FlutterRTCFrameCapturer {
  RTCVideoTrack* _track;
  NSString* _path;
  FlutterResult _result;
  bool _gotFrame;
}
- (instancetype)initWithTrack:(RTCVideoTrack*)track
                       toPath:(NSString*)path
                       result:(FlutterResult)result {
  self = [super init];
  if (self) {
    _gotFrame = false;
    _track = track;
    _path = path;
    _result = result;
    [track addRenderer:self];
  }
  return self;
}
- (void)setSize:(CGSize)size {
}
- (void)renderFrame:(nullable RTCVideoFrame*)frame {
  if (_gotFrame || frame == nil)
    return;
  _gotFrame = true;
  id<RTCVideoFrameBuffer> buffer = frame.buffer;
  CVPixelBufferRef pixelBufferRef;
  bool shouldRelease;
  if (![buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
    pixelBufferRef = [self convertToCVPixelBuffer:frame];
    shouldRelease = true;
  } else {
    pixelBufferRef = ((RTCCVPixelBuffer*)buffer).pixelBuffer;
    shouldRelease = false;
  }
  CIImage* ciImage = [CIImage imageWithCVPixelBuffer:pixelBufferRef];
  CGRect outputSize;
  if (@available(iOS 11, macOS 10.13, *)) {
    switch (frame.rotation) {
      case RTCVideoRotation_90:
        ciImage = [ciImage imageByApplyingCGOrientation:kCGImagePropertyOrientationRight];
        outputSize = CGRectMake(0, 0, frame.height, frame.width);
        break;
      case RTCVideoRotation_180:
        ciImage = [ciImage imageByApplyingCGOrientation:kCGImagePropertyOrientationDown];
        outputSize = CGRectMake(0, 0, frame.width, frame.height);
        break;
      case RTCVideoRotation_270:
        ciImage = [ciImage imageByApplyingCGOrientation:kCGImagePropertyOrientationLeft];
        outputSize = CGRectMake(0, 0, frame.height, frame.width);
        break;
      default:
        outputSize = CGRectMake(0, 0, frame.width, frame.height);
        break;
    }
  } else {
    outputSize = CGRectMake(0, 0, frame.width, frame.height);
  }
  CIContext* tempContext = [CIContext contextWithOptions:nil];
  CGImageRef cgImage = [tempContext createCGImage:ciImage fromRect:outputSize];
  NSData* imageData;
#if TARGET_OS_IPHONE
  UIImage* uiImage = [UIImage imageWithCGImage:cgImage];
  if ([[_path pathExtension] isEqualToString:@"jpg"]) {
    imageData = UIImageJPEGRepresentation(uiImage, 1.0f);
  } else {
    imageData = UIImagePNGRepresentation(uiImage);
  }
#else
  NSBitmapImageRep* newRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage];
  [newRep setSize:NSSizeToCGSize(outputSize.size)];
  NSDictionary<NSBitmapImageRepPropertyKey, id>* quality = @{NSImageCompressionFactor : @1.0f};
  if ([[_path pathExtension] isEqualToString:@"jpg"]) {
    imageData = [newRep representationUsingType:NSJPEGFileType properties:quality];
  } else {
    imageData = [newRep representationUsingType:NSPNGFileType properties:quality];
  }
#endif
  CGImageRelease(cgImage);
  if (shouldRelease)
    CVPixelBufferRelease(pixelBufferRef);
  if (imageData && [imageData writeToFile:_path atomically:NO]) {
    NSLog(@"File written successfully to %@", _path);
    _result(nil);
  } else {
    NSLog(@"Failed to write to file");
    _result([FlutterError errorWithCode:@"CaptureFrameFailed"
                                message:@"Failed to write image data to file"
                                details:nil]);
  }
  dispatch_async(dispatch_get_main_queue(), ^{
    [self->_track removeRenderer:self];
    self->_track = nil;
  });
}
- (CVPixelBufferRef)convertToCVPixelBuffer:(RTCVideoFrame*)frame {
  id<RTCI420Buffer> i420Buffer = [frame.buffer toI420];
  CVPixelBufferRef outputPixelBuffer;
  size_t w = (size_t)roundf(i420Buffer.width);
  size_t h = (size_t)roundf(i420Buffer.height);
  NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
  CVPixelBufferCreate(kCFAllocatorDefault, w, h, kCVPixelFormatType_32BGRA,
                      (__bridge CFDictionaryRef)(pixelAttributes), &outputPixelBuffer);
  CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
  const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer);
  if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ||
      pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
    // NV12
    uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0);
    const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
    uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1);
    const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
    [RTCYUVHelper I420ToNV12:i420Buffer.dataY
                  srcStrideY:i420Buffer.strideY
                        srcU:i420Buffer.dataU
                  srcStrideU:i420Buffer.strideU
                        srcV:i420Buffer.dataV
                  srcStrideV:i420Buffer.strideV
                        dstY:dstY
                  dstStrideY:(int)dstYStride
                       dstUV:dstUV
                 dstStrideUV:(int)dstUVStride
                       width:i420Buffer.width
                      height:i420Buffer.height];
  } else {
    uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer);
    const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer);
    if (pixelFormat == kCVPixelFormatType_32BGRA) {
      // Corresponds to libyuv::FOURCC_ARGB
      [RTCYUVHelper I420ToARGB:i420Buffer.dataY
                    srcStrideY:i420Buffer.strideY
                          srcU:i420Buffer.dataU
                    srcStrideU:i420Buffer.strideU
                          srcV:i420Buffer.dataV
                    srcStrideV:i420Buffer.strideV
                       dstARGB:dst
                 dstStrideARGB:(int)bytesPerRow
                         width:i420Buffer.width
                        height:i420Buffer.height];
    } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
      // Corresponds to libyuv::FOURCC_BGRA
      [RTCYUVHelper I420ToBGRA:i420Buffer.dataY
                    srcStrideY:i420Buffer.strideY
                          srcU:i420Buffer.dataU
                    srcStrideU:i420Buffer.strideU
                          srcV:i420Buffer.dataV
                    srcStrideV:i420Buffer.strideV
                       dstBGRA:dst
                 dstStrideBGRA:(int)bytesPerRow
                         width:i420Buffer.width
                        height:i420Buffer.height];
    }
  }
  CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
  return outputPixelBuffer;
}
@end


@@ -3,30 +3,25 @@
@interface FlutterWebRTCPlugin (RTCMediaStream)
- (void)getUserMedia:(NSDictionary*)constraints result:(FlutterResult)result;
- (void)createLocalMediaStream:(FlutterResult)result;
- (void)getSources:(FlutterResult)result;
- (void)mediaStreamTrackHasTorch:(RTCMediaStreamTrack*)track result:(FlutterResult)result;
- (void)mediaStreamTrackSetTorch:(RTCMediaStreamTrack*)track
                           torch:(BOOL)torch
                          result:(FlutterResult)result;
- (void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack*)track result:(FlutterResult)result;
- (void)mediaStreamTrackCaptureFrame:(RTCMediaStreamTrack*)track
                              toPath:(NSString*)path
                              result:(FlutterResult)result;
- (void)selectAudioInput:(NSString*)deviceId result:(FlutterResult)result;
- (void)selectAudioOutput:(NSString*)deviceId result:(FlutterResult)result;
@end

common/darwin/Classes/FlutterRTCMediaStream.m (Executable file → Normal file)

File diff suppressed because it is too large

common/darwin/Classes/FlutterRTCPeerConnection.h (Executable file → Normal file)

@@ -1,46 +1,48 @@
#import "FlutterWebRTCPlugin.h"
@interface RTCPeerConnection (Flutter) <FlutterStreamHandler>
@property(nonatomic, strong, nonnull) NSMutableDictionary<NSString*, RTCDataChannel*>* dataChannels;
@property(nonatomic, strong, nonnull)
    NSMutableDictionary<NSString*, RTCMediaStream*>* remoteStreams;
@property(nonatomic, strong, nonnull)
    NSMutableDictionary<NSString*, RTCMediaStreamTrack*>* remoteTracks;
@property(nonatomic, strong, nonnull) NSString* flutterId;
@property(nonatomic, strong, nullable) FlutterEventSink eventSink;
@property(nonatomic, strong, nullable) FlutterEventChannel* eventChannel;
@end
@interface FlutterWebRTCPlugin (RTCPeerConnection)
- (void)peerConnectionCreateOffer:(nonnull NSDictionary*)constraints
peerConnection:(nonnull RTCPeerConnection*)peerConnection
result:(nonnull FlutterResult)result;
- (void)peerConnectionCreateAnswer:(nonnull NSDictionary*)constraints
                    peerConnection:(nonnull RTCPeerConnection*)peerConnection
result:(nonnull FlutterResult)result;
- (void)peerConnectionSetLocalDescription:(nonnull RTCSessionDescription*)sdp
                           peerConnection:(nonnull RTCPeerConnection*)peerConnection
result:(nonnull FlutterResult)result;
- (void)peerConnectionSetRemoteDescription:(nonnull RTCSessionDescription*)sdp
                            peerConnection:(nonnull RTCPeerConnection*)peerConnection
result:(nonnull FlutterResult)result;
- (void)peerConnectionAddICECandidate:(nonnull RTCIceCandidate*)candidate
                       peerConnection:(nonnull RTCPeerConnection*)peerConnection
result:(nonnull FlutterResult)result;
- (void)peerConnectionGetStats:(nonnull RTCPeerConnection*)peerConnection
result:(nonnull FlutterResult)result;
- (void)peerConnectionGetStatsForTrackId:(nonnull NSString*)trackID
                          peerConnection:(nonnull RTCPeerConnection*)peerConnection
                                  result:(nonnull FlutterResult)result;
- (nonnull RTCMediaConstraints*)parseMediaConstraints:(nonnull NSDictionary*)constraints;
- (void)peerConnectionSetConfiguration:(nonnull RTCConfiguration*)configuration
peerConnection:(nonnull RTCPeerConnection*)peerConnection;
@end

common/darwin/Classes/FlutterRTCPeerConnection.m (Executable file → Normal file)

File diff suppressed because it is too large

common/darwin/Classes/FlutterRTCVideoRenderer.h (Executable file → Normal file)

@@ -1,19 +1,20 @@
#import "FlutterWebRTCPlugin.h"
#import <WebRTC/RTCMediaStream.h>
#import <WebRTC/RTCVideoFrame.h>
#import <WebRTC/RTCVideoRenderer.h>
#import <WebRTC/RTCVideoTrack.h>
@interface FlutterRTCVideoRenderer
    : NSObject <FlutterTexture, RTCVideoRenderer, FlutterStreamHandler>
/**
* The {@link RTCVideoTrack}, if any, which this instance renders.
*/
@property(nonatomic, strong) RTCVideoTrack* videoTrack;
@property(nonatomic) int64_t textureId;
@property(nonatomic, weak) id<FlutterTextureRegistry> registry;
@property(nonatomic, strong) FlutterEventSink eventSink;
- (instancetype)initWithTextureRegistry:(id<FlutterTextureRegistry>)registry
messenger:(NSObject<FlutterBinaryMessenger>*)messenger;
@@ -22,12 +23,11 @@
@end
@interface FlutterWebRTCPlugin (FlutterVideoRendererManager)
- (FlutterRTCVideoRenderer*)createWithTextureRegistry:(id<FlutterTextureRegistry>)registry
                                             messenger:(NSObject<FlutterBinaryMessenger>*)messenger;
- (void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack;
@end

common/darwin/Classes/FlutterRTCVideoRenderer.m (Executable file → Normal file)

@@ -2,231 +2,228 @@
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CGImage.h>
#import <WebRTC/RTCYUVHelper.h>
#import <WebRTC/RTCYUVPlanarBuffer.h>
#import <WebRTC/WebRTC.h>
#import <objc/runtime.h>
#import "FlutterWebRTCPlugin.h"
@implementation FlutterRTCVideoRenderer {
  CGSize _frameSize;
  CGSize _renderSize;
  CVPixelBufferRef _pixelBufferRef;
  RTCVideoRotation _rotation;
  FlutterEventChannel* _eventChannel;
  bool _isFirstFrameRendered;
}
@synthesize textureId = _textureId;
@synthesize registry = _registry;
@synthesize eventSink = _eventSink;
- (instancetype)initWithTextureRegistry:(id<FlutterTextureRegistry>)registry
                              messenger:(NSObject<FlutterBinaryMessenger>*)messenger {
  self = [super init];
  if (self) {
    _isFirstFrameRendered = false;
    _frameSize = CGSizeZero;
    _renderSize = CGSizeZero;
    _rotation = -1;
    _registry = registry;
    _pixelBufferRef = nil;
    _eventSink = nil;
    _textureId = [registry registerTexture:self];
    /*Create Event Channel.*/
    _eventChannel = [FlutterEventChannel
        eventChannelWithName:[NSString stringWithFormat:@"FlutterWebRTC/Texture%lld", _textureId]
             binaryMessenger:messenger];
    [_eventChannel setStreamHandler:self];
  }
  return self;
}
- (void)dealloc {
  if (_pixelBufferRef) {
    CVBufferRelease(_pixelBufferRef);
  }
}
- (CVPixelBufferRef)copyPixelBuffer {
  if (_pixelBufferRef != nil) {
    CVBufferRetain(_pixelBufferRef);
    return _pixelBufferRef;
  }
  return nil;
}
- (void)dispose {
  [_registry unregisterTexture:_textureId];
}
- (void)setVideoTrack:(RTCVideoTrack*)videoTrack {
  RTCVideoTrack* oldValue = self.videoTrack;
  if (oldValue != videoTrack) {
    _isFirstFrameRendered = false;
    if (oldValue) {
      [oldValue removeRenderer:self];
    }
    _videoTrack = videoTrack;
    _frameSize = CGSizeZero;
    _renderSize = CGSizeZero;
    _rotation = -1;
    if (videoTrack) {
      [videoTrack addRenderer:self];
    }
  }
}
- (id<RTCI420Buffer>)correctRotation:(const id<RTCI420Buffer>)src
                        withRotation:(RTCVideoRotation)rotation {
  int rotated_width = src.width;
  int rotated_height = src.height;
  if (rotation == RTCVideoRotation_90 || rotation == RTCVideoRotation_270) {
    int temp = rotated_width;
    rotated_width = rotated_height;
    rotated_height = temp;
  }
  id<RTCI420Buffer> buffer = [[RTCI420Buffer alloc] initWithWidth:rotated_width
                                                           height:rotated_height];
  [RTCYUVHelper I420Rotate:src.dataY
                srcStrideY:src.strideY
                      srcU:src.dataU
                srcStrideU:src.strideU
                      srcV:src.dataV
                srcStrideV:src.strideV
                      dstY:(uint8_t*)buffer.dataY
                dstStrideY:buffer.strideY
                      dstU:(uint8_t*)buffer.dataU
                dstStrideU:buffer.strideU
                      dstV:(uint8_t*)buffer.dataV
                dstStrideV:buffer.strideV
                     width:src.width
                    height:src.height
                      mode:rotation];
  return buffer;
}
- (void)copyI420ToCVPixelBuffer:(CVPixelBufferRef)outputPixelBuffer
                      withFrame:(RTCVideoFrame*)frame {
  id<RTCI420Buffer> i420Buffer = [self correctRotation:[frame.buffer toI420]
                                          withRotation:frame.rotation];
  CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
  const OSType pixelFormat = CVPixelBufferGetPixelFormatType(outputPixelBuffer);
  if (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ||
      pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
    // NV12
    uint8_t* dstY = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0);
    const size_t dstYStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
    uint8_t* dstUV = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1);
    const size_t dstUVStride = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
    [RTCYUVHelper I420ToNV12:i420Buffer.dataY
                  srcStrideY:i420Buffer.strideY
                        srcU:i420Buffer.dataU
                  srcStrideU:i420Buffer.strideU
                        srcV:i420Buffer.dataV
                  srcStrideV:i420Buffer.strideV
                        dstY:dstY
                  dstStrideY:(int)dstYStride
                       dstUV:dstUV
                 dstStrideUV:(int)dstUVStride
                       width:i420Buffer.width
                      height:i420Buffer.height];
  } else {
    uint8_t* dst = CVPixelBufferGetBaseAddress(outputPixelBuffer);
    const size_t bytesPerRow = CVPixelBufferGetBytesPerRow(outputPixelBuffer);
    if (pixelFormat == kCVPixelFormatType_32BGRA) {
      // Corresponds to libyuv::FOURCC_ARGB
      [RTCYUVHelper I420ToARGB:i420Buffer.dataY
                    srcStrideY:i420Buffer.strideY
                          srcU:i420Buffer.dataU
                    srcStrideU:i420Buffer.strideU
                          srcV:i420Buffer.dataV
                    srcStrideV:i420Buffer.strideV
                       dstARGB:dst
                 dstStrideARGB:(int)bytesPerRow
                         width:i420Buffer.width
                        height:i420Buffer.height];
    } else if (pixelFormat == kCVPixelFormatType_32ARGB) {
      // Corresponds to libyuv::FOURCC_BGRA
      [RTCYUVHelper I420ToBGRA:i420Buffer.dataY
                    srcStrideY:i420Buffer.strideY
                          srcU:i420Buffer.dataU
                    srcStrideU:i420Buffer.strideU
                          srcV:i420Buffer.dataV
                    srcStrideV:i420Buffer.strideV
                       dstBGRA:dst
                 dstStrideBGRA:(int)bytesPerRow
                         width:i420Buffer.width
                        height:i420Buffer.height];
    }
  }
  CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
}
#pragma mark - RTCVideoRenderer methods
- (void)renderFrame:(RTCVideoFrame*)frame {
  [self copyI420ToCVPixelBuffer:_pixelBufferRef withFrame:frame];
  __weak FlutterRTCVideoRenderer* weakSelf = self;
  if (_renderSize.width != frame.width || _renderSize.height != frame.height) {
    dispatch_async(dispatch_get_main_queue(), ^{
      FlutterRTCVideoRenderer* strongSelf = weakSelf;
      if (strongSelf.eventSink) {
        strongSelf.eventSink(@{
          @"event" : @"didTextureChangeVideoSize",
          @"id" : @(strongSelf.textureId),
          @"width" : @(frame.width),
          @"height" : @(frame.height),
        });
      }
    });
    _renderSize = CGSizeMake(frame.width, frame.height);
  }
  if (frame.rotation != _rotation) {
    dispatch_async(dispatch_get_main_queue(), ^{
      FlutterRTCVideoRenderer* strongSelf = weakSelf;
      if (strongSelf.eventSink) {
        strongSelf.eventSink(@{
          @"event" : @"didTextureChangeRotation",
          @"id" : @(strongSelf.textureId),
          @"rotation" : @(frame.rotation),
        });
      }
    });
    _rotation = frame.rotation;
  }
  // Notify the Flutter new pixelBufferRef to be ready.
  dispatch_async(dispatch_get_main_queue(), ^{
    FlutterRTCVideoRenderer* strongSelf = weakSelf;
    [strongSelf.registry textureFrameAvailable:strongSelf.textureId];
    if (!strongSelf->_isFirstFrameRendered) {
      if (strongSelf.eventSink) {
        strongSelf.eventSink(@{@"event" : @"didFirstFrameRendered"});
        strongSelf->_isFirstFrameRendered = true;
      }
    }
  });
}
/**
@@ -235,44 +232,41 @@
* @param size The size of the video frame to render.
*/
- (void)setSize:(CGSize)size {
  if (_pixelBufferRef == nil ||
      (size.width != _frameSize.width || size.height != _frameSize.height)) {
    if (_pixelBufferRef) {
      CVBufferRelease(_pixelBufferRef);
    }
    NSDictionary* pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32BGRA,
                        (__bridge CFDictionaryRef)(pixelAttributes), &_pixelBufferRef);
    _frameSize = size;
  }
}
#pragma mark - FlutterStreamHandler methods
- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments {
  _eventSink = nil;
  return nil;
}
- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments
eventSink:(nonnull FlutterEventSink)sink {
  _eventSink = sink;
  return nil;
}
@end
@implementation FlutterWebRTCPlugin (FlutterVideoRendererManager)
- (FlutterRTCVideoRenderer*)createWithTextureRegistry:(id<FlutterTextureRegistry>)registry
                                             messenger:(NSObject<FlutterBinaryMessenger>*)messenger {
  return [[FlutterRTCVideoRenderer alloc] initWithTextureRegistry:registry messenger:messenger];
}
- (void)rendererSetSrcObject:(FlutterRTCVideoRenderer*)renderer stream:(RTCVideoTrack*)videoTrack {
  renderer.videoTrack = videoTrack;
}
@end


@@ -14,41 +14,46 @@ typedef void (^CompletionHandler)(void);
typedef void (^CapturerStopHandler)(CompletionHandler handler);
@interface FlutterWebRTCPlugin : NSObject <FlutterPlugin,
                                           RTCPeerConnectionDelegate,
                                           FlutterStreamHandler
#if TARGET_OS_OSX
                                           ,
                                           RTCDesktopMediaListDelegate,
                                           RTCDesktopCapturerDelegate
#endif
                                           >
@property(nonatomic, strong) RTCPeerConnectionFactory* peerConnectionFactory;
@property(nonatomic, strong) NSMutableDictionary<NSString*, RTCPeerConnection*>* peerConnections;
@property(nonatomic, strong) NSMutableDictionary<NSString*, RTCMediaStream*>* localStreams;
@property(nonatomic, strong) NSMutableDictionary<NSString*, RTCMediaStreamTrack*>* localTracks;
@property(nonatomic, strong) NSMutableDictionary<NSNumber*, FlutterRTCVideoRenderer*>* renders;
@property(nonatomic, strong)
    NSMutableDictionary<NSString*, CapturerStopHandler>* videoCapturerStopHandlers;
#if TARGET_OS_IPHONE
@property(nonatomic, retain) UIViewController* viewController; /*for broadcast or ReplayKit */
#endif
@property(nonatomic, strong) FlutterEventSink eventSink;
@property(nonatomic, strong) NSObject<FlutterBinaryMessenger>* messenger;
@property(nonatomic, strong) RTCCameraVideoCapturer* videoCapturer;
@property(nonatomic, strong) FlutterRTCFrameCapturer* frameCapturer;
@property(nonatomic, strong) AVAudioSessionPort preferredInput;
@property(nonatomic) BOOL _usingFrontCamera;
@property(nonatomic) int _targetWidth;
@property(nonatomic) int _targetHeight;
@property(nonatomic) int _targetFps;
- (RTCMediaStream*)streamForId:(NSString*)streamId peerConnectionId:(NSString*)peerConnectionId;
- (NSDictionary*)mediaStreamToMap:(RTCMediaStream*)stream ownerTag:(NSString*)ownerTag;
- (NSDictionary*)mediaTrackToMap:(RTCMediaStreamTrack*)track;
- (NSDictionary*)receiverToMap:(RTCRtpReceiver*)receiver;
- (NSDictionary*)transceiverToMap:(RTCRtpTransceiver*)transceiver;
- (BOOL)hasLocalAudioTrack;
- (void)ensureAudioSession;
- (void)deactiveRtcAudioSession;
@end

File diff suppressed because it is too large

format.sh (Executable file)

@@ -0,0 +1,3 @@
#!/bin/sh
find . -type f -name "*.cc" -o -type f -name "*.h" -o -type f -name "*.m" -o -type f -name "*.mm" | xargs clang-format -style=file -i
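For reference, a minimal way to run this script locally — assuming clang-format is on PATH and a .clang-format file exists at the repository root, which the -style=file option requires:

  # from the repository root
  chmod +x format.sh     # once, if the executable bit is not set
  ./format.sh            # rewrites every matching source file in place
  git diff --stat        # review which files clang-format touched

Because clang-format's -i flag edits files in place, running the script on a clean working tree makes it easy to review or revert the reformatting.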


@@ -6,8 +6,8 @@
//
#import "FlutterBroadcastScreenCapturer.h"
#import "FlutterSocketConnectionFrameReader.h"
#import "FlutterSocketConnection.h"
#import "FlutterSocketConnectionFrameReader.h"
NSString* const kRTCScreensharingSocketFD = @"rtc_SSFD";
NSString* const kRTCAppGroupIdentifier = @"RTCAppGroupIdentifier";
@@ -15,52 +15,55 @@ NSString* const kRTCScreenSharingExtension = @"RTCScreenSharingExtension";
@interface FlutterBroadcastScreenCapturer ()
@property(nonatomic, retain) FlutterSocketConnectionFrameReader* capturer;
@end
@interface FlutterBroadcastScreenCapturer (Private)
@property(nonatomic, readonly) NSString* appGroupIdentifier;
@end
@implementation FlutterBroadcastScreenCapturer
- (void)startCapture {
  if (!self.appGroupIdentifier) {
    return;
  }
  NSString* socketFilePath = [self filePathForApplicationGroupIdentifier:self.appGroupIdentifier];
  FlutterSocketConnectionFrameReader* frameReader =
      [[FlutterSocketConnectionFrameReader alloc] initWithDelegate:self.delegate];
  FlutterSocketConnection* connection =
      [[FlutterSocketConnection alloc] initWithFilePath:socketFilePath];
  self.capturer = frameReader;
  [self.capturer startCaptureWithConnection:connection];
}
- (void)stopCapture {
  [self.capturer stopCapture];
}
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
  [self stopCapture];
  if (completionHandler != nil) {
    completionHandler();
  }
}
// MARK: Private Methods
- (NSString*)appGroupIdentifier {
  NSDictionary* infoDictionary = [[NSBundle mainBundle] infoDictionary];
  return infoDictionary[kRTCAppGroupIdentifier];
}
- (NSString*)filePathForApplicationGroupIdentifier:(nonnull NSString*)identifier {
  NSURL* sharedContainer =
      [[NSFileManager defaultManager] containerURLForSecurityApplicationGroupIdentifier:identifier];
  NSString* socketFilePath =
      [[sharedContainer URLByAppendingPathComponent:kRTCScreensharingSocketFD] path];
  return socketFilePath;
}
@end


@@ -11,8 +11,8 @@ NS_ASSUME_NONNULL_BEGIN
@interface FlutterSocketConnection : NSObject
- (instancetype)initWithFilePath:(nonnull NSString*)filePath;
- (void)openWithStreamDelegate:(id<NSStreamDelegate>)streamDelegate;
- (void)close;
@end


@@ -12,137 +12,146 @@
@interface FlutterSocketConnection ()
@property(nonatomic, assign) int serverSocket;
@property(nonatomic, strong) dispatch_source_t listeningSource;
@property(nonatomic, strong) NSThread* networkThread;
@property(nonatomic, strong) NSInputStream* inputStream;
@property(nonatomic, strong) NSOutputStream* outputStream;
@end
@implementation FlutterSocketConnection
- (instancetype)initWithFilePath:(nonnull NSString*)filePath {
  self = [super init];
  [self setupNetworkThread];
  self.serverSocket = socket(AF_UNIX, SOCK_STREAM, 0);
  if (self.serverSocket < 0) {
    NSLog(@"failure creating socket");
    return nil;
  }
  if (![self setupSocketWithFileAtPath:filePath]) {
    close(self.serverSocket);
    return nil;
  }
  return self;
}
- (void)openWithStreamDelegate:(id<NSStreamDelegate>)streamDelegate {
  int status = listen(self.serverSocket, 10);
  if (status < 0) {
    NSLog(@"failure: socket listening");
    return;
  }
  dispatch_source_t listeningSource =
      dispatch_source_create(DISPATCH_SOURCE_TYPE_READ, self.serverSocket, 0, NULL);
  dispatch_source_set_event_handler(listeningSource, ^{
    int clientSocket = accept(self.serverSocket, NULL, NULL);
    if (clientSocket < 0) {
      NSLog(@"failure accepting connection");
      return;
    }
    CFReadStreamRef readStream;
    CFWriteStreamRef writeStream;
    CFStreamCreatePairWithSocket(kCFAllocatorDefault, clientSocket, &readStream, &writeStream);
    self.inputStream = (__bridge_transfer NSInputStream*)readStream;
    self.inputStream.delegate = streamDelegate;
    [self.inputStream setProperty:@"kCFBooleanTrue"
                           forKey:@"kCFStreamPropertyShouldCloseNativeSocket"];
    self.outputStream = (__bridge_transfer NSOutputStream*)writeStream;
    [self.outputStream setProperty:@"kCFBooleanTrue"
                            forKey:@"kCFStreamPropertyShouldCloseNativeSocket"];
    [self.networkThread start];
    [self performSelector:@selector(scheduleStreams)
                 onThread:self.networkThread
               withObject:nil
            waitUntilDone:true];
    [self.inputStream open];
    [self.outputStream open];
  });
  self.listeningSource = listeningSource;
  dispatch_resume(listeningSource);
}
- (void)close {
  if (![self.networkThread isExecuting]) {
    return;
  }
  [self performSelector:@selector(unscheduleStreams)
               onThread:self.networkThread
             withObject:nil
          waitUntilDone:true];
  self.inputStream.delegate = nil;
  self.outputStream.delegate = nil;
  [self.inputStream close];
  [self.outputStream close];
  [self.networkThread cancel];
  dispatch_source_cancel(self.listeningSource);
  close(self.serverSocket);
}
// MARK: - Private Methods
- (void)setupNetworkThread {
  self.networkThread = [[NSThread alloc] initWithBlock:^{
    do {
      @autoreleasepool {
        [[NSRunLoop currentRunLoop] run];
      }
    } while (![NSThread currentThread].isCancelled);
  }];
  self.networkThread.qualityOfService = NSQualityOfServiceUserInitiated;
}
- (BOOL)setupSocketWithFileAtPath:(NSString*)filePath {
  struct sockaddr_un addr;
  memset(&addr, 0, sizeof(addr));
  addr.sun_family = AF_UNIX;
  if (filePath.length > sizeof(addr.sun_path)) {
    NSLog(@"failure: path too long");
    return false;
  }
unlink(filePath.UTF8String);
strncpy(addr.sun_path, filePath.UTF8String, sizeof(addr.sun_path) - 1);
int status = bind(self.serverSocket, (struct sockaddr *)&addr, sizeof(addr));
if (status < 0) {
NSLog(@"failure: socket binding");
return false;
}
return true;
unlink(filePath.UTF8String);
strncpy(addr.sun_path, filePath.UTF8String, sizeof(addr.sun_path) - 1);
int status = bind(self.serverSocket, (struct sockaddr*)&addr, sizeof(addr));
if (status < 0) {
NSLog(@"failure: socket binding");
return false;
}
return true;
}
- (void)scheduleStreams {
[self.inputStream scheduleInRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
[self.outputStream scheduleInRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
[self.inputStream scheduleInRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
[self.outputStream scheduleInRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
}
- (void)unscheduleStreams {
[self.inputStream removeFromRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
[self.outputStream removeFromRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
[self.inputStream removeFromRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
[self.outputStream removeFromRunLoop:NSRunLoop.currentRunLoop forMode:NSRunLoopCommonModes];
}
@end
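The connection lifecycle above is plain POSIX underneath: bind a Unix-domain socket, listen, accept on a dispatch source, then bridge the descriptor into NSStream objects. A minimal standalone C++ sketch of that bind/listen/accept sequence (the socket path is a placeholder, not a path used by the plugin):

// Sketch of the POSIX flow used above: bind a Unix-domain socket,
// listen with the same backlog, and accept one client. Error handling
// is reduced to early returns; kSocketPath is illustrative only.
#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>
#include <cstring>

int main() {
  const char* kSocketPath = "/tmp/frame_pipe.sock";
  int server = socket(AF_UNIX, SOCK_STREAM, 0);
  if (server < 0) return 1;

  sockaddr_un addr;
  memset(&addr, 0, sizeof(addr));
  addr.sun_family = AF_UNIX;
  unlink(kSocketPath);  // remove a stale socket file, as the plugin does
  strncpy(addr.sun_path, kSocketPath, sizeof(addr.sun_path) - 1);

  if (bind(server, (sockaddr*)&addr, sizeof(addr)) < 0) return 1;
  if (listen(server, 10) < 0) return 1;  // same backlog as above

  int client = accept(server, nullptr, nullptr);  // blocking variant
  if (client >= 0) close(client);
  close(server);
  return 0;
}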

View File

@@ -12,10 +12,10 @@ NS_ASSUME_NONNULL_BEGIN

@class FlutterSocketConnection;

@interface FlutterSocketConnectionFrameReader : RTCVideoCapturer

- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate;
- (void)startCaptureWithConnection:(nonnull FlutterSocketConnection*)connection;
- (void)stopCapture;

@end

View File

@@ -7,112 +7,119 @@
#include <mach/mach_time.h>

#import <ReplayKit/ReplayKit.h>
#import <WebRTC/RTCCVPixelBuffer.h>
#import <WebRTC/RTCVideoFrameBuffer.h>

#import "FlutterSocketConnection.h"
#import "FlutterSocketConnectionFrameReader.h"

const NSUInteger kMaxReadLength = 10 * 1024;

@interface Message : NSObject

@property(nonatomic, assign, readonly) CVImageBufferRef imageBuffer;
@property(nonatomic, copy, nullable) void (^didComplete)(BOOL success, Message* message);

- (NSInteger)appendBytes:(UInt8*)buffer length:(NSUInteger)length;

@end

@interface Message ()

@property(nonatomic, assign) CVImageBufferRef imageBuffer;
@property(nonatomic, assign) int imageOrientation;
@property(nonatomic, assign) CFHTTPMessageRef framedMessage;

@end

@implementation Message

- (instancetype)init {
  self = [super init];
  if (self) {
    self.imageBuffer = NULL;
  }

  return self;
}

- (void)dealloc {
  CVPixelBufferRelease(_imageBuffer);
}

/** Returns the amount of missing bytes to complete the message, or -1 when not enough bytes were
 * provided to compute the message length */
- (NSInteger)appendBytes:(UInt8*)buffer length:(NSUInteger)length {
  if (!_framedMessage) {
    _framedMessage = CFHTTPMessageCreateEmpty(kCFAllocatorDefault, false);
  }

  CFHTTPMessageAppendBytes(_framedMessage, buffer, length);
  if (!CFHTTPMessageIsHeaderComplete(_framedMessage)) {
    return -1;
  }

  NSInteger contentLength = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(
      _framedMessage, (__bridge CFStringRef) @"Content-Length")) integerValue];
  NSInteger bodyLength =
      (NSInteger)[CFBridgingRelease(CFHTTPMessageCopyBody(_framedMessage)) length];

  NSInteger missingBytesCount = contentLength - bodyLength;
  if (missingBytesCount == 0) {
    BOOL success = [self unwrapMessage:self.framedMessage];
    self.didComplete(success, self);

    CFRelease(self.framedMessage);
    self.framedMessage = NULL;
  }

  return missingBytesCount;
}

// MARK: Private Methods

- (CIContext*)imageContext {
  // Initializing a CIContext object is costly, so we use a singleton instead
  static CIContext* imageContext = nil;
  static dispatch_once_t onceToken;
  dispatch_once(&onceToken, ^{
    imageContext = [[CIContext alloc] initWithOptions:nil];
  });

  return imageContext;
}

- (BOOL)unwrapMessage:(CFHTTPMessageRef)framedMessage {
  size_t width = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(
      _framedMessage, (__bridge CFStringRef) @"Buffer-Width")) integerValue];
  size_t height = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(
      _framedMessage, (__bridge CFStringRef) @"Buffer-Height")) integerValue];
  _imageOrientation = [CFBridgingRelease(CFHTTPMessageCopyHeaderFieldValue(
      _framedMessage, (__bridge CFStringRef) @"Buffer-Orientation")) intValue];

  NSData* messageData = CFBridgingRelease(CFHTTPMessageCopyBody(_framedMessage));

  // Copy the pixel buffer
  CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                        kCVPixelFormatType_32BGRA, NULL, &_imageBuffer);
  if (status != kCVReturnSuccess) {
    NSLog(@"CVPixelBufferCreate failed");
    return false;
  }

  [self copyImageData:messageData toPixelBuffer:&_imageBuffer];

  return true;
}

- (void)copyImageData:(NSData*)data toPixelBuffer:(CVPixelBufferRef*)pixelBuffer {
  CVPixelBufferLockBaseAddress(*pixelBuffer, 0);

  CIImage* image = [CIImage imageWithData:data];
  [self.imageContext render:image toCVPixelBuffer:*pixelBuffer];

  CVPixelBufferUnlockBaseAddress(*pixelBuffer, 0);
}

@end

@@ -121,131 +128,132 @@ const NSUInteger kMaxReadLength = 10 * 1024;

@interface FlutterSocketConnectionFrameReader () <NSStreamDelegate>

@property(nonatomic, strong) FlutterSocketConnection* connection;
@property(nonatomic, strong) Message* message;

@end

@implementation FlutterSocketConnectionFrameReader {
  mach_timebase_info_data_t _timebaseInfo;
  NSInteger _readLength;
  int64_t _startTimeStampNs;
}

- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
  self = [super initWithDelegate:delegate];
  if (self) {
    mach_timebase_info(&_timebaseInfo);
  }

  return self;
}

- (void)startCaptureWithConnection:(FlutterSocketConnection*)connection {
  _startTimeStampNs = -1;

  self.connection = connection;
  self.message = nil;

  [self.connection openWithStreamDelegate:self];
}

- (void)stopCapture {
  [self.connection close];
}

// MARK: Private Methods

- (void)readBytesFromStream:(NSInputStream*)stream {
  if (!stream.hasBytesAvailable) {
    return;
  }

  if (!self.message) {
    self.message = [[Message alloc] init];
    _readLength = kMaxReadLength;

    __weak __typeof__(self) weakSelf = self;
    self.message.didComplete = ^(BOOL success, Message* message) {
      if (success) {
        [weakSelf didCaptureVideoFrame:message.imageBuffer
                       withOrientation:message.imageOrientation];
      }

      weakSelf.message = nil;
    };
  }

  uint8_t buffer[_readLength];
  NSInteger numberOfBytesRead = [stream read:buffer maxLength:_readLength];
  if (numberOfBytesRead < 0) {
    NSLog(@"error reading bytes from stream");
    return;
  }

  _readLength = [self.message appendBytes:buffer length:numberOfBytesRead];
  if (_readLength == -1 || _readLength > kMaxReadLength) {
    _readLength = kMaxReadLength;
  }
}

- (void)didCaptureVideoFrame:(CVPixelBufferRef)pixelBuffer
             withOrientation:(CGImagePropertyOrientation)orientation {
  int64_t currentTime = mach_absolute_time();
  int64_t currentTimeStampNs = currentTime * _timebaseInfo.numer / _timebaseInfo.denom;

  if (_startTimeStampNs < 0) {
    _startTimeStampNs = currentTimeStampNs;
  }

  RTCCVPixelBuffer* rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
  int64_t frameTimeStampNs = currentTimeStampNs - _startTimeStampNs;

  RTCVideoRotation rotation;
  switch (orientation) {
    case kCGImagePropertyOrientationLeft:
      rotation = RTCVideoRotation_90;
      break;
    case kCGImagePropertyOrientationDown:
      rotation = RTCVideoRotation_180;
      break;
    case kCGImagePropertyOrientationRight:
      rotation = RTCVideoRotation_270;
      break;
    default:
      rotation = RTCVideoRotation_0;
      break;
  }

  RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                           rotation:rotation
                                                        timeStampNs:frameTimeStampNs];

  [self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}

@end

@implementation FlutterSocketConnectionFrameReader (NSStreamDelegate)

- (void)stream:(NSStream*)aStream handleEvent:(NSStreamEvent)eventCode {
  switch (eventCode) {
    case NSStreamEventOpenCompleted:
      NSLog(@"server stream open completed");
      break;
    case NSStreamEventHasBytesAvailable:
      [self readBytesFromStream:(NSInputStream*)aStream];
      break;
    case NSStreamEventEndEncountered:
      NSLog(@"server stream end encountered");
      [self stopCapture];
      break;
    case NSStreamEventErrorOccurred:
      NSLog(@"server stream error encountered: %@", aStream.streamError.localizedDescription);
      break;
    default:
      break;
  }
}

@end
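Each frame arrives on the socket as an HTTP-style message: the headers carry Content-Length plus the Buffer-Width/Height/Orientation fields, and the body holds the encoded image that unwrapMessage: renders into a pixel buffer. A rough C++ sketch of the same accumulate-until-complete framing, under the assumption of that header-plus-body layout (FrameAccumulator is illustrative, not part of the plugin):

// Sketch of the framing logic appendBytes: implements. Append()
// returns the number of body bytes still missing, or -1 while the
// header is incomplete, mirroring the Objective-C method above.
#include <cstddef>
#include <string>

struct FrameAccumulator {
  std::string data;            // bytes received so far
  size_t content_length = 0;   // parsed from the Content-Length header
  bool header_done = false;

  long Append(const char* bytes, size_t len) {
    data.append(bytes, len);
    if (!header_done) {
      size_t end = data.find("\r\n\r\n");
      if (end == std::string::npos) return -1;  // header incomplete
      size_t pos = data.find("Content-Length:");
      if (pos == std::string::npos || pos > end) return -1;  // keep waiting (sketch)
      content_length = std::stoul(data.substr(pos + 15));
      data.erase(0, end + 4);  // keep only the body
      header_done = true;
    }
    return (long)content_length - (long)data.size();  // 0 => frame complete
  }
};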

View File

@@ -100,8 +100,9 @@ static void ForwardToHandler(FlBinaryMessenger* messenger,
  g_autoptr(GBytes) response = g_bytes_new(reply, reply_size);
  GError* error = nullptr;
  if (!fl_binary_messenger_send_response(
          messenger, (FlBinaryMessengerResponseHandle*)handler, response,
          &error)) {
    g_warning("Failed to send binary response: %s", error->message);
  }
};

View File

@@ -64,9 +64,9 @@ class CustomEncodableValue {
  // Allow implicit conversion to std::any to allow direct use of any_cast.
  // NOLINTNEXTLINE(google-explicit-constructor)
  operator std::any &() { return value_; }
  // NOLINTNEXTLINE(google-explicit-constructor)
  operator const std::any &() const { return value_; }

#if defined(FLUTTER_ENABLE_RTTI) && FLUTTER_ENABLE_RTTI
  // Passthrough to std::any's type().
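The two conversion operators exist so call sites can pass the wrapper wherever a std::any is expected and recover the payload with std::any_cast. A reduced sketch of that pattern (CustomEncodableValueLike and MyType are stand-ins, not the real class):

// Sketch: round-tripping a user type through the std::any conversion
// operators shown above. MyType models an arbitrary custom payload.
#include <any>
#include <iostream>
#include <utility>

struct MyType { int id; };

class CustomEncodableValueLike {  // reduced model of the wrapper above
 public:
  explicit CustomEncodableValueLike(std::any value) : value_(std::move(value)) {}
  operator std::any&() { return value_; }
  operator const std::any&() const { return value_; }

 private:
  std::any value_;
};

int main() {
  CustomEncodableValueLike v(std::any(MyType{42}));
  // The implicit conversion lets any_cast work on the wrapper directly.
  const MyType& t = std::any_cast<const MyType&>(static_cast<const std::any&>(v));
  std::cout << t.id << "\n";
}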

View File

@@ -28,7 +28,7 @@ class PluginRegistrar {
 public:
  // Creates a new PluginRegistrar. |core_registrar| and the messenger it
  // provides must remain valid as long as this object exists.
  explicit PluginRegistrar(FlPluginRegistrar* core_registrar);

  virtual ~PluginRegistrar();

@@ -54,7 +54,7 @@ class PluginRegistrar {
  void AddPlugin(std::unique_ptr<Plugin> plugin);

 protected:
  FlPluginRegistrar* registrar() { return registrar_; }

  // Destroys all owned plugins. Subclasses should call this at the beginning of
  // their destructors to prevent the possibility of an owned plugin trying to

@@ -63,7 +63,7 @@ class PluginRegistrar {
 private:
  // Handle for interacting with the C API's registrar.
  FlPluginRegistrar* registrar_;

  std::unique_ptr<BinaryMessenger> messenger_;

@@ -99,7 +99,7 @@ class PluginRegistrarManager {
  // Calling this multiple times for the same registrar_ref with different
  // template types results in undefined behavior.
  template <class T>
  T* GetRegistrar(FlPluginRegistrar* registrar_ref) {
    auto insert_result =
        registrars_.emplace(registrar_ref, std::make_unique<T>(registrar_ref));
    auto& registrar_pair = *(insert_result.first);

@@ -116,10 +116,10 @@ class PluginRegistrarManager {
 private:
  PluginRegistrarManager();

  using WrapperMap =
      std::map<FlPluginRegistrar*, std::unique_ptr<PluginRegistrar>>;

  static void OnRegistrarDestroyed(FlPluginRegistrar* registrar);

  WrapperMap* registrars() { return &registrars_; }
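GetRegistrar caches one C++ wrapper per C registrar handle, so repeated registrations reuse the same object. An illustrative call site in a plugin's C entry point (the entry-point name and MyPlugin are hypothetical; GetInstance() is assumed to be the manager's usual singleton accessor):

// Illustrative use of PluginRegistrarManager::GetRegistrar from a
// plugin's C entry point; the manager guarantees one wrapper per
// FlPluginRegistrar handle.
void my_plugin_register_with_registrar(FlPluginRegistrar* registrar) {
  flutter::PluginRegistrar* wrapper =
      flutter::PluginRegistrarManager::GetInstance()
          ->GetRegistrar<flutter::PluginRegistrar>(registrar);
  // MyPlugin::RegisterWithRegistrar(wrapper);  // plugin-specific setup
}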

View File

@@ -48,12 +48,10 @@ class string {
  LIB_PORTABLE_API string();
  LIB_PORTABLE_API void init(const char* str, size_t len);
  LIB_PORTABLE_API void destroy();

  inline string(const char* str) { init(str, strlen(str)); }

  inline string(const std::string& str) { init(str.c_str(), str.length()); }

  inline string(const string& o) {
    init(o.m_dynamic == 0 ? o.m_buf : o.m_dynamic, o.m_length);

@@ -73,9 +71,7 @@ class string {
    return *this;
  }

  inline size_t size() { return m_length; }

  inline const char* c_string() const {
    return m_dynamic == 0 ? m_buf : m_dynamic;

@@ -86,7 +82,7 @@ class string {
  }
};

inline std::string to_std_string(const string& str) {
  return str.std_string();
}

@@ -106,7 +102,8 @@ class vector {
 public:
  class move_ref {
    friend class vector;

   private:
    vector<T>& m_ref;
    move_ref(vector<T>& ref) : m_ref(ref) {}

@@ -155,10 +152,7 @@ class vector {
    }
  }

  ~vector() { destroy_all(); }

  vector<T>& operator=(const vector<T>& o) {
    if (m_size < o.m_size) {

@@ -207,7 +201,7 @@ class vector {
  T& operator[](size_t i) { return m_array[i]; }
  const T& operator[](size_t i) const { return m_array[i]; }
  void clear() { destroy_all(); }

 protected:
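A short sketch of how this portable string round-trips, assuming the header above is included, the library is linked, and to_std_string lives in the same namespace as portable::string so that argument-dependent lookup finds it:

// Sketch: constructing portable::string from C and C++ strings and
// converting back via to_std_string(), per the inline helpers above.
#include <cassert>
#include <string>

void demo() {
  portable::string a("hello");               // from const char*
  portable::string b(std::string("world"));  // from std::string
  assert(a.size() == 5);
  assert(b.size() == 5);

  std::string round_trip = to_std_string(a);  // found via ADL (assumption)
  assert(round_trip == "hello");
}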

View File

@@ -8,6 +8,7 @@ namespace libwebrtc {

class RTCAudioDevice : public RefCountInterface {
 public:
  typedef fixed_size_function<void()> OnDeviceChangeCallback;

 public:
  static const int kAdmMaxDeviceNameSize = 128;
  static const int kAdmMaxFileNameSize = 512;

View File

@@ -34,7 +34,9 @@ class RTCDataChannelObserver {

class RTCDataChannel : public RefCountInterface {
 public:
  virtual void Send(const uint8_t* data,
                    uint32_t size,
                    bool binary = false) = 0;

  virtual void Close() = 0;
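Send now reads one parameter per line but keeps the same contract: a raw byte pointer, a length, and a binary flag. A hedged sketch of sending a text and a binary payload over an already-created channel (assumes the rtc_data_channel.h and rtc_types.h headers shown in this commit):

// Sketch: sending text and binary payloads over an RTCDataChannel.
// The channel argument is assumed to come from CreateDataChannel().
#include <cstdint>
#include <cstring>

void SendBoth(libwebrtc::scoped_refptr<libwebrtc::RTCDataChannel> channel) {
  const char* text = "hello";
  channel->Send(reinterpret_cast<const uint8_t*>(text),
                static_cast<uint32_t>(strlen(text)),
                /*binary=*/false);

  uint8_t blob[4] = {0xDE, 0xAD, 0xBE, 0xEF};
  channel->Send(blob, sizeof(blob), /*binary=*/true);
}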

View File

@@ -1,17 +1,18 @@
#ifndef LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX
#define LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX

#include "rtc_desktop_media_list.h"
#include "rtc_types.h"
#include "rtc_video_device.h"

namespace libwebrtc {

class DesktopCapturerObserver;

class RTCDesktopCapturer : public RefCountInterface {
 public:
  enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED };

 public:
  virtual void RegisterDesktopCapturerObserver(
      DesktopCapturerObserver* observer) = 0;

@@ -30,16 +31,16 @@ class RTCDesktopCapturer : public RefCountInterface {
};

class DesktopCapturerObserver {
 public:
  virtual void OnStart(scoped_refptr<RTCDesktopCapturer> capturer) = 0;

  virtual void OnPaused(scoped_refptr<RTCDesktopCapturer> capturer) = 0;

  virtual void OnStop(scoped_refptr<RTCDesktopCapturer> capturer) = 0;

  virtual void OnError(scoped_refptr<RTCDesktopCapturer> capturer) = 0;

 protected:
  ~DesktopCapturerObserver() {}
};

}  // namespace libwebrtc

#endif  // LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX
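Since DesktopCapturerObserver is a pure interface with a protected non-virtual destructor, callers subclass it and keep the observer alive at least as long as the capturer. An illustrative implementation (the logging bodies are placeholders; only the signatures come from the header above):

// Illustrative DesktopCapturerObserver; register it with
// RegisterDesktopCapturerObserver() on a capturer instance.
#include <cstdio>

class LoggingCapturerObserver : public libwebrtc::DesktopCapturerObserver {
 public:
  void OnStart(libwebrtc::scoped_refptr<libwebrtc::RTCDesktopCapturer> capturer) override {
    printf("capture started\n");
  }
  void OnPaused(libwebrtc::scoped_refptr<libwebrtc::RTCDesktopCapturer> capturer) override {
    printf("capture paused\n");
  }
  void OnStop(libwebrtc::scoped_refptr<libwebrtc::RTCDesktopCapturer> capturer) override {
    printf("capture stopped\n");
  }
  void OnError(libwebrtc::scoped_refptr<libwebrtc::RTCDesktopCapturer> capturer) override {
    printf("capture error\n");
  }
};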

View File

@@ -11,12 +11,15 @@ class RTCDesktopMediaList;

class RTCDesktopDevice : public RefCountInterface {
 public:
  virtual scoped_refptr<RTCDesktopCapturer> CreateDesktopCapturer(
      scoped_refptr<MediaSource> source) = 0;
  virtual scoped_refptr<RTCDesktopMediaList> GetDesktopMediaList(
      DesktopType type) = 0;

 protected:
  virtual ~RTCDesktopDevice() {}
};

}  // namespace libwebrtc

#endif  // LIB_WEBRTC_RTC_VIDEO_DEVICE_HXX

View File

@@ -20,45 +20,47 @@ class MediaSource : public RefCountInterface {
  virtual bool UpdateThumbnail() = 0;

 protected:
  virtual ~MediaSource() {}
};

class MediaListObserver {
 public:
  virtual void OnMediaSourceAdded(scoped_refptr<MediaSource> source) = 0;

  virtual void OnMediaSourceRemoved(scoped_refptr<MediaSource> source) = 0;

  virtual void OnMediaSourceNameChanged(scoped_refptr<MediaSource> source) = 0;

  virtual void OnMediaSourceThumbnailChanged(
      scoped_refptr<MediaSource> source) = 0;

 protected:
  virtual ~MediaListObserver() {}
};

class RTCDesktopMediaList : public RefCountInterface {
 public:
  virtual void RegisterMediaListObserver(MediaListObserver* observer) = 0;

  virtual void DeRegisterMediaListObserver() = 0;

  virtual DesktopType type() const = 0;

  virtual int32_t UpdateSourceList(bool force_reload = false,
                                   bool get_thumbnail = true) = 0;

  virtual int GetSourceCount() const = 0;

  virtual scoped_refptr<MediaSource> GetSource(int index) = 0;

  virtual bool GetThumbnail(scoped_refptr<MediaSource> source,
                            bool notify = false) = 0;

 protected:
  ~RTCDesktopMediaList() {}
};

}  // namespace libwebrtc

#endif  // LIB_WEBRTC_RTC_DESKTOP_MEDIA_LIST_HXX
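A sketch of the enumeration flow these methods imply: refresh the list, then walk the sources and fetch thumbnails lazily. The media_list argument is assumed to come from RTCDesktopDevice::GetDesktopMediaList():

// Sketch: enumerating desktop sources through the interface above.
void ListSources(libwebrtc::scoped_refptr<libwebrtc::RTCDesktopMediaList> media_list) {
  media_list->UpdateSourceList(/*force_reload=*/true, /*get_thumbnail=*/false);
  for (int i = 0; i < media_list->GetSourceCount(); i++) {
    libwebrtc::scoped_refptr<libwebrtc::MediaSource> source = media_list->GetSource(i);
    // Fetch the thumbnail on demand; notify=true routes it to the
    // registered MediaListObserver via OnMediaSourceThumbnailChanged.
    media_list->GetThumbnail(source, /*notify=*/true);
  }
}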

View File

@@ -17,8 +17,8 @@ class RTCMediaConstraints : public RefCountInterface {
      kDAEchoCancellation;  // googDAEchoCancellation
  LIB_WEBRTC_API static const char* kAutoGainControl;   // googAutoGainControl
  LIB_WEBRTC_API static const char* kNoiseSuppression;  // googNoiseSuppression
  LIB_WEBRTC_API static const char* kHighpassFilter;    // googHighpassFilter
  LIB_WEBRTC_API static const char* kAudioMirroring;    // googAudioMirroring
  LIB_WEBRTC_API static const char*
      kAudioNetworkAdaptorConfig;  // googAudioNetworkAdaptorConfig

View File

@@ -56,8 +56,8 @@ enum RTCIceConnectionState {
};

class RTCStatsMember : public RefCountInterface {
 public:
  // Member value types.
  enum Type {
    kBool,    // bool
    kInt32,   // int32_t

@@ -78,6 +78,7 @@ public:
    kMapStringUint64,  // std::map<std::string, uint64_t>
    kMapStringDouble,  // std::map<std::string, double>
  };

 public:
  virtual string GetName() const = 0;

  virtual Type GetType() const = 0;

@@ -99,7 +100,8 @@ public:
  virtual vector<string> ValueSequenceString() const = 0;
  virtual map<string, uint64_t> ValueMapStringUint64() const = 0;
  virtual map<string, double> ValueMapStringDouble() const = 0;

 protected:
  virtual ~RTCStatsMember() {}
};

@@ -173,7 +175,8 @@ class RTCPeerConnection : public RefCountInterface {
  virtual int RemoveStream(scoped_refptr<RTCMediaStream> stream) = 0;

  virtual scoped_refptr<RTCMediaStream> CreateLocalMediaStream(
      const string stream_id) = 0;

  virtual scoped_refptr<RTCDataChannel> CreateDataChannel(
      const string label,
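A sketch of consuming an RTCStatsMember by dispatching on GetType(). Only the sequence and map accessors are visible in this hunk; the scalar accessor names used here (ValueBool, ValueInt32) follow the same naming pattern but are assumptions:

// Sketch: reading one stats member via the Type enum above.
// Unhandled types fall through to a placeholder.
#include <cstdio>

void PrintMember(libwebrtc::scoped_refptr<libwebrtc::RTCStatsMember> member) {
  switch (member->GetType()) {
    case libwebrtc::RTCStatsMember::kBool:
      printf("%s=%d\n", member->GetName().c_string(), member->ValueBool());
      break;
    case libwebrtc::RTCStatsMember::kInt32:
      printf("%s=%d\n", member->GetName().c_string(), member->ValueInt32());
      break;
    default:
      printf("%s=<unhandled type>\n", member->GetName().c_string());
      break;
  }
}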

View File

@@ -34,8 +34,8 @@ class RTCPeerConnectionFactory : public RefCountInterface {
  virtual scoped_refptr<RTCAudioDevice> GetAudioDevice() = 0;
  virtual scoped_refptr<RTCVideoDevice> GetVideoDevice() = 0;
#ifdef RTC_DESKTOP_DEVICE
  virtual scoped_refptr<RTCDesktopDevice> GetDesktopDevice() = 0;
#endif
  virtual scoped_refptr<RTCAudioSource> CreateAudioSource(
      const string audio_source_label) = 0;

@@ -44,7 +44,7 @@ class RTCPeerConnectionFactory : public RefCountInterface {
      scoped_refptr<RTCVideoCapturer> capturer,
      const string video_source_label,
      scoped_refptr<RTCMediaConstraints> constraints) = 0;
#ifdef RTC_DESKTOP_DEVICE
  virtual scoped_refptr<RTCVideoSource> CreateDesktopSource(
      scoped_refptr<RTCDesktopCapturer> capturer,
      const string video_source_label,
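Taken together with rtc_desktop_device.h above, the desktop path wires up as device, then capturer, then video source. An illustrative helper, valid only when RTC_DESKTOP_DEVICE is defined (the factory, screen source, and constraints are assumed to exist already):

// Sketch of the desktop-capture wiring implied by the factory methods
// above; "screen" is just an illustrative source label.
#ifdef RTC_DESKTOP_DEVICE
libwebrtc::scoped_refptr<libwebrtc::RTCVideoSource> MakeScreenSource(
    libwebrtc::scoped_refptr<libwebrtc::RTCPeerConnectionFactory> factory,
    libwebrtc::scoped_refptr<libwebrtc::MediaSource> screen,
    libwebrtc::scoped_refptr<libwebrtc::RTCMediaConstraints> constraints) {
  auto device = factory->GetDesktopDevice();
  auto capturer = device->CreateDesktopCapturer(screen);
  return factory->CreateDesktopSource(capturer, "screen", constraints);
}
#endif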

View File

@@ -20,8 +20,8 @@ namespace libwebrtc {

enum { kMaxIceServerSize = 8 };

// template <typename T>
// using vector = bsp::inlined_vector<T, 16, true>;

template <typename Key, typename T>
using map = std::map<Key, T>;

@@ -32,8 +32,8 @@ enum class RTCMediaType { ANY, AUDIO, VIDEO, DATA };

using string = portable::string;

// template <typename Key, typename T>
// using map = portable::map<Key, T>;

template <typename T>
using vector = portable::vector<T>;

View File

@@ -5,98 +5,100 @@
#endif

#include "uuidxx.h"

#include <inttypes.h>
#include <stdio.h>
#include <string.h>

#include <random>

using namespace std;
using namespace uuidxx;

bool uuid::operator==(const uuid& guid2) const {
  return memcmp(&guid2, this, sizeof(uuid)) == 0;
}

bool uuid::operator!=(const uuid& guid2) const {
  return !(*this == guid2);
}

bool uuid::operator<(const uuid& guid2) const {
  return memcmp(this, &guid2, sizeof(uuid)) < 0;
}

bool uuid::operator>(const uuid& guid2) const {
  return memcmp(this, &guid2, sizeof(uuid)) > 0;
}

uuid::uuid(const std::string& uuidString) : uuid(uuidString.c_str()) {}

uuid::uuid(const char* uuidString) {
  if (uuidString == nullptr) {
    // special case, and prevents random bugs
    memset(this, 0, sizeof(uuid));
    return;
  }
  if (uuidString[0] == '{') {
    sscanf(uuidString,
           "{%08" SCNx32 "-%04" SCNx16 "-%04" SCNx16 "-%02" SCNx8 "%02" SCNx8
           "-%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8
           "%02" SCNx8 "}",
           &Uuid.Data1, &Uuid.Data2, &Uuid.Data3, &Uuid.Data4[0],
           &Uuid.Data4[1], &Uuid.Data4[2], &Uuid.Data4[3], &Uuid.Data4[4],
           &Uuid.Data4[5], &Uuid.Data4[6], &Uuid.Data4[7]);
  } else {
    sscanf(uuidString,
           "%08" SCNx32 "-%04" SCNx16 "-%04" SCNx16 "-%02" SCNx8 "%02" SCNx8
           "-%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8
           "%02" SCNx8 "",
           &Uuid.Data1, &Uuid.Data2, &Uuid.Data3, &Uuid.Data4[0],
           &Uuid.Data4[1], &Uuid.Data4[2], &Uuid.Data4[3], &Uuid.Data4[4],
           &Uuid.Data4[5], &Uuid.Data4[6], &Uuid.Data4[7]);
  }
}

string uuid::ToString(bool withBraces) const {
  char buffer[39];
  sprintf(buffer, "%s%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X%s",
          withBraces ? "{" : "", Uuid.Data1, Uuid.Data2, Uuid.Data3,
          Uuid.Data4[0], Uuid.Data4[1], Uuid.Data4[2], Uuid.Data4[3],
          Uuid.Data4[4], Uuid.Data4[5], Uuid.Data4[6], Uuid.Data4[7],
          withBraces ? "}" : "");
  return buffer;
}

uuid uuid::FromString(const char* uuidString) {
  uuid temp(uuidString);
  return temp;
}

uuid uuid::FromString(const std::string& uuidString) {
  uuid temp(uuidString.c_str());
  return temp;
}

uuid uuid::Generatev4() {
  // mach-o does not support TLS and clang still has issues with thread_local
#if !defined(__APPLE__) && !defined(__clang__)
  thread_local std::random_device rd;
  thread_local auto gen = std::mt19937_64(rd());
#else
  std::random_device rd;
  std::mt19937_64 gen(rd());
#endif
  std::uniform_int_distribution<uint64_t> dis64;

  uuid newGuid;
  newGuid.WideIntegers[0] = dis64(gen);
  newGuid.WideIntegers[1] = dis64(gen);

  // RFC4122 defines (pseudo)random uuids (in big-endian notation):
  // MSB of DATA4[0] specifies the variant and should be 0b10 to indicate
  // standard uuid, and MSB of DATA3 should be 0b0100 to indicate version 4
  newGuid.Bytes.Data4[0] =
      (newGuid.Bytes.Data4[0] & 0x3F) | static_cast<uint8_t>(0x80);
  newGuid.Bytes.Data3[1] =
      (newGuid.Bytes.Data3[1] & 0x0F) | static_cast<uint8_t>(0x40);

  return newGuid;
}
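The two masking expressions are the RFC 4122 bits: the top two bits of Data4[0] become 10 (the variant), and the high nibble of Data3's high byte becomes 0100 (version 4), which is why ToString() always shows a 4 at the start of the third group. A quick self-check against the header below, assuming a little-endian machine (where Bytes.Data3[1] is the high byte of Uuid.Data3):

// Sketch: generate a v4 uuid and verify the forced RFC 4122 bits.
#include <cassert>
#include <cstdio>
#include "uuidxx.h"

int main() {
  uuidxx::uuid id = uuidxx::uuid::Generate();           // defaults to Version4
  assert((id.Bytes.Data4[0] & 0xC0) == 0x80);           // variant bits 10xxxxxx
  assert((id.Bytes.Data3[1] & 0xF0) == 0x40);           // version nibble 0100xxxx
  printf("%s\n", id.ToString().c_str());                // e.g. {xxxxxxxx-xxxx-4xxx-...}
  return 0;
}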

View File

@@ -1,80 +1,66 @@
#pragma once

#include <cstdint>
#include <stdexcept>
#include <string>

namespace uuidxx {
enum class Variant { Nil, Version1, Version2, Version3, Version4, Version5 };

class NotImplemented : public std::logic_error {
 public:
  NotImplemented() : std::logic_error("Function not yet implemented"){};
};

union uuid {
 private:
  static uuid Generatev4();

 public:
  uint64_t WideIntegers[2];
  struct _internalData {
    uint32_t Data1;
    uint16_t Data2;
    uint16_t Data3;
    uint8_t Data4[8];
  } Uuid;
  struct _byteRepresentation {
    uint8_t Data1[4];
    uint8_t Data2[2];
    uint8_t Data3[2];
    uint8_t Data4[8];
  } Bytes;

  bool operator==(const uuid& guid2) const;
  bool operator!=(const uuid& guid2) const;
  bool operator<(const uuid& guid2) const;
  bool operator>(const uuid& guid2) const;

  uuid() = default;

  uuid(const char* uuidString);
  uuid(const std::string& uuidString);
  static uuid FromString(const char* uuidString);
  static uuid FromString(const std::string& uuidString);

  static inline uuid Generate(Variant v = Variant::Version4) {
    switch (v) {
      case Variant::Nil:
        return uuid(nullptr);  // special case;
      case Variant::Version1:
      case Variant::Version2:
      case Variant::Version3:
      case Variant::Version5:
        throw new NotImplemented();
      case Variant::Version4:
        return Generatev4();
    }
    return uuid(nullptr);
  }

  std::string ToString(bool withBraces = true) const;
};

static_assert(sizeof(uuid) == 2 * sizeof(int64_t),
              "Check uuid type declaration/padding!");
}  // namespace uuidxx