First commit.

This commit is contained in:
维伟段
2018-04-27 10:51:53 +08:00
parent 8cd17ded3b
commit 6bb2d0c15f
170 changed files with 8925 additions and 0 deletions

3
CHANGELOG.md Normal file
View File

@ -0,0 +1,3 @@
## [0.0.1] - TODO: Add release date.
* TODO: Describe initial release.

1
LICENSE Normal file
View File

@ -0,0 +1 @@
TODO: Add your license here.

6
android/.classpath Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8/"/>
<classpathentry kind="con" path="org.eclipse.buildship.core.gradleclasspathcontainer"/>
<classpathentry kind="output" path="bin"/>
</classpath>

8
android/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures

23
android/.project Normal file
View File

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>webrtc</name>
<comment>Project webrtc created by Buildship.</comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.buildship.core.gradleprojectbuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.buildship.core.gradleprojectnature</nature>
</natures>
</projectDescription>

View File

@ -0,0 +1,2 @@
#Fri Apr 27 10:40:39 CST 2018
connection.project.dir=../example/android

41
android/build.gradle Normal file
View File

@ -0,0 +1,41 @@
group 'com.cloudwebrtc.webrtc'
version '1.0-SNAPSHOT'
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.0.1'
}
}
rootProject.allprojects {
repositories {
google()
jcenter()
}
}
apply plugin: 'com.android.library'
android {
compileSdkVersion 27
defaultConfig {
minSdkVersion 16
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
ndk {
abiFilters "armeabi-v7a", "x86"
}
}
lintOptions {
disable 'InvalidPackage'
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
}

View File

@ -0,0 +1 @@
org.gradle.jvmargs=-Xmx1536M

Binary file not shown.

BIN
android/libs/libwebrtc.jar Normal file

Binary file not shown.

1
android/settings.gradle Normal file
View File

@ -0,0 +1 @@
rootProject.name = 'webrtc'

View File

@ -0,0 +1,3 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.cloudwebrtc.webrtc">
</manifest>

View File

@ -0,0 +1,39 @@
package com.cloudwebrtc.webrtc;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.PluginRegistry.Registrar;
/**
* WebrtcPlugin
*/
public class WebrtcPlugin implements MethodCallHandler {
private final Registrar registrar;
private final MethodChannel channel;
/**
* Plugin registration.
*/
public static void registerWith(Registrar registrar) {
final MethodChannel channel = new MethodChannel(registrar.messenger(), "webrtc");
channel.setMethodCallHandler(new WebrtcPlugin(channel,registrar));
}
private WebrtcPlugin(Registrar registrar, MethodChannel channel) {
this.registrar = registrar;
this.channel = channel;
//channel.invokeMethod("onMessage", message.getData());
}
@Override
public void onMethodCall(MethodCall call, Result result) {
if (call.method.equals("getPlatformVersion")) {
result.success("Android " + android.os.Build.VERSION.RELEASE);
} else {
result.notImplemented();
}
}
}

10
example/.gitignore vendored Normal file
View File

@ -0,0 +1,10 @@
.DS_Store
.atom/
.idea
.vscode/
.packages
.pub/
build/
ios/.generated/
packages
.flutter-plugins

8
example/.metadata Normal file
View File

@ -0,0 +1,8 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: 3b84503403563ba77cf5388b98a89e39f2c2151e
channel: dev

8
example/README.md Normal file
View File

@ -0,0 +1,8 @@
# webrtc_example
Demonstrates how to use the webrtc plugin.
## Getting Started
For help getting started with Flutter, view our online
[documentation](https://flutter.io/).

10
example/android/.gitignore vendored Normal file
View File

@ -0,0 +1,10 @@
*.iml
*.class
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures
GeneratedPluginRegistrant.java

17
example/android/.project Normal file
View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>android</name>
<comment>Project android created by Buildship.</comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.buildship.core.gradleprojectbuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.buildship.core.gradleprojectnature</nature>
</natures>
</projectDescription>

View File

@ -0,0 +1,2 @@
#Fri Apr 27 10:40:39 CST 2018
connection.project.dir=

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8/"/>
<classpathentry kind="con" path="org.eclipse.buildship.core.gradleclasspathcontainer"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>app</name>
<comment>Project app created by Buildship.</comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.buildship.core.gradleprojectbuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.buildship.core.gradleprojectnature</nature>
</natures>
</projectDescription>

View File

@ -0,0 +1,2 @@
#Fri Apr 27 10:40:39 CST 2018
connection.project.dir=..

View File

@ -0,0 +1,51 @@
def localProperties = new Properties()
def localPropertiesFile = rootProject.file('local.properties')
if (localPropertiesFile.exists()) {
localPropertiesFile.withReader('UTF-8') { reader ->
localProperties.load(reader)
}
}
def flutterRoot = localProperties.getProperty('flutter.sdk')
if (flutterRoot == null) {
throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
}
apply plugin: 'com.android.application'
apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"
android {
compileSdkVersion 27
lintOptions {
disable 'InvalidPackage'
}
defaultConfig {
// TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
applicationId "com.cloudwebrtc.webrtcexample"
minSdkVersion 16
targetSdkVersion 27
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
// TODO: Add your own signing config for the release build.
// Signing with the debug keys for now, so `flutter run --release` works.
signingConfig signingConfigs.debug
}
}
}
flutter {
source '../..'
}
dependencies {
testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.1'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
}

View File

@ -0,0 +1,39 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.cloudwebrtc.webrtcexample">
<!-- The INTERNET permission is required for development. Specifically,
flutter needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
<!-- io.flutter.app.FlutterApplication is an android.app.Application that
calls FlutterMain.startInitialization(this); in its onCreate method.
In most cases you can leave this as-is, but you if you want to provide
additional functionality it is fine to subclass or reimplement
FlutterApplication and put your custom class here. -->
<application
android:name="io.flutter.app.FlutterApplication"
android:label="webrtc_example"
android:icon="@mipmap/ic_launcher">
<activity
android:name=".MainActivity"
android:launchMode="singleTop"
android:theme="@style/LaunchTheme"
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|locale|layoutDirection|fontScale"
android:hardwareAccelerated="true"
android:windowSoftInputMode="adjustResize">
<!-- This keeps the window background of the activity showing
until Flutter renders its first frame. It can be removed if
there is no splash screen (such as the default splash screen
defined in @style/LaunchTheme). -->
<meta-data
android:name="io.flutter.app.android.SplashScreenUntilFirstFrame"
android:value="true" />
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
</application>
</manifest>

View File

@ -0,0 +1,14 @@
package com.cloudwebrtc.webrtcexample;
import android.os.Bundle;
import io.flutter.app.FlutterActivity;
import io.flutter.plugins.GeneratedPluginRegistrant;
public class MainActivity extends FlutterActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
GeneratedPluginRegistrant.registerWith(this);
}
}

View File

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="@android:color/white" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

Binary file not shown.

After

Width:  |  Height:  |  Size: 544 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 442 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 721 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
Flutter draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
</resources>

View File

@ -0,0 +1,29 @@
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.0.1'
}
}
allprojects {
repositories {
google()
jcenter()
}
}
rootProject.buildDir = '../build'
subprojects {
project.buildDir = "${rootProject.buildDir}/${project.name}"
}
subprojects {
project.evaluationDependsOn(':app')
}
task clean(type: Delete) {
delete rootProject.buildDir
}

View File

@ -0,0 +1 @@
org.gradle.jvmargs=-Xmx1536M

Binary file not shown.

View File

@ -0,0 +1,6 @@
#Fri Jun 23 08:50:38 CEST 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.1-all.zip

160
example/android/gradlew vendored Executable file
View File

@ -0,0 +1,160 @@
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
echo "$*"
}
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"

90
example/android/gradlew.bat vendored Normal file
View File

@ -0,0 +1,90 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windowz variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@ -0,0 +1,15 @@
include ':app'
def flutterProjectRoot = rootProject.projectDir.parentFile.toPath()
def plugins = new Properties()
def pluginsFile = new File(flutterProjectRoot.toFile(), '.flutter-plugins')
if (pluginsFile.exists()) {
pluginsFile.withReader('UTF-8') { reader -> plugins.load(reader) }
}
plugins.each { name, path ->
def pluginDirectory = flutterProjectRoot.resolve(path).resolve('android').toFile()
include ":$name"
project(":$name").projectDir = pluginDirectory
}

42
example/ios/.gitignore vendored Normal file
View File

@ -0,0 +1,42 @@
.idea/
.vagrant/
.sconsign.dblite
.svn/
.DS_Store
*.swp
profile
DerivedData/
build/
GeneratedPluginRegistrant.h
GeneratedPluginRegistrant.m
*.pbxuser
*.mode1v3
*.mode2v3
*.perspectivev3
!default.pbxuser
!default.mode1v3
!default.mode2v3
!default.perspectivev3
xcuserdata
*.moved-aside
*.pyc
*sync/
Icon?
.tags*
/Flutter/app.flx
/Flutter/app.zip
/Flutter/flutter_assets/
/Flutter/App.framework
/Flutter/Flutter.framework
/Flutter/Generated.xcconfig
/ServiceDefinitions.json
Pods/

64
example/ios/Podfile Normal file
View File

@ -0,0 +1,64 @@
# Uncomment this line to define a global platform for your project
# platform :ios, '9.0'
# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
ENV['COCOAPODS_DISABLE_STATS'] = 'true'
def parse_KV_file(file, separator='=')
file_abs_path = File.expand_path(file)
if !File.exists? file_abs_path
return [];
end
pods_ary = []
skip_line_start_symbols = ["#", "/"]
File.foreach(file_abs_path) { |line|
next if skip_line_start_symbols.any? { |symbol| line =~ /^\s*#{symbol}/ }
plugin = line.split(pattern=separator)
if plugin.length == 2
podname = plugin[0].strip()
path = plugin[1].strip()
podpath = File.expand_path("#{path}", file_abs_path)
pods_ary.push({:name => podname, :path => podpath});
else
puts "Invalid plugin specification: #{line}"
end
}
return pods_ary
end
target 'Runner' do
# Prepare symlinks folder. We use symlinks to avoid having Podfile.lock
# referring to absolute paths on developers' machines.
system('rm -rf Pods/.symlinks')
system('mkdir -p Pods/.symlinks/flutter')
system('mkdir -p Pods/.symlinks/plugins')
# Flutter Pods
generated_xcode_build_settings = parse_KV_file('./Flutter/Generated.xcconfig')
if generated_xcode_build_settings.empty?
puts "Generated.xcconfig must exist. If you're running pod install manually, make sure flutter packages get is executed first."
end
generated_xcode_build_settings.map { |p|
if p[:name] == 'FLUTTER_FRAMEWORK_DIR'
symlink = File.join('Pods', '.symlinks', 'flutter', File.basename(p[:path]))
File.symlink(p[:path], symlink)
pod 'Flutter', :path => symlink
end
}
# Plugin Pods
plugin_pods = parse_KV_file('../.flutter-plugins')
plugin_pods.map { |p|
symlink = File.join('Pods', '.symlinks', 'plugins', File.basename(p[:path]))
File.symlink(p[:path], symlink)
pod p[:name], :path => File.join(symlink, 'ios')
}
end
post_install do |installer|
installer.pods_project.targets.each do |target|
target.build_configurations.each do |config|
config.build_settings['ENABLE_BITCODE'] = 'NO'
end
end
end

22
example/ios/Podfile.lock Normal file
View File

@ -0,0 +1,22 @@
PODS:
- Flutter (1.0.0)
- webrtc (0.0.1):
- Flutter
DEPENDENCIES:
- Flutter (from `Pods/.symlinks/flutter/ios`)
- webrtc (from `Pods/.symlinks/plugins/flutter/ios`)
EXTERNAL SOURCES:
Flutter:
:path: Pods/.symlinks/flutter/ios
webrtc:
:path: Pods/.symlinks/plugins/flutter/ios
SPEC CHECKSUMS:
Flutter: 9d0fac939486c9aba2809b7982dfdbb47a7b0296
webrtc: a92635e02d571aa2a0573504a976d3f1e32f0550
PODFILE CHECKSUM: 0420ab312a523ae0eecfb1fc19ea51dd82cc17c7
COCOAPODS: 1.5.0

View File

@ -0,0 +1,498 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
0B2E52A1193C475CC824D4E23320D30B /* flutter_assets in Resources */ = {isa = PBXBuildFile; fileRef = 127FE0431EC071958D67B4152ABE0141 /* flutter_assets */; };
20D64D35A2A20226B995BD7DE3CC03A5 /* libPods-Runner.a in Frameworks */ = {isa = PBXBuildFile; fileRef = C5CCD4E6461D1D83886AE2E0CB510AF0 /* libPods-Runner.a */; };
229CB39F2D80040AEE1ED23F285ACA94 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A79A511C028ADC462437FA86E6F91AFF /* LaunchScreen.storyboard */; };
5AFE2FDDDEBDE94ED82B40DC8B153987 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 9428FFA3DA242B0B23CE49085F8A8C98 /* GeneratedPluginRegistrant.m */; };
6354C7FECC33A60C0696301773D78092 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 49096491DEF677A67E9C9D1D2F642FB2 /* AppFrameworkInfo.plist */; };
970259CB2284F35CC747F170CC0DF853 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = FF4B47BE30BEAD46325791691A899103 /* Main.storyboard */; };
9D577FED5514C03E7AB7E66F98D7A053 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 33D01B15D123D484F5CECECF4955185C /* main.m */; };
9D7F825691A20481792F11A568AD8FAD /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = A333CF75606D18C461DFF9ABF7427251 /* Assets.xcassets */; };
AD0EC61B92FA8B6B7CF43562D02EBD49 /* Debug.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 5F00C8B4B6548724073C4AAF93AD0795 /* Debug.xcconfig */; };
C43A44975A03EE6E1AB6C84DE6994F08 /* Generated.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 2271EF72D6523FE6EB695CB1AFED02B6 /* Generated.xcconfig */; };
CAD7D9F3E9E40787135E737FEDF280DF /* App.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = F6B369AAD73AF22F43C5033650E3A5E7 /* App.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
CE5D87634A0A12EE98C9DC1D9D5BFFDE /* App.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = F6B369AAD73AF22F43C5033650E3A5E7 /* App.framework */; };
DF12DBCE3921EA73822D14A03AD6F79C /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 68DD12D69C99694B0B6CBEAFEC0CBD89 /* AppDelegate.m */; };
DFD15F91EC1333E9C1F771E4E93987B1 /* Flutter.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B4035E976C526A3EEB983C311F82E74E /* Flutter.framework */; };
F16B5D11B6B398BD29B823EAA9ACFC69 /* Flutter.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = B4035E976C526A3EEB983C311F82E74E /* Flutter.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
/* End PBXBuildFile section */
/* Begin PBXCopyFilesBuildPhase section */
E55F38183CFB6C120953C92EF0C091F4 /* Embed Frameworks */ = {
isa = PBXCopyFilesBuildPhase;
buildActionMask = 2147483647;
dstPath = "";
dstSubfolderSpec = 10;
files = (
CAD7D9F3E9E40787135E737FEDF280DF /* App.framework in Embed Frameworks */,
F16B5D11B6B398BD29B823EAA9ACFC69 /* Flutter.framework in Embed Frameworks */,
);
name = "Embed Frameworks";
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXCopyFilesBuildPhase section */
/* Begin PBXFileReference section */
127FE0431EC071958D67B4152ABE0141 /* flutter_assets */ = {isa = PBXFileReference; lastKnownFileType = folder; name = flutter_assets; path = Flutter/flutter_assets; sourceTree = SOURCE_ROOT; };
21CE142FB216A639B6BE578625A89011 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
2271EF72D6523FE6EB695CB1AFED02B6 /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
2959FB0650C06639B955C55500B01293 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = "<group>"; };
33D01B15D123D484F5CECECF4955185C /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
49096491DEF677A67E9C9D1D2F642FB2 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
5E401A09567E49F31CDA639224D64C9B /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
5F00C8B4B6548724073C4AAF93AD0795 /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
68DD12D69C99694B0B6CBEAFEC0CBD89 /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
6E0F8D876B170C77FD896E647713843C /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
843A0C17DB3B7B95DB4CE8FC36407EDD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
8DCF6C17523B59ABB095575CE6B532F1 /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
9428FFA3DA242B0B23CE49085F8A8C98 /* GeneratedPluginRegistrant.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = "<group>"; };
A333CF75606D18C461DFF9ABF7427251 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
B4035E976C526A3EEB983C311F82E74E /* Flutter.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Flutter.framework; path = Flutter/Flutter.framework; sourceTree = "<group>"; };
C5CCD4E6461D1D83886AE2E0CB510AF0 /* libPods-Runner.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-Runner.a"; sourceTree = BUILT_PRODUCTS_DIR; };
F4B25E6138001208114FB8F975774109 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
F6B369AAD73AF22F43C5033650E3A5E7 /* App.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = App.framework; path = Flutter/App.framework; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
00C27919A6B50DC1DE2CD3F64D33BCCA /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
CE5D87634A0A12EE98C9DC1D9D5BFFDE /* App.framework in Frameworks */,
DFD15F91EC1333E9C1F771E4E93987B1 /* Flutter.framework in Frameworks */,
20D64D35A2A20226B995BD7DE3CC03A5 /* libPods-Runner.a in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
1F6DF5EA9BFBBD5EB76865BC73D9796E /* Supporting Files */ = {
isa = PBXGroup;
children = (
33D01B15D123D484F5CECECF4955185C /* main.m */,
);
name = "Supporting Files";
sourceTree = "<group>";
};
43EB847F420AC177D9B217DF4AF4DD93 /* Products */ = {
isa = PBXGroup;
children = (
6E0F8D876B170C77FD896E647713843C /* Runner.app */,
);
name = Products;
sourceTree = "<group>";
};
6FF1D9148056243A2D8A6D36BFA80D89 /* Pods */ = {
isa = PBXGroup;
children = (
);
name = Pods;
sourceTree = "<group>";
};
776D035819192312F762635C3CC86C98 /* Frameworks */ = {
isa = PBXGroup;
children = (
C5CCD4E6461D1D83886AE2E0CB510AF0 /* libPods-Runner.a */,
);
name = Frameworks;
sourceTree = "<group>";
};
BB99F626A23DAB811A61522FC9119888 = {
isa = PBXGroup;
children = (
D200E37E3AAFC3BCF315E1D381844A67 /* Flutter */,
776D035819192312F762635C3CC86C98 /* Frameworks */,
6FF1D9148056243A2D8A6D36BFA80D89 /* Pods */,
43EB847F420AC177D9B217DF4AF4DD93 /* Products */,
E63CF9E523D767D2740E40098133A381 /* Runner */,
);
sourceTree = "<group>";
};
D200E37E3AAFC3BCF315E1D381844A67 /* Flutter */ = {
isa = PBXGroup;
children = (
127FE0431EC071958D67B4152ABE0141 /* flutter_assets */,
F6B369AAD73AF22F43C5033650E3A5E7 /* App.framework */,
49096491DEF677A67E9C9D1D2F642FB2 /* AppFrameworkInfo.plist */,
5F00C8B4B6548724073C4AAF93AD0795 /* Debug.xcconfig */,
B4035E976C526A3EEB983C311F82E74E /* Flutter.framework */,
2271EF72D6523FE6EB695CB1AFED02B6 /* Generated.xcconfig */,
5E401A09567E49F31CDA639224D64C9B /* Release.xcconfig */,
);
name = Flutter;
sourceTree = "<group>";
};
E63CF9E523D767D2740E40098133A381 /* Runner */ = {
isa = PBXGroup;
children = (
1F6DF5EA9BFBBD5EB76865BC73D9796E /* Supporting Files */,
8DCF6C17523B59ABB095575CE6B532F1 /* AppDelegate.h */,
68DD12D69C99694B0B6CBEAFEC0CBD89 /* AppDelegate.m */,
A333CF75606D18C461DFF9ABF7427251 /* Assets.xcassets */,
2959FB0650C06639B955C55500B01293 /* GeneratedPluginRegistrant.h */,
9428FFA3DA242B0B23CE49085F8A8C98 /* GeneratedPluginRegistrant.m */,
F4B25E6138001208114FB8F975774109 /* Info.plist */,
A79A511C028ADC462437FA86E6F91AFF /* LaunchScreen.storyboard */,
FF4B47BE30BEAD46325791691A899103 /* Main.storyboard */,
);
path = Runner;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
E30F7D64842177B99FDDF63DCA10BCDE /* Runner */ = {
isa = PBXNativeTarget;
buildConfigurationList = 92578F7B9E3F6AC8F17192C290223DCF /* Build configuration list for PBXNativeTarget "Runner" */;
buildPhases = (
AC0061BF939EE938EA540EE9D04BF3B3 /* [CP] Check Pods Manifest.lock */,
7E9D2C9174A80CAA4BFDFDCB4F72C7A8 /* Run Script */,
FBC56BB6E87C12DAFB6BB5C3C53DF533 /* Sources */,
00C27919A6B50DC1DE2CD3F64D33BCCA /* Frameworks */,
8CF819BBBFB9AE05A1821B127DF85DF8 /* Resources */,
E55F38183CFB6C120953C92EF0C091F4 /* Embed Frameworks */,
3F42640EB4246C17D4CDE846C95B84D3 /* Thin Binary */,
77B83F427A96D66384A2D5A974F60E05 /* [CP] Embed Pods Frameworks */,
);
buildRules = (
);
dependencies = (
);
name = Runner;
productName = Runner;
productReference = 6E0F8D876B170C77FD896E647713843C /* Runner.app */;
productType = "com.apple.product-type.application";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
18C1723432283E0CC55F10A6DCFD9E02 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0910;
ORGANIZATIONNAME = "The Chromium Authors";
TargetAttributes = {
E30F7D64842177B99FDDF63DCA10BCDE = {
CreatedOnToolsVersion = 7.3.1;
DevelopmentTeam = 5J859T6AE8;
};
};
};
buildConfigurationList = 9FDCD9B0638A340D0E561864FC06FD7A /* Build configuration list for PBXProject "Runner" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = BB99F626A23DAB811A61522FC9119888;
productRefGroup = 43EB847F420AC177D9B217DF4AF4DD93 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
E30F7D64842177B99FDDF63DCA10BCDE /* Runner */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
8CF819BBBFB9AE05A1821B127DF85DF8 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
6354C7FECC33A60C0696301773D78092 /* AppFrameworkInfo.plist in Resources */,
9D7F825691A20481792F11A568AD8FAD /* Assets.xcassets in Resources */,
AD0EC61B92FA8B6B7CF43562D02EBD49 /* Debug.xcconfig in Resources */,
C43A44975A03EE6E1AB6C84DE6994F08 /* Generated.xcconfig in Resources */,
229CB39F2D80040AEE1ED23F285ACA94 /* LaunchScreen.storyboard in Resources */,
970259CB2284F35CC747F170CC0DF853 /* Main.storyboard in Resources */,
0B2E52A1193C475CC824D4E23320D30B /* flutter_assets in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXShellScriptBuildPhase section */
3F42640EB4246C17D4CDE846C95B84D3 /* Thin Binary */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Thin Binary";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" thin";
};
77B83F427A96D66384A2D5A974F60E05 /* [CP] Embed Pods Frameworks */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
"${SRCROOT}/Pods/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh",
"${PODS_ROOT}/.symlinks/plugins/flutter/ios/WebRTC.framework",
);
name = "[CP] Embed Pods Frameworks";
outputPaths = (
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/WebRTC.framework",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
showEnvVarsInLog = 0;
};
7E9D2C9174A80CAA4BFDFDCB4F72C7A8 /* Run Script */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "Run Script";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
};
AC0061BF939EE938EA540EE9D04BF3B3 /* [CP] Check Pods Manifest.lock */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
"${PODS_PODFILE_DIR_PATH}/Podfile.lock",
"${PODS_ROOT}/Manifest.lock",
);
name = "[CP] Check Pods Manifest.lock";
outputPaths = (
"$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
showEnvVarsInLog = 0;
};
/* End PBXShellScriptBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
FBC56BB6E87C12DAFB6BB5C3C53DF533 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
DF12DBCE3921EA73822D14A03AD6F79C /* AppDelegate.m in Sources */,
5AFE2FDDDEBDE94ED82B40DC8B153987 /* GeneratedPluginRegistrant.m in Sources */,
9D577FED5514C03E7AB7E66F98D7A053 /* main.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXVariantGroup section */
A79A511C028ADC462437FA86E6F91AFF /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
843A0C17DB3B7B95DB4CE8FC36407EDD /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
FF4B47BE30BEAD46325791691A899103 /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
21CE142FB216A639B6BE578625A89011 /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
0D181B1A2BE749D68B72CB6CAB984878 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 5E401A09567E49F31CDA639224D64C9B /* Release.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
4C45D427E97AD8F895ECED02C40BF5E0 /* Release */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 5E401A09567E49F31CDA639224D64C9B /* Release.xcconfig */;
buildSettings = {
ARCHS = arm64;
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = 5J859T6AE8;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
"$(PROJECT_DIR)",
);
INFOPLIST_FILE = Runner/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.webrtcExample;
PRODUCT_NAME = "$(TARGET_NAME)";
};
name = Release;
};
879B60EB63E8E59028AA9FB0AAE288D8 /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 5F00C8B4B6548724073C4AAF93AD0795 /* Debug.xcconfig */;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
AF60501F7ADFF8F784D4241A34A3EA7C /* Debug */ = {
isa = XCBuildConfiguration;
baseConfigurationReference = 5F00C8B4B6548724073C4AAF93AD0795 /* Debug.xcconfig */;
buildSettings = {
ARCHS = arm64;
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = 5J859T6AE8;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
"$(PROJECT_DIR)",
);
INFOPLIST_FILE = Runner/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
PRODUCT_BUNDLE_IDENTIFIER = com.cloudwebrtc.webrtcExample;
PRODUCT_NAME = "$(TARGET_NAME)";
};
name = Debug;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
92578F7B9E3F6AC8F17192C290223DCF /* Build configuration list for PBXNativeTarget "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
AF60501F7ADFF8F784D4241A34A3EA7C /* Debug */,
4C45D427E97AD8F895ECED02C40BF5E0 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
9FDCD9B0638A340D0E561864FC06FD7A /* Build configuration list for PBXProject "Runner" */ = {
isa = XCConfigurationList;
buildConfigurations = (
879B60EB63E8E59028AA9FB0AAE288D8 /* Debug */,
0D181B1A2BE749D68B72CB6CAB984878 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 18C1723432283E0CC55F10A6DCFD9E02 /* Project object */;
}

View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:Runner.xcodeproj">
</FileRef>
</Workspace>

View File

@ -0,0 +1,93 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0910"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</MacroExpansion>
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
language = ""
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "97C146ED1CF9000F007C117D"
BuildableName = "Runner.app"
BlueprintName = "Runner"
ReferencedContainer = "container:Runner.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>

View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "group:Runner.xcodeproj">
</FileRef>
<FileRef
location = "group:Pods/Pods.xcodeproj">
</FileRef>
</Workspace>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>

View File

@ -0,0 +1,6 @@
#import <UIKit/UIKit.h>
#import <Flutter/Flutter.h>

/// Application delegate for the example app. Subclasses FlutterAppDelegate,
/// which hosts the Flutter engine and handles the standard
/// UIApplicationDelegate lifecycle; no additional API is declared here.
@interface AppDelegate : FlutterAppDelegate
@end

View File

@ -0,0 +1,12 @@
// Idiom fix: use #import (not #include) for Objective-C headers so each
// header is processed at most once without needing include guards.
#import "AppDelegate.h"
#import "GeneratedPluginRegistrant.h"

@implementation AppDelegate

/// UIApplicationDelegate launch hook. Registers all generated Flutter
/// plugins with the engine's registry (self), then defers to
/// FlutterAppDelegate's implementation for the normal Flutter launch.
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
  [GeneratedPluginRegistrant registerWithRegistry:self];
  // Override point for customization after application launch.
  return [super application:application didFinishLaunchingWithOptions:launchOptions];
}

@end

View File

@ -0,0 +1,122 @@
{
"images" : [
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "Icon-App-20x20@2x.png",
"scale" : "2x"
},
{
"size" : "20x20",
"idiom" : "iphone",
"filename" : "Icon-App-20x20@3x.png",
"scale" : "3x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@1x.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "iphone",
"filename" : "Icon-App-29x29@3x.png",
"scale" : "3x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-App-40x40@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "iphone",
"filename" : "Icon-App-40x40@3x.png",
"scale" : "3x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-App-60x60@2x.png",
"scale" : "2x"
},
{
"size" : "60x60",
"idiom" : "iphone",
"filename" : "Icon-App-60x60@3x.png",
"scale" : "3x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "Icon-App-20x20@1x.png",
"scale" : "1x"
},
{
"size" : "20x20",
"idiom" : "ipad",
"filename" : "Icon-App-20x20@2x.png",
"scale" : "2x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-App-29x29@1x.png",
"scale" : "1x"
},
{
"size" : "29x29",
"idiom" : "ipad",
"filename" : "Icon-App-29x29@2x.png",
"scale" : "2x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-App-40x40@1x.png",
"scale" : "1x"
},
{
"size" : "40x40",
"idiom" : "ipad",
"filename" : "Icon-App-40x40@2x.png",
"scale" : "2x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-App-76x76@1x.png",
"scale" : "1x"
},
{
"size" : "76x76",
"idiom" : "ipad",
"filename" : "Icon-App-76x76@2x.png",
"scale" : "2x"
},
{
"size" : "83.5x83.5",
"idiom" : "ipad",
"filename" : "Icon-App-83.5x83.5@2x.png",
"scale" : "2x"
},
{
"size" : "1024x1024",
"idiom" : "ios-marketing",
"filename" : "Icon-App-1024x1024@1x.png",
"scale" : "1x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 564 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.5 KiB

View File

@ -0,0 +1,23 @@
{
"images" : [
{
"idiom" : "universal",
"filename" : "LaunchImage.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "LaunchImage@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "LaunchImage@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 68 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 68 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 68 B

View File

@ -0,0 +1,5 @@
# Launch Screen Assets
You can customize the launch screen with your own desired assets by replacing the image files in this directory.
You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images.

View File

@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="12121" systemVersion="16G29" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12089"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="Ydg-fD-yQy"/>
<viewControllerLayoutGuide type="bottom" id="xbc-2k-c8Z"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<subviews>
<imageView opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" image="LaunchImage" translatesAutoresizingMaskIntoConstraints="NO" id="YRO-k0-Ey4">
</imageView>
</subviews>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="YRO-k0-Ey4" firstAttribute="centerX" secondItem="Ze5-6b-2t3" secondAttribute="centerX" id="1a2-6s-vTC"/>
<constraint firstItem="YRO-k0-Ey4" firstAttribute="centerY" secondItem="Ze5-6b-2t3" secondAttribute="centerY" id="4X2-HB-R7a"/>
</constraints>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
<resources>
<image name="LaunchImage" width="168" height="185"/>
</resources>
</document>

View File

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="10117" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
</dependencies>
<scenes>
<!--Flutter View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="FlutterViewController" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
<viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
</scene>
</scenes>
</document>

View File

@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>webrtc_example</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>LSRequiresIPhoneOS</key>
<true/>
<key>UILaunchStoryboardName</key>
<string>LaunchScreen</string>
<key>UIMainStoryboardFile</key>
<string>Main</string>
<key>UIRequiredDeviceCapabilities</key>
<array>
<string>arm64</string>
</array>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UISupportedInterfaceOrientations~ipad</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
<string>UIInterfaceOrientationPortraitUpsideDown</string>
<string>UIInterfaceOrientationLandscapeLeft</string>
<string>UIInterfaceOrientationLandscapeRight</string>
</array>
<key>UIViewControllerBasedStatusBarAppearance</key>
<false/>
</dict>
</plist>

View File

@ -0,0 +1,9 @@
#import <UIKit/UIKit.h>
#import <Flutter/Flutter.h>
#import "AppDelegate.h"
int main(int argc, char * argv[]) {
  @autoreleasepool {
    // Hand control to UIKit, naming AppDelegate as the application delegate
    // class; no principal class is specified (nil selects UIApplication).
    return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
  }
}

55
example/lib/main.dart Normal file
View File

@ -0,0 +1,55 @@
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:webrtc/webrtc.dart';
/// Application entry point: inflates the example widget tree.
void main() {
  runApp(new MyApp());
}

/// Root widget of the example app; stateful so the platform version
/// can be fetched asynchronously after the first frame.
class MyApp extends StatefulWidget {
  @override
  _MyAppState createState() => new _MyAppState();
}
class _MyAppState extends State<MyApp> {
  // Shown in the UI until the platform channel replies.
  String _platformVersion = 'Unknown';

  @override
  void initState() {
    super.initState();
    initPlatformState();
  }

  /// Queries the native side for its platform version string.
  ///
  /// Platform messages are asynchronous, so this runs outside build();
  /// failures surface as a PlatformException and are converted into a
  /// placeholder message rather than crashing the app.
  Future<void> initPlatformState() async {
    String platformVersion;
    try {
      platformVersion = await Webrtc.platformVersion;
    } on PlatformException {
      platformVersion = 'Failed to get platform version.';
    }

    // If the widget was removed from the tree while the asynchronous platform
    // message was in flight, discard the reply rather than calling setState
    // on a disposed State object.
    if (!mounted) {
      return;
    }

    setState(() {
      _platformVersion = platformVersion;
    });
  }

  @override
  Widget build(BuildContext context) {
    return new MaterialApp(
      home: new Scaffold(
        appBar: new AppBar(
          title: new Text('Plugin example app'),
        ),
        body: new Center(
          child: new Text('Running on: $_platformVersion\n'),
        ),
      ),
    );
  }
}

405
example/pubspec.lock Normal file
View File

@ -0,0 +1,405 @@
# Generated by pub
# See https://www.dartlang.org/tools/pub/glossary#lockfile
packages:
analyzer:
dependency: transitive
description:
name: analyzer
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.31.1"
args:
dependency: transitive
description:
name: args
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.3.0"
async:
dependency: transitive
description:
name: async
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.0.4"
barback:
dependency: transitive
description:
name: barback
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.15.2+14"
boolean_selector:
dependency: transitive
description:
name: boolean_selector
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.2"
charcode:
dependency: transitive
description:
name: charcode
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.1.1"
cli_util:
dependency: transitive
description:
name: cli_util
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.1.2+1"
collection:
dependency: transitive
description:
name: collection
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.14.5"
convert:
dependency: transitive
description:
name: convert
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.0.1"
crypto:
dependency: transitive
description:
name: crypto
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.0.2+1"
csslib:
dependency: transitive
description:
name: csslib
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.14.1"
cupertino_icons:
dependency: "direct main"
description:
name: cupertino_icons
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.1.1"
flutter:
dependency: "direct main"
description: flutter
source: sdk
version: "0.0.0"
flutter_test:
dependency: "direct dev"
description: flutter
source: sdk
version: "0.0.0"
front_end:
dependency: transitive
description:
name: front_end
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.1.0-alpha.9"
glob:
dependency: transitive
description:
name: glob
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.1.5"
html:
dependency: transitive
description:
name: html
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.13.2+2"
http:
dependency: transitive
description:
name: http
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.11.3+16"
http_multi_server:
dependency: transitive
description:
name: http_multi_server
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.0.4"
http_parser:
dependency: transitive
description:
name: http_parser
url: "https://pub.flutter-io.cn"
source: hosted
version: "3.1.1"
io:
dependency: transitive
description:
name: io
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.3.2+1"
isolate:
dependency: transitive
description:
name: isolate
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.1.0"
js:
dependency: transitive
description:
name: js
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.6.1"
kernel:
dependency: transitive
description:
name: kernel
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.3.0-alpha.9"
logging:
dependency: transitive
description:
name: logging
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.11.3+1"
matcher:
dependency: transitive
description:
name: matcher
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.12.1+4"
meta:
dependency: transitive
description:
name: meta
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.1.2"
mime:
dependency: transitive
description:
name: mime
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.9.6"
mockito:
dependency: transitive
description:
name: mockito
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.2.3"
multi_server_socket:
dependency: transitive
description:
name: multi_server_socket
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.1"
node_preamble:
dependency: transitive
description:
name: node_preamble
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.4.0"
package_config:
dependency: transitive
description:
name: package_config
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.3"
package_resolver:
dependency: transitive
description:
name: package_resolver
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.2"
path:
dependency: transitive
description:
name: path
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.5.1"
plugin:
dependency: transitive
description:
name: plugin
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.2.0+2"
pool:
dependency: transitive
description:
name: pool
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.3.4"
pub_semver:
dependency: transitive
description:
name: pub_semver
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.3.2"
quiver:
dependency: transitive
description:
name: quiver
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.28.0"
shelf:
dependency: transitive
description:
name: shelf
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.7.2"
shelf_packages_handler:
dependency: transitive
description:
name: shelf_packages_handler
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.3"
shelf_static:
dependency: transitive
description:
name: shelf_static
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.2.7"
shelf_web_socket:
dependency: transitive
description:
name: shelf_web_socket
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.2.2"
sky_engine:
dependency: transitive
description: flutter
source: sdk
version: "0.0.99"
source_map_stack_trace:
dependency: transitive
description:
name: source_map_stack_trace
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.1.4"
source_maps:
dependency: transitive
description:
name: source_maps
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.10.4"
source_span:
dependency: transitive
description:
name: source_span
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.4.0"
stack_trace:
dependency: transitive
description:
name: stack_trace
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.9.1"
stream_channel:
dependency: transitive
description:
name: stream_channel
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.6.3"
string_scanner:
dependency: transitive
description:
name: string_scanner
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.2"
term_glyph:
dependency: transitive
description:
name: term_glyph
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.0"
test:
dependency: transitive
description:
name: test
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.12.30+4"
typed_data:
dependency: transitive
description:
name: typed_data
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.1.5"
utf:
dependency: transitive
description:
name: utf
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.9.0+4"
vector_math:
dependency: transitive
description:
name: vector_math
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.0.5"
watcher:
dependency: transitive
description:
name: watcher
url: "https://pub.flutter-io.cn"
source: hosted
version: "0.9.7+7"
web_socket_channel:
dependency: transitive
description:
name: web_socket_channel
url: "https://pub.flutter-io.cn"
source: hosted
version: "1.0.7"
webrtc:
dependency: "direct dev"
description:
path: ".."
relative: true
source: path
version: "0.0.1"
yaml:
dependency: transitive
description:
name: yaml
url: "https://pub.flutter-io.cn"
source: hosted
version: "2.1.13"
sdks:
dart: ">=2.0.0-dev.23.0 <=2.0.0-edge.0d5cf900b021bf5c9fa593ffa12b15bcd1cc5fe0"

59
example/pubspec.yaml Normal file
View File

@ -0,0 +1,59 @@
name: webrtc_example
description: Demonstrates how to use the webrtc plugin.
dependencies:
flutter:
sdk: flutter
# The following adds the Cupertino Icons font to your application.
# Use with the CupertinoIcons class for iOS style icons.
cupertino_icons: ^0.1.0
dev_dependencies:
flutter_test:
sdk: flutter
webrtc:
path: ../
# For information on the generic Dart part of this file, see the
# following page: https://www.dartlang.org/tools/pub/pubspec
# The following section is specific to Flutter.
flutter:
# The following line ensures that the Material Icons font is
# included with your application, so that you can use the icons in
# the material Icons class.
uses-material-design: true
# To add assets to your application, add an assets section, like this:
# assets:
# - images/a_dot_burr.jpeg
# - images/a_dot_ham.jpeg
# An image asset can refer to one or more resolution-specific "variants", see
# https://flutter.io/assets-and-images/#resolution-aware.
# For details regarding adding assets from package dependencies, see
# https://flutter.io/assets-and-images/#from-packages
# To add custom fonts to your application, add a fonts section here,
# in this "flutter" section. Each entry in this list should have a
# "family" key with the font family name, and a "fonts" key with a
# list giving the asset and other descriptors for the font. For
# example:
# fonts:
# - family: Schyler
# fonts:
# - asset: fonts/Schyler-Regular.ttf
# - asset: fonts/Schyler-Italic.ttf
# style: italic
# - family: Trajan Pro
# fonts:
# - asset: fonts/TrajanPro.ttf
# - asset: fonts/TrajanPro_Bold.ttf
# weight: 700
#
# For details regarding fonts from package dependencies,
# see https://flutter.io/custom-fonts/#from-packages

View File

@ -0,0 +1,25 @@
// This is a basic Flutter widget test.
// To perform an interaction with a widget in your test, use the WidgetTester utility that Flutter
// provides. For example, you can send tap and scroll gestures. You can also use WidgetTester to
// find child widgets in the widget tree, read text, and verify that the values of widget properties
// are correct.
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:webrtc_example/main.dart';
void main() {
  // Smoke test: pumps the example app and checks that the status Text
  // widget rendered with the expected prefix (either the real platform
  // version or the failure placeholder satisfies it).
  testWidgets('Verify Platform version', (WidgetTester tester) async {
    // Build our app and trigger a frame.
    await tester.pumpWidget(new MyApp());

    // Verify that platform version is retrieved.
    expect(
        find.byWidgetPredicate(
          (Widget widget) =>
              widget is Text && widget.data.startsWith('Running on:'),
        ),
        findsOneWidget);
  });
}

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/lib" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/test" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/.idea" />
<excludeFolder url="file://$MODULE_DIR$/.pub" />
<excludeFolder url="file://$MODULE_DIR$/build" />
</content>
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Dart SDK" level="project" />
<orderEntry type="library" name="Flutter Plugins" level="project" />
<orderEntry type="library" name="Dart Packages" level="project" />
</component>
</module>

View File

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4">
<component name="FacetManager">
<facet type="android" name="Android">
<configuration>
<option name="GEN_FOLDER_RELATIVE_PATH_APT" value="/android/gen" />
<option name="GEN_FOLDER_RELATIVE_PATH_AIDL" value="/android/gen" />
<option name="MANIFEST_FILE_RELATIVE_PATH" value="/android/AndroidManifest.xml" />
<option name="RES_FOLDER_RELATIVE_PATH" value="/android/res" />
<option name="ASSETS_FOLDER_RELATIVE_PATH" value="/android/assets" />
<option name="LIBS_FOLDER_RELATIVE_PATH" value="/android/libs" />
<option name="PROGUARD_LOGS_FOLDER_RELATIVE_PATH" value="/android/proguard_logs" />
</configuration>
</facet>
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
<content url="file://$MODULE_DIR$/android">
<sourceFolder url="file://$MODULE_DIR$/android/app/src/main/java" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/android/gen" isTestSource="false" generated="true" />
</content>
<orderEntry type="jdk" jdkName="Android API 25 Platform" jdkType="Android SDK" />
<orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="Flutter for Android" level="project" />
</component>
</module>

View File

@ -0,0 +1,12 @@
{
"folders": [
{
"path": "."
},
{
"path": "example",
"name": "example"
}
],
"settings": {}
}

31
ios/.gitignore vendored Normal file
View File

@ -0,0 +1,31 @@
.idea/
.vagrant/
.sconsign.dblite
.svn/
.DS_Store
*.swp
profile
DerivedData/
build/
*.pbxuser
*.mode1v3
*.mode2v3
*.perspectivev3
!default.pbxuser
!default.mode1v3
!default.mode2v3
!default.perspectivev3
xcuserdata
*.moved-aside
*.pyc
*sync/
Icon?
.tags*

0
ios/Assets/.gitkeep Normal file
View File

View File

@ -0,0 +1,16 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "WebRTC/RTCVideoCodecFactory.h"
@interface ARDVideoDecoderFactory : NSObject<RTCVideoDecoderFactory>
@end

View File

@ -0,0 +1,39 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDVideoDecoderFactory.h"
#import "WebRTC/RTCVideoCodecH264.h"
#import "WebRTC/RTCVideoDecoderVP8.h"
#import "WebRTC/RTCVideoDecoderVP9.h"
@implementation ARDVideoDecoderFactory

/// Returns a decoder instance for the requested codec, or nil when the
/// codec name is not one of the supported set (H264, VP8, VP9).
- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
  NSString *codecName = info.name;
  if ([codecName isEqualToString:@"H264"]) {
    return [[RTCVideoDecoderH264 alloc] init];
  }
  if ([codecName isEqualToString:@"VP8"]) {
    return [RTCVideoDecoderVP8 vp8Decoder];
  }
  if ([codecName isEqualToString:@"VP9"]) {
    return [RTCVideoDecoderVP9 vp9Decoder];
  }
  return nil;
}

/// Advertises the decodable codecs, in preference order, with no extra
/// format parameters.
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
  NSMutableArray<RTCVideoCodecInfo *> *codecs = [NSMutableArray array];
  for (NSString *codecName in @[ @"H264", @"VP8", @"VP9" ]) {
    [codecs addObject:[[RTCVideoCodecInfo alloc] initWithName:codecName
                                                   parameters:nil]];
  }
  return [codecs copy];
}

@end

View File

@ -0,0 +1,16 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "WebRTC/RTCVideoCodecFactory.h"
@interface ARDVideoEncoderFactory : NSObject<RTCVideoEncoderFactory>
@end

View File

@ -0,0 +1,64 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDVideoEncoderFactory.h"
#import "WebRTC/RTCVideoCodecH264.h"
#import "WebRTC/RTCVideoEncoderVP8.h"
#import "WebRTC/RTCVideoEncoderVP9.h"
static NSString *kLevel31ConstrainedHigh = @"640c1f";
static NSString *kLevel31ConstrainedBaseline = @"42e01f";
@implementation ARDVideoEncoderFactory

/// Returns an encoder instance for the requested codec, or nil when the
/// codec name is not one of the supported set (H264, VP8, VP9).
- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
  NSString *codecName = info.name;
  if ([codecName isEqualToString:@"H264"]) {
    return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
  }
  if ([codecName isEqualToString:@"VP8"]) {
    return [RTCVideoEncoderVP8 vp8Encoder];
  }
  if ([codecName isEqualToString:@"VP9"]) {
    return [RTCVideoEncoderVP9 vp9Encoder];
  }
  return nil;
}

/// Advertises the encodable codecs in preference order: two H264 profiles
/// (constrained high first, then constrained baseline), followed by VP8
/// and VP9.
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
  NSMutableArray<RTCVideoCodecInfo *> *codecs = [NSMutableArray array];

  // Both H264 entries share the same asymmetry/packetization settings and
  // differ only in profile-level-id.
  for (NSString *profileLevelId in
       @[ kLevel31ConstrainedHigh, kLevel31ConstrainedBaseline ]) {
    NSDictionary<NSString *, NSString *> *h264Params = @{
      @"profile-level-id" : profileLevelId,
      @"level-asymmetry-allowed" : @"1",
      @"packetization-mode" : @"1",
    };
    [codecs addObject:[[RTCVideoCodecInfo alloc] initWithName:@"H264"
                                                   parameters:h264Params]];
  }

  [codecs addObject:[[RTCVideoCodecInfo alloc] initWithName:@"VP8" parameters:nil]];
  [codecs addObject:[[RTCVideoCodecInfo alloc] initWithName:@"VP9" parameters:nil]];

  return [codecs copy];
}

@end

View File

@ -0,0 +1,12 @@
#import "FlutterWebRTCPlugin.h"
#import <WebRTC/RTCDataChannel.h>
@interface RTCDataChannel (Flutter)
@property (nonatomic, strong) NSNumber *peerConnectionId;
@end
@interface FlutterWebRTCPlugin (RTCDataChannel) <RTCDataChannelDelegate>
@end

View File

@ -0,0 +1,120 @@
#import <objc/runtime.h>
#import "FlutterRTCDataChannel.h"
#import "FlutterRTCPeerConnection.h"
#import <WebRTC/RTCDataChannelConfiguration.h>
@implementation RTCDataChannel (Flutter)

/// Categories cannot declare ivars, so the owning peer connection's id is
/// attached to the channel instance via an associated object, keyed by the
/// property's selector.
- (NSNumber *)peerConnectionId
{
  return objc_getAssociatedObject(self, @selector(peerConnectionId));
}

- (void)setPeerConnectionId:(NSNumber *)peerConnectionId
{
  objc_setAssociatedObject(self,
                           @selector(peerConnectionId),
                           peerConnectionId,
                           OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

@end
@implementation FlutterWebRTCPlugin (RTCDataChannel)

/**
 * Creates an RTCDataChannel on the identified peer connection and registers
 * it in self.dataChannels keyed by its channel id, so later close/send calls
 * can look it up.
 */
-(void)createDataChannel:(nonnull NSNumber *)peerConnectionId
                   label:(NSString *)label
                  config:(RTCDataChannelConfiguration *)config
{
  RTCPeerConnection *peerConnection = self.peerConnections[peerConnectionId];
  RTCDataChannel *dataChannel = [peerConnection dataChannelForLabel:label configuration:config];

  // XXX RTP data channels are not defined by the WebRTC standard, have been
  // deprecated in Chromium, and Google have decided (in 2015) to no longer
  // support them (in the face of multiple reported issues of breakages).
  // A channelId of -1 indicates such an unusable channel.
  if (-1 != dataChannel.channelId) {
    // Record the owning peer connection on the channel; the delegate
    // callbacks below report it back in every event and would otherwise
    // attempt to insert nil into an NSDictionary literal (which throws).
    dataChannel.peerConnectionId = peerConnectionId;
    NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId];
    self.dataChannels[dataChannelId] = dataChannel;
    dataChannel.delegate = self;
  }
}

/// Closes the identified data channel and removes it from the registry.
-(void)dataChannelClose:(nonnull NSNumber *)peerConnectionId
          dataChannelId:(nonnull NSNumber *)dataChannelId
{
  NSMutableDictionary *dataChannels = self.dataChannels;
  RTCDataChannel *dataChannel = dataChannels[dataChannelId];
  [dataChannel close];
  [dataChannels removeObjectForKey:dataChannelId];
}

/**
 * Sends `data` over the identified data channel. `type` selects the wire
 * encoding: "binary" means `data` is a base64 string that is decoded to raw
 * bytes; any other value sends `data` as UTF-8 text.
 */
-(void)dataChannelSend:(nonnull NSNumber *)peerConnectionId
         dataChannelId:(nonnull NSNumber *)dataChannelId
                  data:(NSString *)data
                  type:(NSString *)type
{
  RTCDataChannel *dataChannel = self.dataChannels[dataChannelId];

  BOOL isBinary = [type isEqualToString:@"binary"];
  NSData *bytes = isBinary
      ? [[NSData alloc] initWithBase64EncodedString:data options:0]
      : [data dataUsingEncoding:NSUTF8StringEncoding];
  RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:bytes isBinary:isBinary];
  [dataChannel sendData:buffer];
}

/// Maps an RTCDataChannelState to the string the Dart side expects.
- (NSString *)stringForDataChannelState:(RTCDataChannelState)state
{
  switch (state) {
    case RTCDataChannelStateConnecting: return @"connecting";
    case RTCDataChannelStateOpen: return @"open";
    case RTCDataChannelStateClosing: return @"closing";
    case RTCDataChannelStateClosed: return @"closed";
  }
  return nil;
}

#pragma mark - RTCDataChannelDelegate methods

// Called when the data channel state has changed.
- (void)dataChannelDidChangeState:(RTCDataChannel*)channel
{
  // peerConnectionId is set in createDataChannel:; fall back to -1 for any
  // channel that lacks it, since NSDictionary literals cannot hold nil.
  NSDictionary *event = @{@"id": @(channel.channelId),
                          @"peerConnectionId": channel.peerConnectionId ?: @(-1),
                          @"state": [self stringForDataChannelState:channel.readyState]};
  if(_eventSink) {
    _eventSink(@{ @"event" : @"dataChannelStateChanged", @"body": event, });
  }
}

// Called when a data buffer was successfully received.
- (void)dataChannel:(RTCDataChannel *)channel didReceiveMessageWithBuffer:(RTCDataBuffer *)buffer
{
  NSString *type;
  NSString *data;
  if (buffer.isBinary) {
    type = @"binary";
    data = [buffer.data base64EncodedStringWithOptions:0];
  } else {
    type = @"text";
    // XXX NSData has a length property which means that, when it represents
    // text, the value of its bytes property does not have to be terminated by
    // null. In such a case, NSString's stringFromUTF8String may fail and return
    // nil (which would crash the process when inserting data into NSDictionary
    // without the nil protection implemented below).
    data = [[NSString alloc] initWithData:buffer.data
                                 encoding:NSUTF8StringEncoding];
  }

  NSDictionary *event = @{@"id": @(channel.channelId),
                          @"peerConnectionId": channel.peerConnectionId ?: @(-1),
                          @"type": type,
                          // XXX NSDictionary will crash the process upon
                          // attempting to insert nil. Such behavior is
                          // unacceptable given that protection in such a
                          // scenario is extremely simple.
                          @"data": (data ? data : [NSNull null])};
  if(_eventSink) {
    _eventSink(@{ @"event" : @"dataChannelReceiveMessage", @"body": event, });
  }
}

@end

View File

@ -0,0 +1,10 @@
#import <Foundation/Foundation.h>
#import "FlutterWebRTCPlugin.h"
@interface FlutterWebRTCPlugin (RTCMediaStream)
-(void)getUserMedia:(NSDictionary *)constraints
result:(FlutterResult)result;
@end

View File

@ -0,0 +1,453 @@
#import <objc/runtime.h>
#import <WebRTC/RTCAVFoundationVideoSource.h>
#import <WebRTC/RTCAudioTrack.h>
#import <WebRTC/RTCVideoTrack.h>
#import <WebRTC/RTCMediaConstraints.h>
#import <WebRTC/RTCMediaStream.h>
#import <WebRTC/RTCMediaStreamTrack.h>
#import <WebRTC/RTCPeerConnectionFactory.h>
#import "FlutterRTCMediaStream.h"
#import "FlutterRTCPeerConnection.h"
@implementation AVCaptureDevice (Flutter)

/// Maps the capture device's hardware position to the string constant used
/// by the plugin ("back", "front" or "unspecified"); nil for any value
/// outside the known enum cases.
- (NSString*)positionString {
  switch (self.position) {
    case AVCaptureDevicePositionBack:
      return @"back";
    case AVCaptureDevicePositionFront:
      return @"front";
    case AVCaptureDevicePositionUnspecified:
      return @"unspecified";
  }
  return nil;
}

@end
@implementation FlutterWebRTCPlugin (RTCMediaStream)
/**
* {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediaerrorcallback}
*/
typedef void (^NavigatorUserMediaErrorCallback)(NSString *errorType, NSString *errorMessage);
/**
* {@link https://www.w3.org/TR/mediacapture-streams/#navigatorusermediasuccesscallback}
*/
typedef void (^NavigatorUserMediaSuccessCallback)(RTCMediaStream *mediaStream);
/// Builds the fallback capture constraints: at least 1280x720 at 30 fps,
/// with no optional constraints.
- (RTCMediaConstraints *)defaultMediaStreamConstraints {
  NSDictionary *mandatory = @{
    kRTCMediaConstraintsMinWidth : @"1280",
    kRTCMediaConstraintsMinHeight : @"720",
    kRTCMediaConstraintsMinFrameRate : @"30",
  };
  return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatory
                                               optionalConstraints:nil];
}
/**
* Initializes a new {@link RTCAudioTrack} which satisfies specific constraints,
* adds it to a specific {@link RTCMediaStream}, and reports success to a
* specific callback. Implements the audio-specific counterpart of the
* {@code getUserMedia()} algorithm.
*
* @param constraints The {@code MediaStreamConstraints} which the new
* {@code RTCAudioTrack} instance is to satisfy.
* @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which
* success is to be reported.
* @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which
* failure is to be reported.
* @param mediaStream The {@link RTCMediaStream} which is being initialized as
* part of the execution of the {@code getUserMedia()} algorithm, to which a
* new {@code RTCAudioTrack} is to be added, and which is to be reported to
* {@code successCallback} upon success.
*/
- (void)getUserAudio:(NSDictionary *)constraints
     successCallback:(NavigatorUserMediaSuccessCallback)successCallback
       errorCallback:(NavigatorUserMediaErrorCallback)errorCallback
         mediaStream:(RTCMediaStream *)mediaStream {
  // Audio-track creation has no failure path here, so a track with a fresh
  // UUID id is added unconditionally and success is reported immediately;
  // errorCallback is part of the common signature but never invoked.
  NSString *audioTrackId = [[NSUUID UUID] UUIDString];
  RTCAudioTrack *audioTrack =
      [self.peerConnectionFactory audioTrackWithTrackId:audioTrackId];
  [mediaStream addAudioTrack:audioTrack];
  successCallback(mediaStream);
}
// TODO: Use RCTConvert for constraints ...
/**
 * Flutter-facing entry point of the getUserMedia() algorithm: creates a
 * media stream satisfying `constraints` and completes `result` with the
 * stream id and an array of track descriptions, or with a FlutterError.
 */
-(void)getUserMedia:(NSDictionary *)constraints
             result:(FlutterResult) result {
  // Label the stream with a UUID (RFC4122) so that streams produced by
  // multiple getUserMedia() calls can be attached to one RTCPeerConnection,
  // as https://www.w3.org/TR/mediacapture-streams/#mediastream suggests.
  NSString *mediaStreamId = [[NSUUID UUID] UUIDString];
  RTCMediaStream *mediaStream
    = [self.peerConnectionFactory mediaStreamWithStreamId:mediaStreamId];

  [self
    getUserMedia:constraints
    successCallback:^ (RTCMediaStream *stream) {
      NSString *streamId = stream.streamId;
      NSMutableArray *tracks = [NSMutableArray array];

      // Describe both audio and video tracks for the Dart side; the
      // property name doubles as the selector used to fetch each list.
      for (NSString *propertyName in @[ @"audioTracks", @"videoTracks" ]) {
        SEL sel = NSSelectorFromString(propertyName);
        for (RTCMediaStreamTrack *track in [stream performSelector:sel]) {
          NSString *trackId = track.trackId;

          self.localTracks[trackId] = track;
          [tracks addObject:@{
            @"enabled": @(track.isEnabled),
            @"id": trackId,
            @"kind": track.kind,
            @"label": trackId,
            @"readyState": @"live",
            @"remote": @(NO)
          }];
        }
      }

      self.localStreams[streamId] = stream;
      result(@[ streamId, tracks ]);
    }
    errorCallback:^ (NSString *errorType, NSString *errorMessage) {
      result([FlutterError errorWithCode:[NSString stringWithFormat:@"Error %@", errorType]
                                 message:errorMessage
                                 details:nil]);
    }
    mediaStream:mediaStream];
}
/**
* Initializes a new {@link RTCAudioTrack} or a new {@link RTCVideoTrack} which
* satisfies specific constraints and adds it to a specific
* {@link RTCMediaStream} if the specified {@code mediaStream} contains no track
* of the respective media type and the specified {@code constraints} specify
* that a track of the respective media type is required; otherwise, reports
* success for the specified {@code mediaStream} to a specific
* {@link NavigatorUserMediaSuccessCallback}. In other words, implements a media
* type-specific iteration of or successfully concludes the
* {@code getUserMedia()} algorithm. The method will be recursively invoked to
* conclude the whole {@code getUserMedia()} algorithm either with (successful)
* satisfaction of the specified {@code constraints} or with failure.
*
* @param constraints The {@code MediaStreamConstraints} which specifies the
* requested media types and which the new {@code RTCAudioTrack} or
* {@code RTCVideoTrack} instance is to satisfy.
* @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which
* success is to be reported.
* @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which
* failure is to be reported.
* @param mediaStream The {@link RTCMediaStream} which is being initialized as
* part of the execution of the {@code getUserMedia()} algorithm.
*/
- (void)getUserMedia:(NSDictionary *)constraints
     successCallback:(NavigatorUserMediaSuccessCallback)successCallback
       errorCallback:(NavigatorUserMediaErrorCallback)errorCallback
         mediaStream:(RTCMediaStream *)mediaStream {
  // If mediaStream contains no audioTracks and the constraints request such a
  // track, then run an iteration of the getUserMedia() algorithm to obtain
  // local audio content. Note: this method is re-entered (via the
  // requestAccessForMediaType chain) until both media types are satisfied.
  if (mediaStream.audioTracks.count == 0) {
    // constraints.audio — accepted either as a dictionary of audio
    // constraints or as a truthy BOOL-like value.
    id audioConstraints = constraints[@"audio"];
    BOOL constraintsIsDictionary = [audioConstraints isKindOfClass:[NSDictionary class]];
    if (audioConstraints && (constraintsIsDictionary || [audioConstraints boolValue])) {
      [self requestAccessForMediaType:AVMediaTypeAudio
                          constraints:constraints
                      successCallback:successCallback
                        errorCallback:errorCallback
                          mediaStream:mediaStream];
      return;
    }
  }

  // If mediaStream contains no videoTracks and the constraints request such a
  // track, then run an iteration of the getUserMedia() algorithm to obtain
  // local video content.
  if (mediaStream.videoTracks.count == 0) {
    // constraints.video — an NSNumber is interpreted as a boolean switch;
    // any dictionary value also requests video (with detailed constraints).
    id videoConstraints = constraints[@"video"];
    if (videoConstraints) {
      BOOL requestAccessForVideo
        = [videoConstraints isKindOfClass:[NSNumber class]]
          ? [videoConstraints boolValue]
          : [videoConstraints isKindOfClass:[NSDictionary class]];

      if (requestAccessForVideo) {
        [self requestAccessForMediaType:AVMediaTypeVideo
                            constraints:constraints
                        successCallback:successCallback
                          errorCallback:errorCallback
                            mediaStream:mediaStream];
        return;
      }
    }
  }

  // There are audioTracks and/or videoTracks in mediaStream as requested by
  // constraints so the getUserMedia() is to conclude with success.
  successCallback(mediaStream);
}
/**
* Initializes a new {@link RTCVideoTrack} which satisfies specific constraints,
* adds it to a specific {@link RTCMediaStream}, and reports success to a
* specific callback. Implements the video-specific counterpart of the
* {@code getUserMedia()} algorithm.
*
* @param constraints The {@code MediaStreamConstraints} which the new
* {@code RTCVideoTrack} instance is to satisfy.
* @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which
* success is to be reported.
* @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which
* failure is to be reported.
* @param mediaStream The {@link RTCMediaStream} which is being initialized as
* part of the execution of the {@code getUserMedia()} algorithm, to which a
* new {@code RTCVideoTrack} is to be added, and which is to be reported to
* {@code successCallback} upon success.
*/
- (void)getUserVideo:(NSDictionary *)constraints
     successCallback:(NavigatorUserMediaSuccessCallback)successCallback
       errorCallback:(NavigatorUserMediaErrorCallback)errorCallback
         mediaStream:(RTCMediaStream *)mediaStream {
  id videoConstraints = constraints[@"video"];
  AVCaptureDevice *videoDevice;
  if ([videoConstraints isKindOfClass:[NSDictionary class]]) {
    // constraints.video.optional — look for an explicit device selection by
    // sourceId (the device's uniqueID) first; it takes precedence over
    // facingMode below.
    id optionalVideoConstraints = videoConstraints[@"optional"];
    if (optionalVideoConstraints
        && [optionalVideoConstraints isKindOfClass:[NSArray class]]) {
      NSArray *options = optionalVideoConstraints;
      for (id item in options) {
        if ([item isKindOfClass:[NSDictionary class]]) {
          NSString *sourceId = ((NSDictionary *)item)[@"sourceId"];
          if (sourceId) {
            videoDevice = [AVCaptureDevice deviceWithUniqueID:sourceId];
            if (videoDevice) {
              // First matching sourceId wins.
              break;
            }
          }
        }
      }
    }
    if (!videoDevice) {
      // constraints.video.facingMode
      //
      // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode
      id facingMode = videoConstraints[@"facingMode"];
      if (facingMode && [facingMode isKindOfClass:[NSString class]]) {
        AVCaptureDevicePosition position;
        if ([facingMode isEqualToString:@"environment"]) {
          position = AVCaptureDevicePositionBack;
        } else if ([facingMode isEqualToString:@"user"]) {
          position = AVCaptureDevicePositionFront;
        } else {
          // If the specified facingMode value is not supported, fall back to
          // the default video device.
          position = AVCaptureDevicePositionUnspecified;
        }
        if (AVCaptureDevicePositionUnspecified != position) {
          // Pick the first capture device mounted at the requested position.
          for (AVCaptureDevice *aVideoDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
            if (aVideoDevice.position == position) {
              videoDevice = aVideoDevice;
              break;
            }
          }
        }
      }
    }
    if (!videoDevice) {
      // Neither sourceId nor facingMode produced a device; use the default.
      videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }
  }
  if (videoDevice) {
    // TODO: Actually use constraints...
    RTCAVFoundationVideoSource *videoSource = [self.peerConnectionFactory avFoundationVideoSourceWithConstraints:[self defaultMediaStreamConstraints]];
    // FIXME The effort above to find a videoDevice value which satisfies the
    // specified constraints was pretty much wasted. Salvage facingMode for
    // starters because it is kind of a common and hence important feature on
    // a mobile device.
    // NOTE(review): this switch has no default: and does not cover
    // AVCaptureDevicePositionUnspecified — a compiler warning; confirm the
    // fall-through (keep source default) is intended.
    switch (videoDevice.position) {
      case AVCaptureDevicePositionBack:
        if (videoSource.canUseBackCamera) {
          videoSource.useBackCamera = YES;
        }
        break;
      case AVCaptureDevicePositionFront:
        videoSource.useBackCamera = NO;
        break;
    }
    NSString *trackUUID = [[NSUUID UUID] UUIDString];
    RTCVideoTrack *videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource trackId:trackUUID];
    [mediaStream addVideoTrack:videoTrack];
    successCallback(mediaStream);
  } else {
    // According to step 6.2.3 of the getUserMedia() algorithm, if there is no
    // source, fail with a new OverconstrainedError.
    errorCallback(@"OverconstrainedError", /* errorMessage */ nil);
  }
}
-(void)mediaStreamRelease:(RTCMediaStream *)stream
{
  // Drop all local bookkeeping for the stream: forget its audio and video
  // tracks, then the stream itself.
  if (!stream) {
    return;
  }
  for (RTCVideoTrack *videoTrack in stream.videoTracks) {
    [self.localTracks removeObjectForKey:videoTrack.trackId];
  }
  for (RTCAudioTrack *audioTrack in stream.audioTracks) {
    [self.localTracks removeObjectForKey:audioTrack.trackId];
  }
  [self.localStreams removeObjectForKey:stream.streamId];
}
/**
* Obtains local media content of a specific type. Requests access for the
* specified {@code mediaType} if necessary. In other words, implements a media
* type-specific iteration of the {@code getUserMedia()} algorithm.
*
 * @param mediaType Either {@link AVMediaTypeAudio} or {@link AVMediaTypeVideo}
* which specifies the type of the local media content to obtain.
* @param constraints The {@code MediaStreamConstraints} which are to be
* satisfied by the obtained local media content.
* @param successCallback The {@link NavigatorUserMediaSuccessCallback} to which
* success is to be reported.
* @param errorCallback The {@link NavigatorUserMediaErrorCallback} to which
* failure is to be reported.
* @param mediaStream The {@link RTCMediaStream} which is to collect the
* obtained local media content of the specified {@code mediaType}.
*/
- (void)requestAccessForMediaType:(NSString *)mediaType
                      constraints:(NSDictionary *)constraints
                  successCallback:(NavigatorUserMediaSuccessCallback)successCallback
                    errorCallback:(NavigatorUserMediaErrorCallback)errorCallback
                      mediaStream:(RTCMediaStream *)mediaStream {
  // According to step 6.2.1 of the getUserMedia() algorithm, if there is no
  // source, fail "with a new DOMException object whose name attribute has the
  // value NotFoundError."
  // XXX The following approach does not work for audio in Simulator. That is
  // because audio capture is done using AVAudioSession which does not use
  // AVCaptureDevice there. Anyway, Simulator will not (visually) request access
  // for audio.
  //
  // Fix: compare media types with isEqualToString: instead of pointer
  // equality (==). Pointer comparison only works when the caller passes the
  // AVMediaType* constant itself; it silently fails for an equal copy.
  if ([AVMediaTypeVideo isEqualToString:mediaType]
      && [AVCaptureDevice devicesWithMediaType:mediaType].count == 0) {
    // Since successCallback and errorCallback are asynchronously invoked
    // elsewhere, make sure that the invocation here is consistent.
    dispatch_async(dispatch_get_main_queue(), ^ {
      errorCallback(@"DOMException", @"NotFoundError");
    });
    return;
  }
  [AVCaptureDevice
    requestAccessForMediaType:mediaType
    completionHandler:^ (BOOL granted) {
      dispatch_async(dispatch_get_main_queue(), ^ {
        if (granted) {
          // On success, recurse into getUserMedia: so that any other
          // requested media type is obtained as well before the overall
          // algorithm concludes.
          NavigatorUserMediaSuccessCallback scb
            = ^ (RTCMediaStream *mediaStream) {
              [self getUserMedia:constraints
                 successCallback:successCallback
                   errorCallback:errorCallback
                     mediaStream:mediaStream];
            };
          if ([AVMediaTypeAudio isEqualToString:mediaType]) {
            [self getUserAudio:constraints
               successCallback:scb
                 errorCallback:errorCallback
                   mediaStream:mediaStream];
          } else if ([AVMediaTypeVideo isEqualToString:mediaType]) {
            [self getUserVideo:constraints
               successCallback:scb
                 errorCallback:errorCallback
                   mediaStream:mediaStream];
          }
        } else {
          // According to step 10 Permission Failure of the getUserMedia()
          // algorithm, if the user has denied permission, fail "with a new
          // DOMException object whose name attribute has the value
          // NotAllowedError."
          errorCallback(@"DOMException", @"NotAllowedError");
        }
      });
    }];
}
// Enumerates the available capture devices (cameras and microphones) in the
// enumerateDevices()-style dictionary format expected by the Dart side.
// NOTE(review): the collected list is never delivered anywhere — the resolve
// call is still a TODO, so this method currently has no observable effect.
-(void)mediaStreamTrackGetSources{
  NSMutableArray *sources = [NSMutableArray array];
  NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  for (AVCaptureDevice *device in videoDevices) {
    [sources addObject:@{
                         @"facing": device.positionString,
                         @"deviceId": device.uniqueID,
                         @"label": device.localizedName,
                         @"kind": @"videoinput",
                         }];
  }
  NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
  for (AVCaptureDevice *device in audioDevices) {
    // Microphones have no facing direction; report an empty string.
    [sources addObject:@{
                         @"facing": @"",
                         @"deviceId": device.uniqueID,
                         @"label": device.localizedName,
                         @"kind": @"audioinput",
                         }];
  }
  //TODO: resolve(sources);
}
-(void)mediaStreamTrackRelease:(RTCMediaStream *)mediaStream track:(RTCMediaStreamTrack *)track
{
  // Disable the track and detach it from the given stream. Unlike
  // mediaStreamTrackStop:, the track stays in localTracks because it may be
  // re-attached later via MediaStream.addTrack.
  if (!mediaStream || !track) {
    return;
  }
  track.isEnabled = NO;
  //TODO: [self.localTracks removeObjectForKey:trackID];
  NSString *kind = track.kind;
  if ([kind isEqualToString:@"audio"]) {
    [mediaStream removeAudioTrack:(RTCAudioTrack *)track];
  } else if ([kind isEqualToString:@"video"]) {
    [mediaStream removeVideoTrack:(RTCVideoTrack *)track];
  }
}
// Enables/disables a track. The second selector keyword is intentionally
// left unnamed to preserve the existing (anonymous) selector used by callers.
-(void)mediaStreamTrackSetEnabled:(RTCMediaStreamTrack *)track : (BOOL)enabled
{
  // No-op when the track is missing or already in the requested state.
  if (!track || track.isEnabled == enabled) {
    return;
  }
  track.isEnabled = enabled;
}
-(void)mediaStreamTrackSwitchCamera:(RTCMediaStreamTrack *)track
{
  // Flip between front and back camera. Only AVFoundation-backed video
  // sources support the toggle; anything else is ignored.
  if (!track) {
    return;
  }
  RTCVideoSource *source = ((RTCVideoTrack *)track).source;
  if (![source isKindOfClass:[RTCAVFoundationVideoSource class]]) {
    return;
  }
  RTCAVFoundationVideoSource *avSource = (RTCAVFoundationVideoSource *)source;
  avSource.useBackCamera = !avSource.useBackCamera;
}
-(void)mediaStreamTrackStop:(RTCMediaStreamTrack *)track
{
  // Disable the track and forget it locally; the track cannot be restarted
  // through this plugin afterwards.
  if (!track) {
    return;
  }
  track.isEnabled = NO;
  [self.localTracks removeObjectForKey:track.trackId];
}
@end

View File

@ -0,0 +1,40 @@
#import "FlutterWebRTCPlugin.h"

/**
 * Per-connection state attached to RTCPeerConnection via associated objects
 * (see the matching category implementation): open data channels keyed by
 * channel id, an identifying tag, and the remote streams/tracks announced by
 * the connection.
 */
@interface RTCPeerConnection (Flutter)
@property (nonatomic, strong) NSMutableDictionary<NSNumber *, RTCDataChannel *> *dataChannels;
@property (nonatomic, strong) NSNumber *reactTag;
@property (nonatomic, strong) NSMutableDictionary<NSString *, RTCMediaStream *> *remoteStreams;
@property (nonatomic, strong) NSMutableDictionary<NSString *, RTCMediaStreamTrack *> *remoteTracks;
@end

/**
 * RTCPeerConnection operations exposed to the Flutter method channel. Each
 * method reports its outcome through the supplied FlutterResult.
 */
@interface FlutterWebRTCPlugin (RTCPeerConnection)

-(void) peerConnectionCreateOffer:(NSDictionary *)constraints
                   peerConnection:(RTCPeerConnection*)peerConnection
                           result:(FlutterResult)result;

-(void) peerConnectionCreateAnswer:(NSDictionary *)constraints
                    peerConnection:(RTCPeerConnection *)peerConnection
                            result:(FlutterResult)result;

-(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp
                           peerConnection:(RTCPeerConnection *)peerConnection
                                   result:(FlutterResult)result;

-(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp
                            peerConnection:(RTCPeerConnection *)peerConnection
                                    result:(FlutterResult)result;

-(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate
                       peerConnection:(RTCPeerConnection *)peerConnection
                               result:(FlutterResult)result;

-(void) peerConnectionGetStats:(nonnull NSString *)trackID
                peerConnection:(RTCPeerConnection *)peerConnection
                        result:(FlutterResult)result;

// Converts a Dart-side constraints map ("mandatory" dict + "optional" array)
// into an RTCMediaConstraints instance.
- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints;

@end

View File

@ -0,0 +1,484 @@
#import <objc/runtime.h>
#import "FlutterWebRTCPlugin.h"
#import "FlutterRTCPeerConnection.h"
#import "FlutterRTCDataChannel.h"
#import <WebRTC/RTCConfiguration.h>
#import <WebRTC/RTCIceCandidate.h>
#import <WebRTC/RTCIceServer.h>
#import <WebRTC/RTCMediaConstraints.h>
#import <WebRTC/RTCIceCandidate.h>
#import <WebRTC/RTCLegacyStatsReport.h>
#import <WebRTC/RTCSessionDescription.h>
#import <WebRTC/RTCConfiguration.h>
#import <WebRTC/RTCAudioTrack.h>
#import <WebRTC/RTCVideoTrack.h>
#import <WebRTC/RTCMediaStream.h>
@implementation RTCPeerConnection (Flutter)

// Categories cannot declare ivars, so each property below is backed by an
// associated object. The getters use _cmd (the property selector) as the
// association key; the setters reference the same selector explicitly so
// both sides agree on the key.

- (NSMutableDictionary<NSNumber *, RTCDataChannel *> *)dataChannels
{
  return objc_getAssociatedObject(self, _cmd);
}

- (void)setDataChannels:(NSMutableDictionary<NSNumber *, RTCDataChannel *> *)dataChannels
{
  objc_setAssociatedObject(self, @selector(dataChannels), dataChannels, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

- (NSNumber *)reactTag
{
  return objc_getAssociatedObject(self, _cmd);
}

- (void)setReactTag:(NSNumber *)reactTag
{
  objc_setAssociatedObject(self, @selector(reactTag), reactTag, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

- (NSMutableDictionary<NSString *, RTCMediaStream *> *)remoteStreams
{
  return objc_getAssociatedObject(self, _cmd);
}

- (void)setRemoteStreams:(NSMutableDictionary<NSString *,RTCMediaStream *> *)remoteStreams
{
  objc_setAssociatedObject(self, @selector(remoteStreams), remoteStreams, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

- (NSMutableDictionary<NSString *, RTCMediaStreamTrack *> *)remoteTracks
{
  return objc_getAssociatedObject(self, _cmd);
}

- (void)setRemoteTracks:(NSMutableDictionary<NSString *,RTCMediaStreamTrack *> *)remoteTracks
{
  objc_setAssociatedObject(self, @selector(remoteTracks), remoteTracks, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}

@end
@implementation FlutterWebRTCPlugin (RTCPeerConnection)
// Applies a new RTCConfiguration (ICE servers, policies, ...) to an existing
// peer connection.
-(void) peerConnectionSetConfiguration:(RTCConfiguration*)configuration
                        peerConnection:(RTCPeerConnection*)peerConnection
{
  [peerConnection setConfiguration:configuration];
}
// Creates an SDP offer and completes the Flutter call with either
// {"sdp", "type"} or a CreateOfferFailed error.
-(void) peerConnectionCreateOffer:(NSDictionary *)constraints
                   peerConnection:(RTCPeerConnection*)peerConnection
                           result:(FlutterResult)result
{
  RTCMediaConstraints *mediaConstraints = [self parseMediaConstraints:constraints];
  [peerConnection offerForConstraints:mediaConstraints
                    completionHandler:^(RTCSessionDescription *description, NSError *error) {
                      if (error) {
                        result([FlutterError errorWithCode:@"CreateOfferFailed"
                                                   message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]]
                                                   details:nil]);
                        return;
                      }
                      result(@{@"sdp": description.sdp,
                               @"type": [RTCSessionDescription stringForType:description.type]});
                    }];
}
// Creates an SDP answer and completes the Flutter call with either
// {"sdp", "type"} or a CreateAnswerFailed error.
-(void) peerConnectionCreateAnswer:(NSDictionary *)constraints
                    peerConnection:(RTCPeerConnection *)peerConnection
                            result:(FlutterResult)result
{
  RTCMediaConstraints *mediaConstraints = [self parseMediaConstraints:constraints];
  [peerConnection answerForConstraints:mediaConstraints
                     completionHandler:^(RTCSessionDescription *description, NSError *error) {
                       if (error) {
                         result([FlutterError errorWithCode:@"CreateAnswerFailed"
                                                    message:[NSString stringWithFormat:@"Error %@", error.userInfo[@"error"]]
                                                    details:nil]);
                         return;
                       }
                       result(@{@"sdp": description.sdp,
                                @"type": [RTCSessionDescription stringForType:description.type]});
                     }];
}
// Applies a local session description; completes with nil on success or a
// SetLocalDescriptionFailed error.
-(void) peerConnectionSetLocalDescription:(RTCSessionDescription *)sdp
                           peerConnection:(RTCPeerConnection *)peerConnection
                                   result:(FlutterResult)result
{
  [peerConnection setLocalDescription:sdp completionHandler:^(NSError *error) {
    if (!error) {
      result(nil);
      return;
    }
    result([FlutterError errorWithCode:@"SetLocalDescriptionFailed"
                               message:[NSString stringWithFormat:@"Error %@", error.localizedDescription]
                               details:nil]);
  }];
}
// Applies a remote session description; completes with nil on success or a
// SetRemoteDescriptionFailed error.
-(void) peerConnectionSetRemoteDescription:(RTCSessionDescription *)sdp
                            peerConnection:(RTCPeerConnection *)peerConnection
                                    result:(FlutterResult)result
{
  [peerConnection setRemoteDescription:sdp completionHandler:^(NSError *error) {
    if (!error) {
      result(nil);
      return;
    }
    result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed"
                               message:[NSString stringWithFormat:@"Error %@", error.localizedDescription]
                               details:nil]);
  }];
}
// Adds a remote ICE candidate to the connection and completes the Flutter
// method call.
-(void) peerConnectionAddICECandidate:(RTCIceCandidate*)candidate
                       peerConnection:(RTCPeerConnection *)peerConnection
                               result:(FlutterResult)result
{
  [peerConnection addIceCandidate:candidate];
  NSLog(@"addICECandidateresult: %@", candidate);
  // Fix: the FlutterResult was never invoked, so the Dart Future returned by
  // the corresponding method-channel call would never complete.
  result(nil);
}
// Closes the connection and tears down the bookkeeping associated with it.
-(void) peerConnectionClose:(RTCPeerConnection *)peerConnection
{
  [peerConnection close];

  // Clean up peerConnection's streams and tracks
  // NOTE(review): these accessors are sent to self (the plugin), not to
  // peerConnection — yet remoteStreams/remoteTracks are declared on the
  // RTCPeerConnection (Flutter) category. Verify the plugin class defines
  // equivalents, otherwise the per-connection collections are never cleared.
  [self.remoteStreams removeAllObjects];
  [self.remoteTracks removeAllObjects];

  // Clean up peerConnection's dataChannels.
  // NOTE(review): same self-vs-peerConnection concern applies here.
  NSMutableDictionary<NSNumber *, RTCDataChannel *> *dataChannels
    = self.dataChannels;
  for (NSNumber *dataChannelId in dataChannels) {
    dataChannels[dataChannelId].delegate = nil;
    // There is no need to close the RTCDataChannel because it is owned by the
    // RTCPeerConnection and the latter will close the former.
  }
  [dataChannels removeAllObjects];
}
// Collects legacy stats, optionally scoped to a single track. An empty
// trackID means "all tracks" (nil track passed to WebRTC).
-(void) peerConnectionGetStats:(nonnull NSString *)trackID
                peerConnection:(RTCPeerConnection *)peerConnection
                        result:(FlutterResult)result
{
  RTCMediaStreamTrack *track = nil;
  // The assignments inside the condition intentionally resolve the track
  // from the local collection first, then the remote one.
  if (!trackID
      || !trackID.length
      || (track = self.localTracks[trackID])
      || (track = self.remoteTracks[trackID])) {
    [peerConnection statsForTrack:track
                 statsOutputLevel:RTCStatsOutputLevelStandard
                completionHandler:^(NSArray<RTCLegacyStatsReport *> *stats) {
                  result(@[[self statsToJSON:stats]]);
                }];
  } else {
    // Fix: an unknown trackID previously dropped the FlutterResult on the
    // floor, leaving the Dart Future pending forever. Fail explicitly.
    result([FlutterError errorWithCode:@"GetStatsFailed"
                               message:[NSString stringWithFormat:@"Error: track with id %@ not found!", trackID]
                               details:nil]);
  }
}
/**
* Constructs a JSON <tt>NSString</tt> representation of a specific array of
* <tt>RTCLegacyStatsReport</tt>s.
* <p>
* On iOS it is faster to (1) construct a single JSON <tt>NSString</tt>
* representation of an array of <tt>RTCLegacyStatsReport</tt>s and (2) have it
 * pass through the platform-channel bridge rather than the array of
* <tt>RTCLegacyStatsReport</tt>s.
*
* @param reports the array of <tt>RTCLegacyStatsReport</tt>s to represent in
* JSON format
* @return an <tt>NSString</tt> which represents the specified <tt>stats</tt> in
* JSON format
*/
- (NSString *)statsToJSON:(NSArray<RTCLegacyStatsReport *> *)reports
{
  // XXX The initial capacity matters, of course, because it determines how many
  // times the NSMutableString will have grow. But walking through the reports
  // to compute an initial capacity which exactly matches the requirements of
  // the reports is too much work without real-world bang here. A better
  // approach is what the Android counterpart does i.e. cache the
  // NSMutableString and preferably with a Java-like soft reference. If that is
  // too much work, then an improvement should be caching the required capacity
  // from the previous invocation of the method and using it as the initial
  // capacity in the next invocation. As I didn't want to go even through that,
  // choosing just about any initial capacity is OK because NSMutableCopy
  // doesn't have too bad a strategy of growing.
  //
  // NOTE(review): the JSON is assembled by hand and report ids, keys and
  // values are NOT escaped — a quote or backslash in any of them would yield
  // invalid JSON. Confirm WebRTC never emits such characters, or switch to
  // NSJSONSerialization.
  NSMutableString *s = [NSMutableString stringWithCapacity:8 * 1024];

  [s appendString:@"["];
  BOOL firstReport = YES;
  for (RTCLegacyStatsReport *report in reports) {
    // Comma-separate reports after the first one.
    if (firstReport) {
      firstReport = NO;
    } else {
      [s appendString:@","];
    }
    [s appendString:@"{\"id\":\""]; [s appendString:report.reportId];
    [s appendString:@"\",\"type\":\""]; [s appendString:report.type];
    [s appendString:@"\",\"timestamp\":"];
    [s appendFormat:@"%f", report.timestamp];
    [s appendString:@",\"values\":["];
    // Each key/value pair becomes a single-entry JSON object in "values".
    __block BOOL firstValue = YES;
    [report.values enumerateKeysAndObjectsUsingBlock:^(
        NSString *key,
        NSString *value,
        BOOL *stop) {
      if (firstValue) {
        firstValue = NO;
      } else {
        [s appendString:@","];
      }
      [s appendString:@"{\""]; [s appendString:key];
      [s appendString:@"\":\""]; [s appendString:value];
      [s appendString:@"\"}"];
    }];
    [s appendString:@"]}"];
  }
  [s appendString:@"]"];

  return s;
}
// Maps an RTCIceConnectionState to its WebRTC string form; unknown values
// yield nil, matching the original switch's fall-through.
- (NSString *)stringForICEConnectionState:(RTCIceConnectionState)state {
  NSDictionary<NSNumber *, NSString *> *names = @{
    @(RTCIceConnectionStateNew): @"new",
    @(RTCIceConnectionStateChecking): @"checking",
    @(RTCIceConnectionStateConnected): @"connected",
    @(RTCIceConnectionStateCompleted): @"completed",
    @(RTCIceConnectionStateFailed): @"failed",
    @(RTCIceConnectionStateDisconnected): @"disconnected",
    @(RTCIceConnectionStateClosed): @"closed",
    @(RTCIceConnectionStateCount): @"count",
  };
  return names[@(state)];
}
// Maps an RTCIceGatheringState to its WebRTC string form; unknown values
// yield nil, matching the original switch's fall-through.
- (NSString *)stringForICEGatheringState:(RTCIceGatheringState)state {
  NSDictionary<NSNumber *, NSString *> *names = @{
    @(RTCIceGatheringStateNew): @"new",
    @(RTCIceGatheringStateGathering): @"gathering",
    @(RTCIceGatheringStateComplete): @"complete",
  };
  return names[@(state)];
}
// Maps an RTCSignalingState to its WebRTC string form; unknown values yield
// nil, matching the original switch's fall-through.
- (NSString *)stringForSignalingState:(RTCSignalingState)state {
  NSDictionary<NSNumber *, NSString *> *names = @{
    @(RTCSignalingStateStable): @"stable",
    @(RTCSignalingStateHaveLocalOffer): @"have-local-offer",
    @(RTCSignalingStateHaveLocalPrAnswer): @"have-local-pranswer",
    @(RTCSignalingStateHaveRemoteOffer): @"have-remote-offer",
    @(RTCSignalingStateHaveRemotePrAnswer): @"have-remote-pranswer",
    @(RTCSignalingStateClosed): @"closed",
  };
  return names[@(state)];
}
/**
* Parses the constraint keys and values of a specific JavaScript object into
* a specific <tt>NSMutableDictionary</tt> in a format suitable for the
* initialization of a <tt>RTCMediaConstraints</tt> instance.
*
* @param src The JavaScript object which defines constraint keys and values and
* which is to be parsed into the specified <tt>dst</tt>.
* @param dst The <tt>NSMutableDictionary</tt> into which the constraint keys
* and values defined by <tt>src</tt> are to be written in a format suitable for
* the initialization of a <tt>RTCMediaConstraints</tt> instance.
*/
// Copies every entry of src into dst as strings: NSNumbers become
// "true"/"false" by their boolValue, everything else uses -description.
- (void)parseJavaScriptConstraints:(NSDictionary *)src
             intoWebRTCConstraints:(NSMutableDictionary<NSString *, NSString *> *)dst {
  [src enumerateKeysAndObjectsUsingBlock:^(id key, id value, BOOL *stop) {
    NSString *stringValue;
    if ([value isKindOfClass:[NSNumber class]]) {
      stringValue = [value boolValue] ? @"true" : @"false";
    } else {
      stringValue = [value description];
    }
    dst[[key description]] = stringValue;
  }];
}
/**
* Parses a JavaScript object into a new <tt>RTCMediaConstraints</tt> instance.
*
* @param constraints The JavaScript object to parse into a new
* <tt>RTCMediaConstraints</tt> instance.
* @returns A new <tt>RTCMediaConstraints</tt> instance initialized with the
* mandatory and optional constraint keys and values specified by
* <tt>constraints</tt>.
*/
// Translates the Dart-side constraints map into an RTCMediaConstraints
// instance: "mandatory" is a flat dictionary, "optional" an array of
// single-entry dictionaries that get flattened (later entries win).
- (RTCMediaConstraints *)parseMediaConstraints:(NSDictionary *)constraints {
  NSMutableDictionary<NSString *, NSString *> *mandatoryConstraints
    = [NSMutableDictionary new];
  id mandatory = constraints[@"mandatory"];
  if ([mandatory isKindOfClass:[NSDictionary class]]) {
    [self parseJavaScriptConstraints:(NSDictionary *)mandatory
               intoWebRTCConstraints:mandatoryConstraints];
  }

  NSMutableDictionary<NSString *, NSString *> *optionalConstraints
    = [NSMutableDictionary new];
  id optional = constraints[@"optional"];
  if ([optional isKindOfClass:[NSArray class]]) {
    for (id entry in (NSArray *)optional) {
      if ([entry isKindOfClass:[NSDictionary class]]) {
        [self parseJavaScriptConstraints:(NSDictionary *)entry
                   intoWebRTCConstraints:optionalConstraints];
      }
    }
  }

  return [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatoryConstraints
                                               optionalConstraints:optionalConstraints];
}
#pragma mark - RTCPeerConnectionDelegate methods
// Forwards a signaling-state transition to the Dart side, if a sink is attached.
- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)newState {
  if (!_eventSink) {
    return;
  }
  _eventSink(@{
               @"event" : @"signalingState",
               @"state" : [self stringForSignalingState:newState]});
}
// Registers a newly announced remote track and notifies the Dart side.
-(void)peerConnection:(RTCPeerConnection *)peerConnection
          mediaStream:(RTCMediaStream *)stream didAddTrack:(RTCVideoTrack*)track{
  // NOTE(review): bookkeeping goes through self (the plugin) here, while
  // other callbacks in this file use peerConnection.remoteTracks — confirm
  // which collection is the intended one.
  self.remoteTracks[track.trackId] = track;
  NSString *streamId = stream.streamId;
  self.remoteStreams[streamId] = stream;

  // Fix: guard the sink — invoking a nil block crashes, and the sink is nil
  // until the Dart side attaches its event listener. The state-change
  // callbacks already perform this check.
  if (_eventSink) {
    _eventSink(@{
                 @"event" : @"addTrack",
                 @"streamId": streamId,
                 @"trackId": track.trackId,
                 @"track": @{
                     @"id": track.trackId,
                     @"kind": track.kind,
                     @"label": track.trackId,
                     @"enabled": @(track.isEnabled),
                     @"remote": @(YES),
                     @"readyState": @"live"}
                 });
  }
}
// Drops a removed remote track from the bookkeeping and notifies the Dart side.
-(void)peerConnection:(RTCPeerConnection *)peerConnection
          mediaStream:(RTCMediaStream *)stream didRemoveTrack:(RTCVideoTrack*)track{
  [peerConnection.remoteTracks removeObjectForKey:track.trackId];
  NSString *streamId = stream.streamId;
  // Fix: guard the sink — invoking a nil block crashes, and the sink is nil
  // until the Dart side attaches its event listener. The state-change
  // callbacks already perform this check.
  if (_eventSink) {
    _eventSink(@{
                 @"event" : @"removeTrack",
                 @"streamId": streamId,
                 @"trackId": track.trackId,
                 });
  }
}
// Registers a newly announced remote stream (and all of its tracks) and
// notifies the Dart side with a serialized track list.
- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream {
  NSMutableArray *tracks = [NSMutableArray array];
  for (RTCVideoTrack *track in stream.videoTracks) {
    peerConnection.remoteTracks[track.trackId] = track;
    [tracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}];
  }
  for (RTCAudioTrack *track in stream.audioTracks) {
    peerConnection.remoteTracks[track.trackId] = track;
    [tracks addObject:@{@"id": track.trackId, @"kind": track.kind, @"label": track.trackId, @"enabled": @(track.isEnabled), @"remote": @(YES), @"readyState": @"live"}];
  }

  NSString *streamId = stream.streamId;
  peerConnection.remoteStreams[streamId] = stream;
  // Fix: guard the sink — invoking a nil block crashes, and the sink is nil
  // until the Dart side attaches its event listener. The state-change
  // callbacks already perform this check.
  if (_eventSink) {
    _eventSink(@{
                 @"event" : @"addStream",
                 @"streamId": streamId,
                 @"tracks": tracks,
                 });
  }
}
// Drops a removed remote stream and all of its tracks from the bookkeeping
// and notifies the Dart side.
- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream {
  NSArray *keysArray = [peerConnection.remoteStreams allKeysForObject:stream];
  // We assume there can be only one object for 1 key
  if (keysArray.count > 1) {
    NSLog(@"didRemoveStream - more than one stream entry found for stream instance with id: %@", stream.streamId);
  }
  NSString *streamId = stream.streamId;

  for (RTCVideoTrack *track in stream.videoTracks) {
    [peerConnection.remoteTracks removeObjectForKey:track.trackId];
  }
  for (RTCAudioTrack *track in stream.audioTracks) {
    [peerConnection.remoteTracks removeObjectForKey:track.trackId];
  }
  [peerConnection.remoteStreams removeObjectForKey:streamId];
  // Fix: guard the sink — invoking a nil block crashes, and the sink is nil
  // until the Dart side attaches its event listener. The state-change
  // callbacks already perform this check.
  if (_eventSink) {
    _eventSink(@{
                 @"event" : @"removeStream",
                 @"streamId": streamId,
                 });
  }
}
// Notifies the Dart side that (re)negotiation is required.
- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection {
  if (!_eventSink) {
    return;
  }
  _eventSink(@{@"event" : @"onRenegotiationNeeded",});
}
// Forwards an ICE-connection-state transition to the Dart side.
- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState {
  if (!_eventSink) {
    return;
  }
  _eventSink(@{
               @"event" : @"iceConnectionState",
               @"state" : [self stringForICEConnectionState:newState]
               });
}
// Forwards an ICE-gathering-state transition to the Dart side.
- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState {
  if (!_eventSink) {
    return;
  }
  _eventSink(@{
               @"event" : @"iceGatheringState",
               @"state" : [self stringForICEGatheringState:newState]
               });
}
// Relays a freshly gathered local ICE candidate to the Dart side.
- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate {
  if (!_eventSink) {
    return;
  }
  _eventSink(@{
               @"event" : @"onCandidate",
               @"candidate" : @{@"candidate": candidate.sdp, @"sdpMLineIndex": @(candidate.sdpMLineIndex), @"sdpMid": candidate.sdpMid}
               });
}
// Registers an in-band-negotiated data channel opened by the remote peer and
// notifies the Dart side.
- (void)peerConnection:(RTCPeerConnection*)peerConnection didOpenDataChannel:(RTCDataChannel*)dataChannel {
  // XXX RTP data channels are not defined by the WebRTC standard, have been
  // deprecated in Chromium, and Google have decided (in 2015) to no longer
  // support them (in the face of multiple reported issues of breakages).
  // A channelId of -1 identifies such an unsupported/unnegotiated channel.
  if (-1 == dataChannel.channelId) {
    return;
  }

  NSNumber *dataChannelId = [NSNumber numberWithInteger:dataChannel.channelId];
  // peerConnectionId is presumably an associated property added by the
  // RTCDataChannel (Flutter) category — TODO confirm.
  dataChannel.peerConnectionId = peerConnection.reactTag;
  peerConnection.dataChannels[dataChannelId] = dataChannel;
  // WebRTCModule implements the category RTCDataChannel i.e. the protocol
  // RTCDataChannelDelegate.
  dataChannel.delegate = self;

  NSDictionary *body = @{@"id": peerConnection.reactTag,
                        @"dataChannel": @{@"id": dataChannelId,
                                          @"label": dataChannel.label}};
  if(_eventSink){
    _eventSink(@{
                 @"event" : @"didOpenDataChannel",
                 @"body" : body
                 });
  }
}
@end

View File

@ -0,0 +1,5 @@
#import <Foundation/Foundation.h>
// Fix: the interface was declared without a base class, which makes the
// compiler treat it as an Objective-C root class (no NSObject protocol
// methods, alloc/init unavailable). Inherit from NSObject explicitly.
@interface FlutterRTCVideoViewManager : NSObject
@end

View File

@ -0,0 +1,386 @@
#import <AVFoundation/AVFoundation.h>
#import <objc/runtime.h>
#import <WebRTC/RTCEAGLVideoView.h>
#import <WebRTC/RTCMediaStream.h>
#import <WebRTC/RTCVideoFrame.h>
#import <WebRTC/RTCVideoTrack.h>
#import "FlutterRTCVideoViewManager.h"
#import "FlutterWebRTCPlugin.h"
/**
* In the fashion of
* https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth
* and https://www.w3.org/TR/html5/rendering.html#video-object-fit, resembles
* the CSS style {@code object-fit}.
*/
typedef NS_ENUM(NSInteger, RTCVideoViewObjectFit) {
  /**
   * The contain value defined by https://www.w3.org/TR/css3-images/#object-fit:
   *
   * The replaced content is sized to maintain its aspect ratio while fitting
   * within the element's content box. The video may be letterboxed/pillarboxed.
   */
  RTCVideoViewObjectFitContain,
  /**
   * The cover value defined by https://www.w3.org/TR/css3-images/#object-fit:
   *
   * The replaced content is sized to maintain its aspect ratio while filling
   * the element's entire content box. Parts of the video may be cropped.
   */
  RTCVideoViewObjectFitCover
};
/**
* Implements an equivalent of {@code HTMLVideoElement} i.e. Web's video
* element.
*/
@interface RTCVideoView : UIView <RTCVideoRenderer, RTCEAGLVideoViewDelegate>

/**
 * The indicator which determines whether this {@code RTCVideoView} is to mirror
 * the video specified by {@link #videoTrack} during its rendering. Typically,
 * applications choose to mirror the front/user-facing camera.
 */
@property (nonatomic) BOOL mirror;

/**
 * In the fashion of
 * https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth
 * and https://www.w3.org/TR/html5/rendering.html#video-object-fit, resembles
 * the CSS style {@code object-fit}.
 */
@property (nonatomic) RTCVideoViewObjectFit objectFit;

/**
 * The {@link RTCEAGLVideoView} which implements the actual
 * {@link RTCVideoRenderer} of this instance and which this instance fits within
 * itself so that the rendered video preserves the aspect ratio of
 * {@link #_videoSize}.
 *
 * NOTE(review): declared readonly and not assigned in -initWithFrame:;
 * presumably backed by a custom getter elsewhere in the file — confirm.
 */
@property (nonatomic, readonly) RTCEAGLVideoView *subview;

/**
 * The {@link RTCVideoTrack}, if any, which this instance renders.
 */
@property (nonatomic, strong) RTCVideoTrack *videoTrack;

@end
@implementation RTCVideoView {
/**
* The width and height of the video (frames) rendered by {@link #subview}.
*/
CGSize _videoSize;
}
/**
* Tells this view that its window object changed.
*/
- (void)didMoveToWindow {
  [super didMoveToWindow];

  // RTCVideoTrack strongly retains every renderer added to it, while this
  // view strongly retains its videoTrack — a retain cycle and thus a memory
  // leak. To break the cycle, the view registers itself as a renderer only
  // while it is attached to a window.
  RTCVideoTrack *track = self.videoTrack;
  if (!track) {
    return;
  }
  if (self.window) {
    // TODO RTCVideoTrack's addRenderer implementation has an NSAssert1 that
    // makes sure that the specified RTCVideoRenderer is not added multiple
    // times (without intervening removals, of course). It may (or may not) be
    // wise to explicitly make sure here that we will not hit that NSAssert1.
    [track addRenderer:self];
  } else {
    [track removeRenderer:self];
  }
}
/**
* Invalidates the current layout of the receiver and triggers a layout update
* during the next update cycle. Make sure that the method call is performed on
* the application's main thread (as documented to be necessary by Apple).
*/
// Schedules a layout pass on the main thread, as UIKit requires. The weak
// capture avoids extending the view's lifetime; messaging nil is a no-op if
// the view is gone by the time the block runs.
- (void)dispatchAsyncSetNeedsLayout {
  __weak UIView *weakSelf = self;
  dispatch_async(dispatch_get_main_queue(), ^{
    [weakSelf setNeedsLayout];
  });
}
/**
* Initializes and returns a newly allocated view object with the specified
* frame rectangle.
*
* @param frame The frame rectangle for the view, measured in points.
*/
- (instancetype)initWithFrame:(CGRect)frame {
  if (self = [super initWithFrame:frame]) {
    RTCEAGLVideoView *subview = [[RTCEAGLVideoView alloc] init];
    subview.delegate = self;

    _videoSize.height = 0;
    _videoSize.width = 0;

    self.opaque = NO;
    // NOTE(review): the readonly `subview` property's ivar is never assigned
    // here — presumably its getter (outside this excerpt) locates the
    // RTCEAGLVideoView among self.subviews; confirm, otherwise self.subview
    // is always nil and layoutSubviews bails out early.
    [self addSubview:subview];
  }
  return self;
}
/**
* Lays out the subview of this instance while preserving the aspect ratio of
* the video it renders.
*/
- (void)layoutSubviews {
  [super layoutSubviews];

  UIView *subview = self.subview;
  if (!subview) {
    return;
  }

  CGFloat width = _videoSize.width, height = _videoSize.height;
  CGRect newValue;
  if (width <= 0 || height <= 0) {
    // No video frame has been rendered yet; collapse the subview.
    newValue.origin.x = 0;
    newValue.origin.y = 0;
    newValue.size.width = 0;
    newValue.size.height = 0;
  } else if (RTCVideoViewObjectFitCover == self.objectFit) { // cover
    newValue = self.bounds;
    // Is there a real need to scale subview?
    if (newValue.size.width != width || newValue.size.height != height) {
      // Scale so the video fills the bounds entirely (possibly cropping),
      // then center the oversized rect within the bounds.
      CGFloat scaleFactor
        = MAX(newValue.size.width / width, newValue.size.height / height);
      // Scale both width and height in order to make it obvious that the aspect
      // ratio is preserved.
      width *= scaleFactor;
      height *= scaleFactor;
      newValue.origin.x += (newValue.size.width - width) / 2.0;
      newValue.origin.y += (newValue.size.height - height) / 2.0;
      newValue.size.width = width;
      newValue.size.height = height;
    }
  } else { // contain
    // The implementation is in accord with
    // https://www.w3.org/TR/html5/embedded-content-0.html#the-video-element:
    //
    // In the absence of style rules to the contrary, video content should be
    // rendered inside the element's playback area such that the video content
    // is shown centered in the playback area at the largest possible size that
    // fits completely within it, with the video content's aspect ratio being
    // preserved. Thus, if the aspect ratio of the playback area does not match
    // the aspect ratio of the video, the video will be shown letterboxed or
    // pillarboxed. Areas of the element's playback area that do not contain the
    // video represent nothing.
    newValue
      = AVMakeRectWithAspectRatioInsideRect(
          CGSizeMake(width, height),
          self.bounds);
  }

  // Only touch the frame when it actually changed to avoid spurious layouts.
  CGRect oldValue = subview.frame;
  if (newValue.origin.x != oldValue.origin.x
      || newValue.origin.y != oldValue.origin.y
      || newValue.size.width != oldValue.size.width
      || newValue.size.height != oldValue.size.height) {
    subview.frame = newValue;
  }

  // Horizontal flip implements the mirror option (typical for front camera).
  subview.transform
    = self.mirror
      ? CGAffineTransformMakeScale(-1.0, 1.0)
      : CGAffineTransformIdentity;
}
/**
 * Setter for the mirror property. When the value actually changes, a relayout
 * is scheduled asynchronously on the main thread.
 *
 * @param mirror Whether rendered frames should be mirrored horizontally.
 */
- (void)setMirror:(BOOL)mirror {
  if (_mirror == mirror) {
    return;
  }
  _mirror = mirror;
  [self dispatchAsyncSetNeedsLayout];
}
/**
 * Setter for the objectFit property (cover/contain). When the value actually
 * changes, a relayout is scheduled asynchronously on the main thread.
 *
 * @param objectFit The fit mode to apply to the rendered video.
 */
- (void)setObjectFit:(RTCVideoViewObjectFit)objectFit {
  if (_objectFit == objectFit) {
    return;
  }
  _objectFit = objectFit;
  [self dispatchAsyncSetNeedsLayout];
}
/**
 * Setter for the videoTrack property. Detaches this view as a renderer from
 * the previous track before adopting the new one.
 *
 * @param videoTrack The RTCVideoTrack to render, or nil to stop rendering.
 */
- (void)setVideoTrack:(RTCVideoTrack *)videoTrack {
  RTCVideoTrack *previousTrack = self.videoTrack;
  if (previousTrack == videoTrack) {
    return;
  }
  // Messaging nil is a harmless no-op, so no explicit nil check is needed.
  [previousTrack removeRenderer:self];
  _videoTrack = videoTrack;
  // This view and its track would otherwise strongly retain each other:
  // RTCVideoTrack strongly retains the RTCVideoRenderers added to it. To
  // break that cycle (and the resulting leak), act as the track's renderer
  // only while this view resides in a window.
  if (videoTrack && self.window) {
    [videoTrack addRenderer:self];
  }
}
/**
 * Getter for the subview property: the RTCEAGLVideoView child that implements
 * the actual RTCVideoRenderer and renders the videoTrack.
 *
 * To keep the number of strong references to the RTCEAGLVideoView low (and
 * with it the risk of leaks), the instance is looked up in the superclass's
 * subview list rather than stored in an ivar.
 *
 * @returns The RTCEAGLVideoView child, or nil if none is installed.
 */
- (RTCEAGLVideoView *)subview {
  for (UIView *candidate in self.subviews) {
    if ([candidate isKindOfClass:[RTCEAGLVideoView class]]) {
      return (RTCEAGLVideoView *)candidate;
    }
  }
  return nil;
}
#pragma mark - RTCVideoRenderer methods
/**
 * RTCVideoRenderer: renders a video frame by delegating to the
 * RTCEAGLVideoView subview. If no subview is installed, the nil message is a
 * no-op and the frame is dropped.
 *
 * @param frame The video frame to render.
 */
- (void)renderFrame:(RTCVideoFrame *)frame {
  [self.subview renderFrame:frame];
}
/**
 * RTCVideoRenderer: forwards the size of the video frames to render to the
 * RTCEAGLVideoView subview. A nil subview turns this into a no-op.
 *
 * @param size The size of the video frames to render.
 */
- (void)setSize:(CGSize)size {
  [self.subview setSize:size];
}
#pragma mark - RTCEAGLVideoViewDelegate methods
/**
 * RTCEAGLVideoViewDelegate: invoked when the rendering subview reports a new
 * video size. Records the size and schedules a relayout.
 *
 * @param videoView The RTCEAGLVideoView whose video size changed.
 * @param size The new size of the video frames being rendered.
 */
- (void)videoView:(RTCEAGLVideoView *)videoView didChangeVideoSize:(CGSize)size {
  if (videoView != self.subview) {
    return;
  }
  _videoSize = size;
  [self dispatchAsyncSetNeedsLayout];
}
@end
@implementation FlutterRTCVideoViewManager

/** Creates the platform view instance: an RTCVideoView that clips to bounds. */
- (UIView *)view {
  RTCVideoView *videoView = [[RTCVideoView alloc] init];
  videoView.clipsToBounds = YES;
  return videoView;
}

/** All calls to this manager are serviced on the main queue. */
- (dispatch_queue_t)methodQueue {
  return dispatch_get_main_queue();
}

/**
 * In the fashion of
 * https://www.w3.org/TR/html5/embedded-content-0.html#dom-video-videowidth
 * and https://www.w3.org/TR/html5/rendering.html#video-object-fit, resembles
 * the CSS style {@code object-fit}.
 *
 * NOTE(review): currently a stub; the commented-out body below is the
 * react-native implementation kept for reference.
 */
- (void)objectFit {
  /*
  NSString *s = [RCTConvert NSString:json];
  RTCVideoViewObjectFit e
    = (s && [s isEqualToString:@"cover"])
      ? RTCVideoViewObjectFitCover
      : RTCVideoViewObjectFitContain;
  view.objectFit = e;
  */
}

/**
 * NOTE(review): currently a stub; the commented-out body below is the
 * react-native implementation kept for reference.
 */
- (void)setStreamURL:(NSString *)url {
  /*
  RTCVideoTrack *videoTrack;
  if (json) {
    NSString *streamReactTag = (NSString *)json;
    WebRTCModule *module = [self.bridge moduleForName:@"WebRTCModule"];
    RTCMediaStream *stream = [module streamForReactTag:streamReactTag];
    NSArray *videoTracks = stream ? stream.videoTracks : nil;
    videoTrack = videoTracks && videoTracks.count ? videoTracks[0] : nil;
    if (!videoTrack) {
      NSLog(@"No video stream for react tag: %@", streamReactTag);
    }
  } else {
    videoTrack = nil;
  }
  view.videoTrack = videoTrack;
  */
}

@end

View File

@ -0,0 +1,22 @@
#import <Flutter/Flutter.h>
#import <Foundation/Foundation.h>
#import <WebRTC/RTCDataChannel.h>
#import <WebRTC/RTCPeerConnection.h>
#import <WebRTC/RTCDataChannel.h>
#import <WebRTC/RTCDataChannelConfiguration.h>
#import <WebRTC/RTCMediaStreamTrack.h>
/**
 * Flutter plugin entry point for WebRTC. Bridges method-channel calls from
 * Dart to the native WebRTC stack and streams peer-connection events back
 * through a FlutterEventSink.
 */
@interface FlutterWebRTCPlugin : NSObject<FlutterPlugin, FlutterStreamHandler, RTCPeerConnectionDelegate>
{
// Sink for the event channel; set in onListen..., cleared in onCancel....
FlutterEventSink _eventSink;
}
// Factory used to create peer connections (see initWithChannel:...).
@property (nonatomic, strong) RTCPeerConnectionFactory *peerConnectionFactory;
// Peer connections keyed by the texture id handed out at creation time.
@property (nonatomic, strong) NSMutableDictionary<NSNumber *, RTCPeerConnection *> *peerConnections;
// NOTE(review): declared with NSString keys, but the implementation
// subscripts this with NSNumber keys (@(streamId)) — confirm the key type.
@property (nonatomic, strong) NSMutableDictionary<NSString *, RTCMediaStream *> *localStreams;
@property (nonatomic, strong) NSMutableDictionary<NSString *, RTCMediaStreamTrack *> *localTracks;
@property (nonatomic, strong) NSMutableDictionary<NSNumber *, RTCDataChannel *> *dataChannels;
@property (nonatomic, strong) NSMutableDictionary<NSString *, RTCMediaStream *> *remoteStreams;
@property (nonatomic, strong) NSMutableDictionary<NSString *, RTCMediaStreamTrack *> *remoteTracks;
@end

View File

@ -0,0 +1,216 @@
#import "FlutterWebRTCPlugin.h"
#import <AVFoundation/AVFoundation.h>
#import <WebRTC/RTCMediaStream.h>
#import <WebRTC/RTCPeerConnectionFactory.h>
#import <WebRTC/RTCPeerConnection.h>
#import <WebRTC/RTCAudioTrack.h>
#import <WebRTC/RTCVideoTrack.h>
#import <WebRTC/RTCConfiguration.h>
#import "FlutterRTCPeerConnection.h"
#import "FlutterRTCMediaStream.h"
#import "ARDVideoDecoderFactory.h"
#import "ARDVideoEncoderFactory.h"
@implementation FlutterWebRTCPlugin {
FlutterMethodChannel *_methodChannel; // "cloudwebrtc.com/WebRTC.Method" channel
FlutterEventChannel* _eventChannel; // "cloudwebrtc.com/WebRTC.Event" channel
id _registry; // the FlutterPluginRegistrar; also receives registerTexture: calls
id _messenger; // NSObject<FlutterBinaryMessenger> from the registrar
}
/**
 * Flutter plugin registration hook: creates the method channel and a plugin
 * instance, then wires the instance up as the channel's call delegate.
 */
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
  NSObject<FlutterBinaryMessenger> *messenger = [registrar messenger];
  FlutterMethodChannel *channel =
      [FlutterMethodChannel methodChannelWithName:@"cloudwebrtc.com/WebRTC.Method"
                                  binaryMessenger:messenger];
  FlutterWebRTCPlugin *plugin = [[FlutterWebRTCPlugin alloc] initWithChannel:channel
                                                                   registrar:registrar
                                                                   messenger:messenger];
  [registrar addMethodCallDelegate:plugin channel:channel];
}
#pragma mark - FlutterStreamHandler methods
/**
 * FlutterStreamHandler: the Dart side cancelled its event subscription.
 * Drops the sink so no further events are delivered.
 */
- (FlutterError* _Nullable)onCancelWithArguments:(id _Nullable)arguments {
_eventSink = nil;
return nil;
}
/**
 * FlutterStreamHandler: the Dart side started listening on the event channel.
 * Stores the sink used to push WebRTC events to Dart.
 */
- (FlutterError* _Nullable)onListenWithArguments:(id _Nullable)arguments
eventSink:(nonnull FlutterEventSink)sink {
_eventSink = sink;
return nil;
}
/**
 * Designated initializer. Stores the channels/registrar, creates the event
 * channel, builds the RTCPeerConnectionFactory with the ARD video codec
 * factories, and initializes the bookkeeping dictionaries.
 *
 * Fix: previously all of this setup ran OUTSIDE the `if (self)` guard, so a
 * failed [super init] would still create an event channel and register a nil
 * stream handler. All initialization now happens only when self is valid.
 *
 * @param channel   The already-registered method channel.
 * @param registrar The plugin registrar (also used as texture registry).
 * @param messenger The binary messenger used for the event channel.
 */
- (instancetype)initWithChannel:(FlutterMethodChannel *)channel
                      registrar:(NSObject<FlutterPluginRegistrar>*)registrar
                      messenger:(NSObject<FlutterBinaryMessenger>*)messenger {
    self = [super init];
    if (self) {
        _methodChannel = channel;
        _registry = registrar;
        _messenger = messenger;

        // Event channel used to push connection/stream events to Dart.
        FlutterEventChannel *eventChannel =
            [FlutterEventChannel eventChannelWithName:@"cloudwebrtc.com/WebRTC.Event"
                                      binaryMessenger:messenger];
        [eventChannel setStreamHandler:self];
        _eventChannel = eventChannel;

        ARDVideoDecoderFactory *decoderFactory = [[ARDVideoDecoderFactory alloc] init];
        ARDVideoEncoderFactory *encoderFactory = [[ARDVideoEncoderFactory alloc] init];
        _peerConnectionFactory =
            [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory
                                                      decoderFactory:decoderFactory];

        self.peerConnections = [NSMutableDictionary new];
        self.localStreams = [NSMutableDictionary new];
        self.localTracks = [NSMutableDictionary new];
        self.dataChannels = [NSMutableDictionary new];
        self.remoteStreams = [NSMutableDictionary new];
        self.remoteTracks = [NSMutableDictionary new];
    }
    return self;
}
/**
 * Routes Flutter method-channel calls to the matching WebRTC operation.
 *
 * Every branch must invoke |result| exactly once (with a value, a
 * FlutterError, or FlutterMethodNotImplemented) so the corresponding Dart
 * future always completes.
 *
 * Fix: the createOffer, createAnswer and addStream branches previously
 * returned WITHOUT calling |result| when the peer connection (or stream) was
 * not found, leaving the Dart future pending forever. They now report a
 * FlutterError, matching the style of removeStream / set*Description.
 */
- (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult) result {
    if ([@"createPeerConnection" isEqualToString:call.method]) {
        // TODO: parse call.arguments into an RTCConfiguration and media
        // constraints instead of using defaults.
        NSDictionary* argsMap = call.arguments;
        NSString* configurationArgs = argsMap[@"configuration"]; // currently unparsed
        NSString* constraintsArgs = argsMap[@"options"];         // currently unparsed
        RTCConfiguration* configuration = [[RTCConfiguration alloc] init];
        NSDictionary* constraints = nil;
        RTCPeerConnection *peerConnection
            = [self.peerConnectionFactory
               peerConnectionWithConfiguration:configuration
               constraints:[self parseMediaConstraints:constraints]
               delegate:self];
        // NOTE(review): registerTexture: normally expects a FlutterTexture
        // provider; passing an RTCPeerConnection looks suspicious — confirm
        // against the texture-registry API.
        int64_t textureId = [_registry registerTexture:peerConnection];
        self.peerConnections[@(textureId)] = peerConnection;
        result(@{ @"textureId" : @(textureId)});
    } else if ([@"getUserMedia" isEqualToString:call.method]) {
        NSDictionary* argsMap = call.arguments;
        NSString* constraintsArgs = argsMap[@"constraints"]; // currently unparsed
        NSDictionary* constraints = nil;
        // Completes |result| with a media-stream id or an error.
        [self getUserMedia:constraints result:result];
    } else if ([@"createOffer" isEqualToString:call.method]) {
        NSDictionary* argsMap = call.arguments;
        int64_t textureId = ((NSNumber*)argsMap[@"textureId"]).integerValue;
        NSDictionary * constraints = (NSDictionary*)argsMap[@"constraints"];
        RTCPeerConnection *peerConnection = self.peerConnections[@(textureId)];
        if (peerConnection) {
            [self peerConnectionCreateOffer:constraints peerConnection:peerConnection result:result];
        } else {
            result([FlutterError errorWithCode:@"CreateOfferFailed"
                                       message:@"Error: pc not found!"
                                       details:nil]);
        }
    } else if ([@"createAnswer" isEqualToString:call.method]) {
        NSDictionary* argsMap = call.arguments;
        int64_t textureId = ((NSNumber*)argsMap[@"textureId"]).integerValue;
        NSDictionary * constraints = (NSDictionary*)argsMap[@"constraints"];
        RTCPeerConnection *peerConnection = self.peerConnections[@(textureId)];
        if (peerConnection) {
            [self peerConnectionCreateAnswer:constraints
                              peerConnection:peerConnection
                                      result:result];
        } else {
            result([FlutterError errorWithCode:@"CreateAnswerFailed"
                                       message:@"Error: pc not found!"
                                       details:nil]);
        }
    } else if ([@"addStream" isEqualToString:call.method]) {
        NSDictionary* argsMap = call.arguments;
        int64_t textureId = ((NSNumber*)argsMap[@"textureId"]).integerValue;
        RTCPeerConnection *peerConnection = self.peerConnections[@(textureId)];
        // NOTE(review): localStreams is declared with NSString keys in the
        // header but is subscripted with an NSNumber here — verify key type.
        int64_t streamId = ((NSNumber*)argsMap[@"streamId"]).integerValue;
        RTCMediaStream *stream = self.localStreams[@(streamId)];
        if (stream && peerConnection) {
            [peerConnection addStream:stream];
            result(@"");
        } else {
            result([FlutterError errorWithCode:@"AddStreamFailed"
                                       message:@"Error: pc or stream not found!"
                                       details:nil]);
        }
    } else if ([@"removeStream" isEqualToString:call.method]) {
        NSDictionary* argsMap = call.arguments;
        int64_t textureId = ((NSNumber*)argsMap[@"textureId"]).integerValue;
        RTCPeerConnection *peerConnection = self.peerConnections[@(textureId)];
        int64_t streamId = ((NSNumber*)argsMap[@"streamId"]).integerValue;
        RTCMediaStream *stream = self.localStreams[@(streamId)];
        if (stream && peerConnection) {
            [peerConnection removeStream:stream];
            result(nil);
        } else {
            result([FlutterError errorWithCode:@"RemoveStreamFailed"
                                       message:@"Error: pc or stream not found!"
                                       details:nil]);
        }
    } else if ([@"setLocalDescription" isEqualToString:call.method]) {
        NSDictionary* argsMap = call.arguments;
        int64_t textureId = ((NSNumber*)argsMap[@"textureId"]).integerValue;
        RTCPeerConnection *peerConnection = self.peerConnections[@(textureId)];
        if (peerConnection) {
            [self peerConnectionSetLocalDescription:nil peerConnection:peerConnection result:result];
        } else {
            result([FlutterError errorWithCode:@"SetLocalDescriptionFailed"
                                       message:@"Error: pc not found!"
                                       details:nil]);
        }
    } else if ([@"setRemoteDescription" isEqualToString:call.method]) {
        NSDictionary* argsMap = call.arguments;
        int64_t textureId = ((NSNumber*)argsMap[@"textureId"]).integerValue;
        RTCPeerConnection *peerConnection = self.peerConnections[@(textureId)];
        if (peerConnection) {
            [self peerConnectionSetRemoteDescription:nil peerConnection:peerConnection result:result];
        } else {
            result([FlutterError errorWithCode:@"SetRemoteDescriptionFailed"
                                       message:@"Error: pc not found!"
                                       details:nil]);
        }
    } else {
        result(FlutterMethodNotImplemented);
    }
}
/**
 * Tears down tracked WebRTC objects: peer connections are explicitly closed
 * and detached from this delegate; ARC releases everything else.
 */
- (void)dealloc {
    [_localTracks removeAllObjects];
    _localTracks = nil;
    [_localStreams removeAllObjects];
    _localStreams = nil;
    for (RTCPeerConnection *connection in _peerConnections.allValues) {
        connection.delegate = nil;
        [connection close];
    }
    [_peerConnections removeAllObjects];
    _peerConnectionFactory = nil;
}
/**
 * Looks up a media stream by id: first among the local streams, then among
 * the remote streams of every known peer connection.
 *
 * NOTE(review): _localStreams is declared with NSString keys but is probed
 * here with an NSNumber, and RTCPeerConnection.remoteStreams is subscripted
 * the same way — verify both against the header/SDK types.
 *
 * @param textureId The id of the stream to find.
 * @return The matching RTCMediaStream, or nil if none is known.
 */
- (RTCMediaStream*)streamForTextureId:(NSNumber*)textureId
{
    RTCMediaStream *localStream = _localStreams[textureId];
    if (localStream) {
        return localStream;
    }
    for (RTCPeerConnection *connection in _peerConnections.allValues) {
        RTCMediaStream *remoteStream = connection.remoteStreams[textureId];
        if (remoteStream) {
            return remoteStream;
        }
    }
    return nil;
}
@end

View File

@ -0,0 +1,55 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <WebRTC/RTCMacros.h>
#import <WebRTC/RTCVideoSource.h>
@class AVCaptureSession;
@class RTCMediaConstraints;
@class RTCPeerConnectionFactory;
NS_ASSUME_NONNULL_BEGIN
/**
* DEPRECATED Use RTCCameraVideoCapturer instead.
*
* RTCAVFoundationVideoSource is a video source that uses
* webrtc::AVFoundationVideoCapturer. We do not currently provide a wrapper for
* that capturer because cricket::VideoCapturer is not ref counted and we cannot
* guarantee its lifetime. Instead, we expose its properties through the ref
* counted video source interface.
*/
// NOTE(review): vendored verbatim from the WebRTC iOS SDK; do not edit
// locally — update by syncing with upstream.
RTC_EXPORT
@interface RTCAVFoundationVideoSource : RTCVideoSource
- (instancetype)init NS_UNAVAILABLE;
/**
 * Calling this function will cause frames to be scaled down to the
 * requested resolution. Also, frames will be cropped to match the
 * requested aspect ratio, and frames will be dropped to match the
 * requested fps. The requested aspect ratio is orientation agnostic and
 * will be adjusted to maintain the input orientation, so it doesn't
 * matter if e.g. 1280x720 or 720x1280 is requested.
 */
- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps;
/** Returns whether rear-facing camera is available for use. */
@property(nonatomic, readonly) BOOL canUseBackCamera;
/** Switches the camera being used (either front or back). */
@property(nonatomic, assign) BOOL useBackCamera;
/** Returns the active capture session. */
@property(nonatomic, readonly) AVCaptureSession *captureSession;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,248 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import <WebRTC/RTCMacros.h>
NS_ASSUME_NONNULL_BEGIN
extern NSString * const kRTCAudioSessionErrorDomain;
/** Method that requires lock was called without lock. */
extern NSInteger const kRTCAudioSessionErrorLockRequired;
/** Unknown configuration error occurred. */
extern NSInteger const kRTCAudioSessionErrorConfiguration;
@class RTCAudioSession;
@class RTCAudioSessionConfiguration;
// Surfaces AVAudioSession events. WebRTC will listen directly for notifications
// from AVAudioSession and handle them before calling these delegate methods,
// at which point applications can perform additional processing if required.
// NOTE(review): vendored verbatim from the WebRTC iOS SDK; do not edit
// locally — update by syncing with upstream.
RTC_EXPORT
@protocol RTCAudioSessionDelegate <NSObject>
@optional
/** Called on a system notification thread when AVAudioSession starts an
 * interruption event.
 */
- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session;
/** Called on a system notification thread when AVAudioSession ends an
 * interruption event.
 */
- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
shouldResumeSession:(BOOL)shouldResumeSession;
/** Called on a system notification thread when AVAudioSession changes the
 * route.
 */
- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
/** Called on a system notification thread when AVAudioSession media server
 * terminates.
 */
- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session;
/** Called on a system notification thread when AVAudioSession media server
 * restarts.
 */
- (void)audioSessionMediaServerReset:(RTCAudioSession *)session;
// TODO(tkchin): Maybe handle SilenceSecondaryAudioHintNotification.
- (void)audioSession:(RTCAudioSession *)session
didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
/** Called on a WebRTC thread when the audio device is notified to begin
 * playback or recording.
 */
- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session;
/** Called on a WebRTC thread when the audio device is notified to stop
 * playback or recording.
 */
- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session;
/** Called when the AVAudioSession output volume value changes. */
- (void)audioSession:(RTCAudioSession *)audioSession
didChangeOutputVolume:(float)outputVolume;
/** Called when the audio device detects a playout glitch. The argument is the
 * number of glitches detected so far in the current audio playout session.
 */
- (void)audioSession:(RTCAudioSession *)audioSession
didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;
@end
/** This is a protocol used to inform RTCAudioSession when the audio session
* activation state has changed outside of RTCAudioSession. The current known use
* case of this is when CallKit activates the audio session for the application
*/
// NOTE(review): vendored verbatim from the WebRTC iOS SDK; do not edit
// locally — update by syncing with upstream.
RTC_EXPORT
@protocol RTCAudioSessionActivationDelegate <NSObject>
/** Called when the audio session is activated outside of the app by iOS. */
- (void)audioSessionDidActivate:(AVAudioSession *)session;
/** Called when the audio session is deactivated outside of the app by iOS. */
- (void)audioSessionDidDeactivate:(AVAudioSession *)session;
@end
/** Proxy class for AVAudioSession that adds a locking mechanism similar to
* AVCaptureDevice. This is used to that interleaving configurations between
* WebRTC and the application layer are avoided.
*
* RTCAudioSession also coordinates activation so that the audio session is
* activated only once. See |setActive:error:|.
*/
// NOTE(review): vendored verbatim from the WebRTC iOS SDK; do not edit
// locally — update by syncing with upstream.
RTC_EXPORT
@interface RTCAudioSession : NSObject <RTCAudioSessionActivationDelegate>
/** Convenience property to access the AVAudioSession singleton. Callers should
 * not call setters on AVAudioSession directly, but other method invocations
 * are fine.
 */
@property(nonatomic, readonly) AVAudioSession *session;
/** Our best guess at whether the session is active based on results of calls to
 * AVAudioSession.
 */
@property(nonatomic, readonly) BOOL isActive;
/** Whether RTCAudioSession is currently locked for configuration. */
@property(nonatomic, readonly) BOOL isLocked;
/** If YES, WebRTC will not initialize the audio unit automatically when an
 * audio track is ready for playout or recording. Instead, applications should
 * call setIsAudioEnabled. If NO, WebRTC will initialize the audio unit
 * as soon as an audio track is ready for playout or recording.
 */
@property(nonatomic, assign) BOOL useManualAudio;
/** This property is only effective if useManualAudio is YES.
 * Represents permission for WebRTC to initialize the VoIP audio unit.
 * When set to NO, if the VoIP audio unit used by WebRTC is active, it will be
 * stopped and uninitialized. This will stop incoming and outgoing audio.
 * When set to YES, WebRTC will initialize and start the audio unit when it is
 * needed (e.g. due to establishing an audio connection).
 * This property was introduced to work around an issue where if an AVPlayer is
 * playing audio while the VoIP audio unit is initialized, its audio would be
 * either cut off completely or played at a reduced volume. By preventing
 * the audio unit from being initialized until after the audio has completed,
 * we are able to prevent the abrupt cutoff.
 */
@property(nonatomic, assign) BOOL isAudioEnabled;
// Proxy properties.
@property(readonly) NSString *category;
@property(readonly) AVAudioSessionCategoryOptions categoryOptions;
@property(readonly) NSString *mode;
@property(readonly) BOOL secondaryAudioShouldBeSilencedHint;
@property(readonly) AVAudioSessionRouteDescription *currentRoute;
@property(readonly) NSInteger maximumInputNumberOfChannels;
@property(readonly) NSInteger maximumOutputNumberOfChannels;
@property(readonly) float inputGain;
@property(readonly) BOOL inputGainSettable;
@property(readonly) BOOL inputAvailable;
@property(readonly, nullable)
NSArray<AVAudioSessionDataSourceDescription *> * inputDataSources;
@property(readonly, nullable)
AVAudioSessionDataSourceDescription *inputDataSource;
@property(readonly, nullable)
NSArray<AVAudioSessionDataSourceDescription *> * outputDataSources;
@property(readonly, nullable)
AVAudioSessionDataSourceDescription *outputDataSource;
@property(readonly) double sampleRate;
@property(readonly) double preferredSampleRate;
@property(readonly) NSInteger inputNumberOfChannels;
@property(readonly) NSInteger outputNumberOfChannels;
@property(readonly) float outputVolume;
@property(readonly) NSTimeInterval inputLatency;
@property(readonly) NSTimeInterval outputLatency;
@property(readonly) NSTimeInterval IOBufferDuration;
@property(readonly) NSTimeInterval preferredIOBufferDuration;
/** Default constructor. */
+ (instancetype)sharedInstance;
- (instancetype)init NS_UNAVAILABLE;
/** Adds a delegate, which is held weakly. */
- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate;
/** Removes an added delegate. */
- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate;
/** Request exclusive access to the audio session for configuration. This call
 * will block if the lock is held by another object.
 */
- (void)lockForConfiguration;
/** Relinquishes exclusive access to the audio session. */
- (void)unlockForConfiguration;
/** If |active|, activates the audio session if it isn't already active.
 * Successful calls must be balanced with a setActive:NO when activation is no
 * longer required. If not |active|, deactivates the audio session if one is
 * active and this is the last balanced call. When deactivating, the
 * AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation option is passed to
 * AVAudioSession.
 */
- (BOOL)setActive:(BOOL)active
error:(NSError **)outError;
// The following methods are proxies for the associated methods on
// AVAudioSession. |lockForConfiguration| must be called before using them
// otherwise they will fail with kRTCAudioSessionErrorLockRequired.
- (BOOL)setCategory:(NSString *)category
withOptions:(AVAudioSessionCategoryOptions)options
error:(NSError **)outError;
- (BOOL)setMode:(NSString *)mode error:(NSError **)outError;
- (BOOL)setInputGain:(float)gain error:(NSError **)outError;
- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError;
- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
error:(NSError **)outError;
- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
error:(NSError **)outError;
- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
error:(NSError **)outError;
- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
error:(NSError **)outError;
- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
error:(NSError **)outError;
- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
error:(NSError **)outError;
- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
error:(NSError **)outError;
@end
// NOTE(review): vendored verbatim from the WebRTC iOS SDK; do not edit
// locally — update by syncing with upstream.
@interface RTCAudioSession (Configuration)
/** Applies the configuration to the current session. Attempts to set all
 * properties even if previous ones fail. Only the last error will be
 * returned.
 * |lockForConfiguration| must be called first.
 */
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
error:(NSError **)outError;
/** Convenience method that calls both setConfiguration and setActive.
 * |lockForConfiguration| must be called first.
 */
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
active:(BOOL)active
error:(NSError **)outError;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,48 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#import "WebRTC/RTCMacros.h"
NS_ASSUME_NONNULL_BEGIN
extern const int kRTCAudioSessionPreferredNumberOfChannels;
extern const double kRTCAudioSessionHighPerformanceSampleRate;
extern const double kRTCAudioSessionLowComplexitySampleRate;
extern const double kRTCAudioSessionHighPerformanceIOBufferDuration;
extern const double kRTCAudioSessionLowComplexityIOBufferDuration;
// Struct to hold configuration values.
// NOTE(review): vendored verbatim from the WebRTC iOS SDK; do not edit
// locally — update by syncing with upstream.
RTC_EXPORT
@interface RTCAudioSessionConfiguration : NSObject
@property(nonatomic, strong) NSString *category;
@property(nonatomic, assign) AVAudioSessionCategoryOptions categoryOptions;
@property(nonatomic, strong) NSString *mode;
@property(nonatomic, assign) double sampleRate;
@property(nonatomic, assign) NSTimeInterval ioBufferDuration;
@property(nonatomic, assign) NSInteger inputNumberOfChannels;
@property(nonatomic, assign) NSInteger outputNumberOfChannels;
/** Initializes configuration to defaults. */
- (instancetype)init NS_DESIGNATED_INITIALIZER;
/** Returns the current configuration of the audio session. */
+ (instancetype)currentConfiguration;
/** Returns the configuration that WebRTC needs. */
+ (instancetype)webRTCConfiguration;
/** Provide a way to override the default configuration. */
+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration;
@end
NS_ASSUME_NONNULL_END

Some files were not shown because too many files have changed in this diff Show More