diff --git a/CHANGELOG.md b/CHANGELOG.md
index edac0eccd1..9f30de6647 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,16 @@
 # Changelog
 
+[1.1.0] - 2025-08-20
+
+* [Apple/Android] feat: Add H265/HEVC support.
+* [Mobile/Desktop] feat: Support writing logs with Logger (logger package) (#1891)
+* [Android] fix: Reduce recording stop delay and prevent encoder OOM crashes (#1912)
+* [Native/Web] feat: Add a setVolume method to video renderers (#1904)
+* [Web] feat: Add texture-based video rendering for web (#1911)
+* [Android] fix: Recording: add fallback resolutions for unsupported stream frame sizes on low-end devices (#1900)
+* [Android] fix: Update proguard-rules.pro (#1902)
+
 [1.0.0] - 2025-07-25
 
 * Bump version to 1.0.0
diff --git a/android/build.gradle b/android/build.gradle
index 0d6571a581..1ae5ddcc9b 100644
--- a/android/build.gradle
+++ b/android/build.gradle
@@ -2,7 +2,7 @@ group 'com.cloudwebrtc.webrtc'
 version '1.0-SNAPSHOT'
 
 buildscript {
-    ext.kotlin_version = '1.7.10'
+    ext.kotlin_version = '1.8.10'
     repositories {
         google()
         mavenCentral()
@@ -47,12 +47,12 @@ android {
     }
 
     kotlinOptions {
-        jvmTarget = '1.8'
+        jvmTarget = JavaVersion.VERSION_1_8
     }
 }
 
 dependencies {
-    implementation 'io.github.webrtc-sdk:android:137.7151.01'
+    implementation 'io.github.webrtc-sdk:android:137.7151.03'
     implementation 'com.github.davidliu:audioswitch:89582c47c9a04c62f90aa5e57251af4800a62c9a'
     implementation 'androidx.annotation:annotation:1.1.0'
     implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
diff --git a/android/proguard-rules.pro b/android/proguard-rules.pro
index 6ce9896196..699b363c62 100644
--- a/android/proguard-rules.pro
+++ b/android/proguard-rules.pro
@@ -1,3 +1,4 @@
 # Flutter WebRTC
 -keep class com.cloudwebrtc.webrtc.** { *; }
 -keep class org.webrtc.** { *; }
+-keep class org.jni_zero.** { *; }
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java
index 3a49f88c85..d89377bcdf 100644
--- a/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/FlutterWebRTCPlugin.java
@@ -41,7 +41,11 @@ public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware, EventC
   private LifeCycleObserver observer;
   private Lifecycle lifecycle;
   private EventChannel eventChannel;
-  public EventChannel.EventSink eventSink;
+
+  // eventSink is static because FlutterWebRTCPlugin can be instantiated multiple times,
+  // but onListen(Object, EventChannel.EventSink) only fires for the first instance;
+  // without a shared field, eventSink would be null for every later instance.
+  public static EventChannel.EventSink eventSink;
 
   public FlutterWebRTCPlugin() {
     sharedSingleton = this;
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java
index dfa5de8e7a..c580832498 100644
--- a/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/MethodCallHandlerImpl.java
@@ -49,6 +49,8 @@
 import org.webrtc.EglBase;
 import org.webrtc.IceCandidate;
 import org.webrtc.Logging;
+import org.webrtc.Logging.Severity;
+import org.webrtc.Loggable;
 import org.webrtc.MediaConstraints;
 import org.webrtc.MediaConstraints.KeyValuePair;
 import org.webrtc.MediaStream;
@@ -133,6 +135,18 @@ public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider {
   public AudioProcessingController audioProcessingController;
 
+  public static class LogSink implements Loggable {
+    @Override
+    public void onLogMessage(String message, Severity sev, String tag) {
+      ConstraintsMap params = new ConstraintsMap();
+      params.putString("event", "onLogData");
+      params.putString("data", message);
+      FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
+    }
+  }
+
+  public static LogSink logSink = new LogSink();
+
   MethodCallHandlerImpl(Context context, BinaryMessenger messenger, TextureRegistry textureRegistry) {
     this.context = context;
     this.textures = textureRegistry;
@@ -161,7 +175,7 @@ void dispose() {
     mPeerConnectionObservers.clear();
   }
 
   private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, boolean forceSWCodec, List<String> forceSWCodecList,
-                          @Nullable ConstraintsMap androidAudioConfiguration) {
+                          @Nullable ConstraintsMap androidAudioConfiguration, Severity logSeverity) {
     if (mFactory != null) {
       return;
     }
@@ -169,6 +183,7 @@ private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, bo
     PeerConnectionFactory.initialize(
         InitializationOptions.builder(context)
             .setEnableInternalTracer(true)
+            .setInjectableLogger(logSink, logSeverity)
             .createInitializationOptions());
 
     getUserMediaImpl = new GetUserMediaImpl(this, context);
@@ -342,7 +357,15 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
         if(options.get("bypassVoiceProcessing") != null) {
           enableBypassVoiceProcessing = (boolean)options.get("bypassVoiceProcessing");
         }
-        initialize(enableBypassVoiceProcessing, networkIgnoreMask, forceSWCodec, forceSWCodecList, androidAudioConfiguration);
+
+        Severity logSeverity = Severity.LS_NONE;
+        if (constraintsMap.hasKey("logSeverity")
+            && constraintsMap.getType("logSeverity") == ObjectType.String) {
+          String logSeverityStr = constraintsMap.getString("logSeverity");
+          logSeverity = str2LogSeverity(logSeverityStr);
+        }
+
+        initialize(enableBypassVoiceProcessing, networkIgnoreMask, forceSWCodec, forceSWCodecList, androidAudioConfiguration, logSeverity);
         result.success(null);
         break;
       }
@@ -1014,6 +1037,11 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
         }
         break;
       }
+      case "setLogSeverity": {
+        // Currently logSeverity can only be set via the PeerConnectionFactory.initialize method.
+        // Log.d(TAG, "no implementation for 'setLogSeverity'");
+        break;
+      }
       default:
         if(frameCryptor.handleMethodCall(call, result)) {
           break;
@@ -2020,6 +2048,22 @@ private void removeTrackForRendererById(String trackId) {
     }
   }
 
+  private Severity str2LogSeverity(String severity) {
+    switch (severity) {
+      case "verbose":
+        return Severity.LS_VERBOSE;
+      case "info":
+        return Severity.LS_INFO;
+      case "warning":
+        return Severity.LS_WARNING;
+      case "error":
+        return Severity.LS_ERROR;
+      case "none":
+      default:
+        return Severity.LS_NONE;
+    }
+  }
+
   public void createDataChannel(final String peerConnectionId, String label, ConstraintsMap config,
                                 Result result) {
     // Forward to PeerConnectionObserver which deals with DataChannels
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/EncoderConfig.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/EncoderConfig.java
new file mode 100644
index 0000000000..129dfc55f7
--- /dev/null
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/EncoderConfig.java
@@ -0,0 +1,20 @@
+package com.cloudwebrtc.webrtc.record;
+
+class EncoderConfig {
+    final int width;
+    final int height;
+    final int bitrate;
+    final int profile;
+
+    EncoderConfig(int width, int height, int bitrate, int profile) {
+        this.width = width;
+        this.height = height;
+        this.bitrate = bitrate;
+        this.profile = profile;
+    }
+
+    @Override
+    public String toString() {
+        return width + "x" + height + ", bitrate: " + bitrate + ", profile: " + profile;
+    }
+}
\ No newline at end of file
diff --git a/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java b/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java
index 275b33264a..f8c927b917 100644
--- a/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java
+++ b/android/src/main/java/com/cloudwebrtc/webrtc/record/VideoFileRenderer.java
@@ -6,6 +6,7 @@
 import android.media.MediaCodecInfo;
 import android.media.MediaFormat;
 import android.media.MediaMuxer;
+import android.os.Build;
 import android.os.Handler;
 import android.os.HandlerThread;
 import android.util.Log;
@@ -20,7 +21,13 @@ import org.webrtc.audio.JavaAudioDeviceModule.SamplesReadyCallback;
 
 import java.io.IOException;
+import java.lang.reflect.Method;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
 import java.util.concurrent.CountDownLatch;
 
 class VideoFileRenderer implements VideoSink, SamplesReadyCallback {
@@ -77,51 +84,213 @@ class VideoFileRenderer implements VideoSink, SamplesReadyCallback {
     audioTrackIndex = withAudio ? -1 : 0;
   }
 
+  private boolean tryConfigureEncoder(EncoderConfig config) {
+    try {
+      MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, config.width, config.height);
+      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
+      format.setInteger(MediaFormat.KEY_BIT_RATE, config.bitrate);
+      format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
+      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
+      // Use YUV420 semi-planar size (1.5 bytes per pixel) to reduce memory usage
+      format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, config.width * config.height * 3 / 2);
+      format.setInteger(MediaFormat.KEY_PRIORITY, 0);
+      format.setInteger(MediaFormat.KEY_PROFILE, config.profile);
 
-  private void initVideoEncoder() {
-    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, outputFileWidth, outputFileHeight);
+      Log.d(TAG, "Trying encoder config: " + config);
 
-    // Set some properties. Failing to specify some of these can cause the MediaCodec
-    // configure() call to throw an unhelpful exception.
-    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
-        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
-    format.setInteger(MediaFormat.KEY_BIT_RATE, 6000000);
-    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
-    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
+      encoder = MediaCodec.createEncoderByType(MIME_TYPE);
+      String codecName = encoder.getName();
+      Log.d(TAG, "Codec name: " + codecName);
 
-    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0); // For Surface input
-    format.setInteger(MediaFormat.KEY_PRIORITY, 0); // Background priority
-    format.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline); // AVC baseline
+      encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+      // Create input surface *before* starting the encoder
+      surface = encoder.createInputSurface();
+      Log.d(TAG, "Input surface created successfully: " + surface);
+      return true;
+    } catch (Exception e) {
+      Log.w(TAG, "Failed to configure encoder for config: " + config + ", error: " + e.getMessage());
+      if (surface != null) {
+        surface.release();
+        surface = null;
+      }
+      if (encoder != null) {
+        try {
+          encoder.release();
+        } catch (Exception ignored) {
+        }
+        encoder = null;
+      }
+      return false;
+    }
+  }
 
-    // Create a MediaCodec encoder, and configure it with our format. Get a Surface
-    // we can use for input and wrap it with a class that handles the EGL work.
+  private boolean startEncoder() {
     try {
-      encoder = MediaCodec.createEncoderByType(MIME_TYPE);
-      encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+      encoder.start();
+      encoderOutputBuffers = encoder.getOutputBuffers();
+      Log.d(TAG, "Encoder started successfully");
+      return true;
+    } catch (Exception e) {
+      Log.w(TAG, "Failed to start encoder: " + e.getMessage());
+      if (surface != null) {
+        surface.release();
+        surface = null;
+      }
+      if (encoder != null) {
+        try {
+          encoder.release();
+        } catch (Exception ignored) {
+        }
+        encoder = null;
+      }
+      return false;
+    }
+  }
 
-      CountDownLatch latch = new CountDownLatch(1);
-      renderThreadHandler.post(() -> {
-        eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE);
-        surface = encoder.createInputSurface();
-        eglBase.createSurface(surface);
-        eglBase.makeCurrent();
-        drawer = new GlRectDrawer();
-        latch.countDown();
-      });
-      latch.await(); // wait for EGL to be ready
+  private List<EncoderConfig> getSupportedConfigurations(int frameWidth, int frameHeight) {
+
+    int[] bitrates = {6000000, 4000000, 2000000, 1000000};
+    int[] profiles = {
+        MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline,
+        MediaCodecInfo.CodecProfileLevel.AVCProfileMain,
+        MediaCodecInfo.CodecProfileLevel.AVCProfileHigh
+    };
+    List<int[]> resolutions = new ArrayList<>();
+    resolutions.add(new int[]{frameWidth, frameHeight});
+    for (int[] res : Arrays.asList(
+        new int[]{1920, 1080},
+        new int[]{1280, 720},
+        new int[]{854, 480},
+        new int[]{640, 360},
+        new int[]{426, 240})) {
+      // only add resolutions below the original stream resolution
+      if (res[0] <= frameWidth && res[1] <= frameHeight) {
+        resolutions.add(res);
+      }
+    }
+
+    List<EncoderConfig> configs = new ArrayList<>();
+    for (int[] res : resolutions) {
+      for (int bitrate : bitrates) {
+        for (int profile : profiles) {
+          configs.add(new EncoderConfig(res[0], res[1], bitrate, profile));
+        }
+      }
+    }
+
+    // Sort: prioritize higher resolutions, higher bitrates, Baseline profile
+    Collections.sort(configs, new Comparator<EncoderConfig>() {
+      @Override
+      public int compare(EncoderConfig c1, EncoderConfig c2) {
+        int resCompare = Integer.compare(c2.width * c2.height, c1.width * c1.height);
+        if (resCompare != 0) return resCompare;
+        int bitrateCompare = Integer.compare(c2.bitrate, c1.bitrate);
+        if (bitrateCompare != 0) return bitrateCompare;
+        return Integer.compare(c1.profile, c2.profile); // Baseline first
+      }
+    });
+
+    return configs;
+  }
+
+  private boolean isProfileSupported(MediaCodecInfo codecInfo, String mimeType, int profile) {
+    try {
+      MediaCodecInfo.CodecCapabilities caps = codecInfo.getCapabilitiesForType(mimeType);
+      for (MediaCodecInfo.CodecProfileLevel pl : caps.profileLevels) {
+        if (pl.profile == profile) {
+          return true;
+        }
+      }
     } catch (Exception e) {
-      Log.wtf(TAG, e);
-      Thread.currentThread().interrupt();
+      Log.w(TAG, "Failed to check profile support: " + e.getMessage());
     }
+    return false;
   }
+
+  private void initVideoEncoder(int frameWidth, int frameHeight) {
+    if (encoder != null) {
+      encoder.stop();
+      encoder.release();
+      encoder = null;
+    }
+    if (surface != null) {
+      surface.release();
+      surface = null;
+    }
+
+    // Check codec capabilities
+    MediaCodecInfo codecInfo = null;
+    try {
+      MediaCodec codec = MediaCodec.createEncoderByType(MIME_TYPE);
+      codecInfo = codec.getCodecInfo();
+      codec.release();
+    } catch (Exception e) {
+      Log.e(TAG, "Failed to get codec info: " + e.getMessage());
+    }
+
+    List<EncoderConfig> configs = getSupportedConfigurations(frameWidth, frameHeight);
+
+    for (EncoderConfig config : configs) {
+      // Skip unsupported configurations
+      if (codecInfo != null) {
+        MediaCodecInfo.VideoCapabilities videoCaps = codecInfo.getCapabilitiesForType(MIME_TYPE).getVideoCapabilities();
+        if (!videoCaps.isSizeSupported(config.width, config.height)) {
+          Log.d(TAG, "Skipping unsupported resolution: " + config);
+          continue;
+        }
+        if (!videoCaps.getBitrateRange().contains(config.bitrate)) {
+          Log.d(TAG, "Skipping unsupported bitrate: " + config);
+          continue;
+        }
+        if (!isProfileSupported(codecInfo, MIME_TYPE, config.profile)) {
+          Log.d(TAG, "Skipping unsupported profile: " + config);
+          continue;
+        }
+      }
+
+      if (tryConfigureEncoder(config) && startEncoder()) {
+        outputFileWidth = config.width;
+        outputFileHeight = config.height;
+        CountDownLatch latch = new CountDownLatch(1);
+        renderThreadHandler.post(() -> {
+          try {
+            eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE);
+            Log.d(TAG, "EGL context created");
+            eglBase.createSurface(surface);
+            eglBase.makeCurrent();
+            drawer = new GlRectDrawer();
+            encoderStarted = true;
+            encoderInitializing = false;
+            Log.d(TAG, "Encoder surface setup complete: " + surface);
+          } catch (Exception e) {
+            Log.e(TAG, "Failed to setup EGL surface: " + e.getMessage());
+          } finally {
+            latch.countDown();
+          }
+        });
+        try {
+          latch.await();
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+          Log.e(TAG, "Interrupted while awaiting EGL setup: " + e.getMessage());
+        }
+        if (encoderStarted) {
+          return;
+        }
+      }
+    }
+
+    Log.e(TAG, "Failed to configure and start encoder with any supported configuration.");
+  }
 
   @Override
   public void onFrame(VideoFrame frame) {
     frame.retain();
-    if (outputFileWidth == -1) {
-      outputFileWidth = frame.getRotatedWidth();
-      outputFileHeight = frame.getRotatedHeight();
-      initVideoEncoder();
+    if (outputFileWidth == -1 && !encoderInitializing) {
+      encoderInitializing = true;
+      int frameWidth = frame.getRotatedWidth();
+      int frameHeight = frame.getRotatedHeight();
+      initVideoEncoder(frameWidth, frameHeight);
     }
     renderThreadHandler.post(() -> renderFrameOnRenderThread(frame));
   }
@@ -192,18 +361,12 @@ void release() {
     }
   }
   // End Signify modification
-
+  private boolean encoderInitializing = false;
   private boolean encoderStarted = false;
   private volatile boolean muxerStarted = false;
   private long videoFrameStart = 0;
 
   private void drainEncoder() {
-    if (!encoderStarted) {
-      encoder.start();
-      encoderOutputBuffers = encoder.getOutputBuffers();
-      encoderStarted = true;
-      return;
-    }
     while (true) {
       int encoderStatus = encoder.dequeueOutputBuffer(bufferInfo, 10000);
       if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
@@ -259,11 +422,15 @@ private void drainEncoder() {
 
   private long presTime = 0L;
 
+
+
   private void drainAudio() {
     if (audioBufferInfo == null)
       audioBufferInfo = new MediaCodec.BufferInfo();
+
     while (true) {
-      int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 10000);
+      int encoderStatus = audioEncoder.dequeueOutputBuffer(audioBufferInfo, 1000);
+
       if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
         break;
       } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
@@ -285,24 +452,30 @@ private void drainAudio() {
         if (!muxerStarted)
           break;
       } else if (encoderStatus < 0) {
-        Log.e(TAG, "unexpected result fr om encoder.dequeueOutputBuffer: " + encoderStatus);
+        Log.e(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
       } else { // encoderStatus >= 0
+        try {
           ByteBuffer encodedData = audioOutputBuffers[encoderStatus];
           if (encodedData == null) {
             Log.e(TAG, "encoderOutputBuffer " + encoderStatus + " was null");
            break;
          }
+
          // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
          encodedData.position(audioBufferInfo.offset);
          encodedData.limit(audioBufferInfo.offset + audioBufferInfo.size);
+
          if (muxerStarted)
            mediaMuxer.writeSampleData(audioTrackIndex, encodedData, audioBufferInfo);
+
          isRunning = isRunning && (audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
          audioEncoder.releaseOutputBuffer(encoderStatus, false);
+
          if ((audioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            break;
          }
+
+        } catch (Exception e) {
          Log.wtf(TAG, e);
          break;
@@ -331,6 +504,7 @@ public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples a
     } catch (IOException exception) {
       Log.wtf(TAG, exception);
     }
+
     int bufferIndex = audioEncoder.dequeueInputBuffer(0);
     if (bufferIndex >= 0) {
       ByteBuffer buffer = audioInputBuffers[bufferIndex];
diff --git a/common/cpp/include/flutter_webrtc.h b/common/cpp/include/flutter_webrtc.h
index 573956b9aa..526f07b877 100644
--- a/common/cpp/include/flutter_webrtc.h
+++ b/common/cpp/include/flutter_webrtc.h
@@ -11,6 +11,7 @@
 #include "flutter_video_renderer.h"
 
 #include "libwebrtc.h"
+#include "rtc_logging.h"
 
 namespace flutter_webrtc_plugin {
 
@@ -38,6 +39,10 @@ class FlutterWebRTC : public FlutterWebRTCBase,
   void HandleMethodCall(const MethodCallProxy& method_call,
                         std::unique_ptr<MethodResultProxy> result);
+
+ private:
+  void initLoggerCallback(RTCLoggingSeverity severity);
+  RTCLoggingSeverity str2LogSeverity(std::string str);
 };
 
 }  // namespace flutter_webrtc_plugin
diff --git a/common/cpp/src/flutter_webrtc.cc b/common/cpp/src/flutter_webrtc.cc
index 5e0afb6e17..6ab12d316c 100644
--- a/common/cpp/src/flutter_webrtc.cc
+++ b/common/cpp/src/flutter_webrtc.cc
@@ -1,9 +1,12 @@
 #include "flutter_webrtc.h"
 
+#include "flutter_data_channel.h"
 #include "flutter_webrtc/flutter_web_r_t_c_plugin.h"
 
 namespace flutter_webrtc_plugin {
 
+static EventChannelProxy* eventChannelProxy = nullptr;
+
 FlutterWebRTC::FlutterWebRTC(FlutterWebRTCPlugin* plugin)
     : FlutterWebRTCBase::FlutterWebRTCBase(plugin->messenger(),
                                            plugin->textures(),
@@ -24,6 +27,11 @@ void FlutterWebRTC::HandleMethodCall(
     const EncodableMap params = GetValue<EncodableMap>(*method_call.arguments());
     const EncodableMap options = findMap(params, "options");
+    std::string severityStr = findString(options, "logSeverity");
+    if (severityStr.empty() == false) {
+      RTCLoggingSeverity severity = str2LogSeverity(severityStr);
+      initLoggerCallback(severity);
+    }
     result->Success();
   } else if (method_call.method_name().compare("createPeerConnection") == 0) {
     if (!method_call.arguments()) {
@@ -1266,6 +1274,18 @@ void FlutterWebRTC::HandleMethodCall(
     state[EncodableValue("state")] =
         peerConnectionStateString(pc->peer_connection_state());
     result->Success(EncodableValue(state));
+  } else if (method_call.method_name().compare("setLogSeverity") == 0) {
+    if (!method_call.arguments()) {
+      result->Error("Bad Arguments", "Bad arguments received");
+      return;
+    }
+    const EncodableMap params =
+        GetValue<EncodableMap>(*method_call.arguments());
+    std::string severityStr = findString(params, "severity");
+    if (severityStr.empty() == false) {
+      RTCLoggingSeverity severity = str2LogSeverity(severityStr);
+      initLoggerCallback(severity);
+    }
   } else {
     if (HandleFrameCryptorMethodCall(method_call, std::move(result), &result)) {
       return;
@@ -1275,4 +1295,32 @@ void FlutterWebRTC::HandleMethodCall(
   }
 }
 
+void FlutterWebRTC::initLoggerCallback(RTCLoggingSeverity severity) {
+  if(eventChannelProxy == nullptr) {
+    eventChannelProxy = event_channel();
+  }
+
+  libwebrtc::LibWebRTCLogging::setLogSink(severity, [](const string& message){
+    EncodableMap info;
+    info[EncodableValue("event")] = "onLogData";
+    info[EncodableValue("data")] = message.c_string();
+    eventChannelProxy->Success(EncodableValue(info), false);
+  });
+}
+
+RTCLoggingSeverity FlutterWebRTC::str2LogSeverity(std::string str) {
+  if(str == "verbose")
+    return Verbose;
+  else if(str == "info")
+    return Info;
+  else if(str == "warning")
+    return Warning;
+  else if(str == "error")
+    return Error;
+  else if(str == "none")
+    return None;
+
+  return None;
+}
+
 }  // namespace flutter_webrtc_plugin
diff --git a/common/darwin/Classes/FlutterWebRTCPlugin.m b/common/darwin/Classes/FlutterWebRTCPlugin.m
index e7102173d3..12f1bc56e5 100644
--- a/common/darwin/Classes/FlutterWebRTCPlugin.m
+++ b/common/darwin/Classes/FlutterWebRTCPlugin.m
@@ -18,6 +18,9 @@
 #import
 #import
 
+#import
+#import
+
 #import "LocalTrack.h"
 #import "LocalAudioTrack.h"
 #import "LocalVideoTrack.h"
@@ -107,6 +110,8 @@ @implementation FlutterWebRTCPlugin {
 #if TARGET_OS_IPHONE
   FLutterRTCVideoPlatformViewFactory *_platformViewFactory;
 #endif
+
+  RTC_OBJC_TYPE(RTCCallbackLogger) * loggerCallback;
 }
 
 static FlutterWebRTCPlugin *sharedSingleton;
@@ -242,9 +247,42 @@ - (void)didSessionRouteChange:(NSNotification*)notification {
 #endif
 }
 
+-(void) initLoggerCallback:(RTCLoggingSeverity)severity {
+  if(loggerCallback == nil) {
+    loggerCallback = [RTC_OBJC_TYPE(RTCCallbackLogger) new];
+    [loggerCallback start:^(NSString *logMessage) {
+      postEvent(self.eventSink, @{
+        @"event" : @"onLogData",
+        @"data" : logMessage
+      });
+    }];
+  }
+
+  loggerCallback.severity = severity;
+}
+
+-(RTCLoggingSeverity)str2LogSeverity:(NSString*)str {
+  if ([@"verbose" isEqualToString:str]) {
+    return RTCLoggingSeverityVerbose;
+  } else if ([@"info" isEqualToString:str]) {
+    return RTCLoggingSeverityInfo;
+  } else if ([@"warning" isEqualToString:str]) {
+    return RTCLoggingSeverityWarning;
+  } else if ([@"error" isEqualToString:str]) {
+    return RTCLoggingSeverityError;
+  } else if ([@"none" isEqualToString:str]) {
+    return RTCLoggingSeverityNone;
+  }
+
+  return RTCLoggingSeverityNone;
+}
+
 - (void)initialize:(NSArray*)networkIgnoreMask
-bypassVoiceProcessing:(BOOL)bypassVoiceProcessing {
-  // RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose);
+    bypassVoiceProcessing:(BOOL)bypassVoiceProcessing
+                 severity:(RTCLoggingSeverity)severity {
+  // RTCSetMinDebugLogLevel(severity);
+  [self initLoggerCallback:severity];
+
   if (!_peerConnectionFactory) {
     VideoDecoderFactory* decoderFactory = [[VideoDecoderFactory alloc] init];
     VideoEncoderFactory* encoderFactory = [[VideoEncoderFactory alloc] init];
@@ -297,7 +335,14 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
     if (options[@"networkIgnoreMask"] != nil) {
       networkIgnoreMask = ((NSArray*)options[@"networkIgnoreMask"]);
     }
-    [self initialize:networkIgnoreMask bypassVoiceProcessing:enableBypassVoiceProcessing];
+    RTCLoggingSeverity severity = RTCLoggingSeverityNone;
+    if (options[@"logSeverity"] != nil) {
+      NSString* severityStr = ((NSString*)options[@"logSeverity"]);
+      severity = [self str2LogSeverity:severityStr];
+    }
+
+    [self initialize:networkIgnoreMask bypassVoiceProcessing:enableBypassVoiceProcessing
+            severity:severity];
     result(@"");
   } else if ([@"createPeerConnection" isEqualToString:call.method]) {
     NSDictionary* argsMap = call.arguments;
@@ -1504,6 +1549,11 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
                                  message:[NSString stringWithFormat:@"Error: peerConnection not found!"]
                                  details:nil]);
     }
+  } else if ([@"setLogSeverity" isEqualToString:call.method]) {
+    NSDictionary* argsMap = call.arguments;
+    NSString* severityStr = argsMap[@"severity"];
+    RTCLoggingSeverity severity = [self str2LogSeverity:severityStr];
+    [self initLoggerCallback:severity];
 #if TARGET_OS_IOS
   } else if ([@"startRecordToFile" isEqualToString:call.method]){
diff --git a/example/android/gradle/wrapper/gradle-wrapper.properties b/example/android/gradle/wrapper/gradle-wrapper.properties
index bc5bce7aa1..73e10df7d4 100644
--- a/example/android/gradle/wrapper/gradle-wrapper.properties
+++ b/example/android/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
 #Sat Nov 09 20:10:39 CST 2024
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
diff --git a/example/android/settings.gradle b/example/android/settings.gradle
index 4034dd79cc..f4e0879bf3 100644
--- a/example/android/settings.gradle
+++ b/example/android/settings.gradle
@@ -18,8 +18,8 @@ pluginManagement {
 
 plugins {
     id "dev.flutter.flutter-plugin-loader" version "1.0.0"
-    id "com.android.application" version "8.3.0" apply false
-    id "org.jetbrains.kotlin.android" version "1.7.10" apply false
+    id "com.android.application" version "8.6.0" apply false
+    id "org.jetbrains.kotlin.android" version "2.1.0" apply false
 }
 
 include ":app"
\ No newline at end of file
diff --git a/example/lib/src/loopback_sample_unified_tracks.dart b/example/lib/src/loopback_sample_unified_tracks.dart
index ba84cabbf2..fdb5f7218b 100644
--- a/example/lib/src/loopback_sample_unified_tracks.dart
+++ b/example/lib/src/loopback_sample_unified_tracks.dart
@@ -19,7 +19,13 @@ const List<String> audioCodecList = [
   'PCMU',
   'G729'
 ];
-const List<String> videoCodecList = ['VP8', 'VP9', 'H264', 'AV1'];
+const List<String> videoCodecList = [
+  'VP8',
+  'VP9',
+  'H264',
+  'H265',
+  'AV1'
+];
 
 class _MyAppState extends State {
   String audioDropdownValue = audioCodecList.first;
diff --git a/ios/flutter_webrtc.podspec b/ios/flutter_webrtc.podspec
index 5ebc220adf..e83b25767b 100644
--- a/ios/flutter_webrtc.podspec
+++ b/ios/flutter_webrtc.podspec
@@ -3,7 +3,7 @@
 #
 Pod::Spec.new do |s|
   s.name             = 'flutter_webrtc'
-  s.version          = '1.0.0'
+  s.version          = '1.1.0'
   s.summary          = 'Flutter WebRTC plugin for iOS.'
   s.description      = <<-DESC
 A new flutter plugin project.
@@ -15,7 +15,7 @@ A new flutter plugin project.
   s.source_files = 'Classes/**/*'
   s.public_header_files = 'Classes/**/*.h'
   s.dependency 'Flutter'
-  s.dependency 'WebRTC-SDK', '137.7151.02'
+  s.dependency 'WebRTC-SDK', '137.7151.03'
   s.ios.deployment_target = '13.0'
   s.static_framework = true
   s.pod_target_xcconfig = {
diff --git a/lib/src/helper.dart b/lib/src/helper.dart
index 6f1e9666bf..7e3f71acf4 100644
--- a/lib/src/helper.dart
+++ b/lib/src/helper.dart
@@ -1,10 +1,20 @@
 import 'dart:math';
 
 import 'package:flutter/foundation.dart';
-
+import 'package:logger/logger.dart';
 import '../flutter_webrtc.dart';
+import 'native_logs_listener.dart';
 
 class Helper {
+  /// Set the Logger object for webrtc.
+  ///
+  /// Params:
+  ///
+  /// "severity": possible values: ['verbose', 'info', 'warning', 'error', 'none']
+  static void setLogger(Logger logger, [String severity = 'none']) {
+    NativeLogsListener.instance.setLogger(logger, severity);
+  }
+
   static Future<List<MediaDeviceInfo>> enumerateDevices(String type) async {
     var devices = await navigator.mediaDevices.enumerateDevices();
     return devices.where((d) => d.kind == type).toList();
diff --git a/lib/src/native/rtc_video_platform_view_controller.dart b/lib/src/native/rtc_video_platform_view_controller.dart
index e9eeb1d51c..75c824dace 100644
--- a/lib/src/native/rtc_video_platform_view_controller.dart
+++ b/lib/src/native/rtc_video_platform_view_controller.dart
@@ -7,9 +7,11 @@ import 'package:webrtc_interface/webrtc_interface.dart';
 
 import '../helper.dart';
 import 'utils.dart';
 
+import '../video_renderer_extension.dart' show AudioControl;
+
 class RTCVideoPlatformViewController extends ValueNotifier<RTCVideoValue>
-    implements VideoRenderer {
+    implements VideoRenderer, AudioControl {
   RTCVideoPlatformViewController(int viewId) : super(RTCVideoValue.empty) {
     _viewId = viewId;
   }
@@ -180,4 +182,18 @@ class RTCVideoPlatformViewController extends ValueNotifier<RTCVideoValue>
     }
     return true;
   }
+
+  @override
+  Future<void> setVolume(double value) async {
+    try {
+      if (_srcObject == null) {
+        throw Exception('Can\'t set volume: The MediaStream is null');
+      }
+      for (MediaStreamTrack track in _srcObject!.getAudioTracks()) {
+        await Helper.setVolume(value, track);
+      }
+    } catch (e) {
+      print('Helper.setVolume ${e.toString()}');
+    }
+  }
 }
diff --git a/lib/src/native/rtc_video_renderer_impl.dart b/lib/src/native/rtc_video_renderer_impl.dart
index c2a46cba75..969750e0aa 100644
--- a/lib/src/native/rtc_video_renderer_impl.dart
+++ b/lib/src/native/rtc_video_renderer_impl.dart
@@ -7,9 +7,10 @@ import 'package:webrtc_interface/webrtc_interface.dart';
 
 import '../helper.dart';
 import 'utils.dart';
+import '../video_renderer_extension.dart' show AudioControl;
 
 class RTCVideoRenderer extends ValueNotifier<RTCVideoValue>
-    implements VideoRenderer {
+    implements VideoRenderer, AudioControl {
   RTCVideoRenderer() : super(RTCVideoValue.empty);
 
   Completer? _initializing;
   int? _textureId;
@@ -176,4 +177,18 @@ class RTCVideoRenderer extends ValueNotifier<RTCVideoValue>
     }
     return true;
   }
+
+  @override
+  Future<void> setVolume(double value) async {
+    try {
+      if (_srcObject == null) {
+        throw Exception('Can\'t set volume: The MediaStream is null');
+      }
+      for (MediaStreamTrack track in _srcObject!.getAudioTracks()) {
+        await Helper.setVolume(value, track);
+      }
+    } catch (e) {
+      print('Helper.setVolume ${e.toString()}');
+    }
+  }
 }
diff --git a/lib/src/native/utils.dart b/lib/src/native/utils.dart
index 362e7917cd..15841e55e9 100644
--- a/lib/src/native/utils.dart
+++ b/lib/src/native/utils.dart
@@ -1,6 +1,7 @@
 import 'dart:io';
 
 import 'package:flutter/services.dart';
+import '../native_logs_listener.dart';
 
 class WebRTC {
   static const MethodChannel _channel = MethodChannel('FlutterWebRTC.Method');
@@ -24,7 +25,10 @@ class WebRTC {
 
   static Future invokeMethod(String methodName, [dynamic param]) async {
-    await initialize();
+
+    await initialize(options: {
+      'logSeverity': NativeLogsListener.instance.severity,
+    });
 
     return _channel.invokeMethod(
       methodName,
diff --git a/lib/src/native_logs_listener.dart b/lib/src/native_logs_listener.dart
new file mode 100644
index 0000000000..1d35794b71
--- /dev/null
+++ b/lib/src/native_logs_listener.dart
@@ -0,0 +1,43 @@
+import 'package:logger/logger.dart';
+
+import 'native/event_channel.dart';
+import './native/utils.dart';
+
+class NativeLogsListener {
+  NativeLogsListener._internal() {
+    FlutterWebRTCEventChannel.instance.handleEvents.stream.listen((data) {
+      var event = data.keys.first;
+      Map map = data[event];
+      handleEvent(event, map);
+    });
+  }
+
+  static final NativeLogsListener instance = NativeLogsListener._internal();
+
+  Logger? _logger;
+  String _severity = 'none';
+
+  String get severity => _severity;
+
+  /// Set the Logger object.
+  ///
+  /// Params:
+  ///
+  /// "severity": possible values: ['verbose', 'info', 'warning', 'error', 'none']
+  void setLogger(Logger logger, [String severity = 'none']) {
+    _logger = logger;
+    _severity = severity;
+
+    WebRTC.invokeMethod('setLogSeverity', {
+      'severity': severity,
+    });
+  }
+
+  void handleEvent(String event, final Map map) async {
+    switch (map['event']) {
+      case 'onLogData':
+        if (_logger != null) {
+          _logger?.i('webrtc: ${map['data']}');
+        }
+        break;
+    }
+  }
+}
\ No newline at end of file
diff --git a/lib/src/video_renderer_extension.dart b/lib/src/video_renderer_extension.dart
index fa8b7ac78b..df4a00f14b 100644
--- a/lib/src/video_renderer_extension.dart
+++ b/lib/src/video_renderer_extension.dart
@@ -3,3 +3,7 @@ import 'package:flutter_webrtc/flutter_webrtc.dart';
 extension VideoRendererExtension on RTCVideoRenderer {
   RTCVideoValue get videoValue => value;
 }
+
+abstract class AudioControl {
+  Future<void> setVolume(double volume);
+}
\ No newline at end of file
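Usage note: taken together, the Dart-side changes above add an opt-in native log forwarder (Helper.setLogger) and a renderer-level setVolume. A minimal sketch of how the new API is meant to be called (illustrative only, not part of the patch; the Logger() instance comes from the logger package, and on Android the severity is only applied when the native factory is first initialized, per the MethodCallHandlerImpl comment above):

import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:logger/logger.dart';

Future<void> example() async {
  // Forward native WebRTC logs at 'info' severity; call this before the
  // first plugin method so initialize() picks the severity up.
  Helper.setLogger(Logger(), 'info');

  final stream = await navigator.mediaDevices
      .getUserMedia({'audio': true, 'video': true});

  final renderer = RTCVideoRenderer();
  await renderer.initialize();
  renderer.srcObject = stream;

  // New in 1.1.0: applies the volume to every audio track of srcObject.
  await renderer.setVolume(0.5);
}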
diff --git a/lib/src/web/rtc_video_renderer_impl.dart b/lib/src/web/rtc_video_renderer_impl.dart
index 69df097e0c..b800123071 100644
--- a/lib/src/web/rtc_video_renderer_impl.dart
+++ b/lib/src/web/rtc_video_renderer_impl.dart
@@ -1,5 +1,6 @@
 import 'dart:async';
 import 'dart:js_interop';
+import 'dart:js_interop_unsafe';
 import 'dart:ui_web' as web_ui;
 
 import 'package:flutter/foundation.dart';
@@ -7,6 +8,10 @@ import 'package:flutter/services.dart';
 
 import 'package:dart_webrtc/dart_webrtc.dart';
 import 'package:web/web.dart' as web;
+import '../video_renderer_extension.dart' show AudioControl;
+
+const bool useHtmlElementView =
+    bool.fromEnvironment("WEBRTC_USE_HTML_ELEMENT_VIEW", defaultValue: false);
 
 // An error code value to error name Map.
 // See: https://developer.mozilla.org/en-US/docs/Web/API/MediaError/code
@@ -32,7 +37,7 @@ const String _kDefaultErrorMessage =
     'No further diagnostic information can be determined or provided.';
 
 class RTCVideoRenderer extends ValueNotifier<RTCVideoValue>
-    implements VideoRenderer {
+    implements VideoRenderer, AudioControl {
   RTCVideoRenderer()
       : _textureId = _textureCounter++,
         super(RTCVideoValue.empty);
@@ -59,6 +64,8 @@ class RTCVideoRenderer extends ValueNotifier<RTCVideoValue>
 
   bool _muted = false;
 
+  web.HTMLVideoElement? element;
+
   set objectFit(String fit) {
     if (_objectFit == fit) return;
     _objectFit = fit;
@@ -233,6 +240,9 @@ class RTCVideoRenderer extends ValueNotifier<RTCVideoValue>
     if (audioManager != null && !audioManager.hasChildNodes()) {
       audioManager.remove();
     }
+    if (!useHtmlElementView) {
+      element?.remove();
+    }
     return super.dispose();
   }
 
@@ -240,8 +250,11 @@ class RTCVideoRenderer extends ValueNotifier<RTCVideoValue>
   Future<bool> audioOutput(String deviceId) async {
     try {
       final element = _audioElement;
-      if (null != element) {
-        await element.setSinkId(deviceId).toDart;
+      if (null != element &&
+          element.getProperty('setSinkId'.toJS).isDefinedAndNotNull) {
+        await (element.callMethod('setSinkId'.toJS, deviceId.toJS) as JSPromise)
+            .toDart;
+
         return true;
       }
     } catch (e) {
@@ -250,62 +263,71 @@ class RTCVideoRenderer extends ValueNotifier<RTCVideoValue>
     }
     return false;
   }
 
+  web.HTMLVideoElement createElement() {
+    for (var s in _subscriptions) {
+      s.cancel();
+    }
+    _subscriptions.clear();
+
+    final element = web.HTMLVideoElement()
+      ..autoplay = true
+      ..muted = true
+      ..controls = false
+      ..srcObject = _videoStream
+      ..id = _elementIdForVideo
+      ..setAttribute('playsinline', 'true');
+
+    _applyDefaultVideoStyles(element);
+
+    _subscriptions.add(
+      element.onCanPlay.listen((dynamic _) {
+        _updateAllValues(element);
+      }),
+    );
+
+    _subscriptions.add(
+      element.onResize.listen((dynamic _) {
+        _updateAllValues(element);
+        onResize?.call();
+      }),
+    );
+
+    // The error event fires when some form of error occurs while attempting to load or perform the media.
+    _subscriptions.add(
+      element.onError.listen((web.Event _) {
+        // The Event itself (_) doesn't contain info about the actual error.
+        // We need to look at the HTMLMediaElement.error.
+        // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/error
+        final error = element.error;
+        print('RTCVideoRenderer: videoElement.onError, ${error.toString()}');
+        throw PlatformException(
+          code: _kErrorValueToErrorName[error!.code]!,
+          message: error.message != '' ? error.message : _kDefaultErrorMessage,
+          details: _kErrorValueToErrorDescription[error.code],
+        );
+      }),
+    );
+
+    _subscriptions.add(
+      element.onEnded.listen((dynamic _) {
+        // print('RTCVideoRenderer: videoElement.onEnded');
+      }),
+    );
+
+    return element;
+  }
+
   @override
   Future<void> initialize() async {
-    web_ui.platformViewRegistry.registerViewFactory(viewType, (int viewId) {
-      for (var s in _subscriptions) {
-        s.cancel();
-      }
-      _subscriptions.clear();
-
-      final element = web.HTMLVideoElement()
-        ..autoplay = true
-        ..muted = true
-        ..controls = false
-        ..srcObject = _videoStream
-        ..id = _elementIdForVideo
-        ..setAttribute('playsinline', 'true');
-
-      _applyDefaultVideoStyles(element);
-
-      _subscriptions.add(
-        element.onCanPlay.listen((dynamic _) {
-          _updateAllValues(element);
-        }),
-      );
-
-      _subscriptions.add(
-        element.onResize.listen((dynamic _) {
-          _updateAllValues(element);
-          onResize?.call();
-        }),
-      );
-
-      // The error event fires when some form of error occurs while attempting to load or perform the media.
-      _subscriptions.add(
-        element.onError.listen((web.Event _) {
-          // The Event itself (_) doesn't contain info about the actual error.
-          // We need to look at the HTMLMediaElement.error.
-          // See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLMediaElement/error
-          final error = element.error;
-          print('RTCVideoRenderer: videoElement.onError, ${error.toString()}');
-          throw PlatformException(
-            code: _kErrorValueToErrorName[error!.code]!,
-            message:
-                error.message != '' ? error.message : _kDefaultErrorMessage,
-            details: _kErrorValueToErrorDescription[error.code],
-          );
-        }),
-      );
-
-      _subscriptions.add(
-        element.onEnded.listen((dynamic _) {
-          // print('RTCVideoRenderer: videoElement.onEnded');
-        }),
-      );
-
-      return element;
-    });
+    bool isVisible = useHtmlElementView;
+    if (isVisible) {
+      web_ui.platformViewRegistry.registerViewFactory(viewType, (int viewId) {
+        return createElement();
+      }, isVisible: isVisible);
+    } else {
+      final element = createElement();
+      web.window.document.body!.appendChild(element);
+    }
   }
 
   void _applyDefaultVideoStyles(web.HTMLVideoElement element) {
@@ -314,11 +336,17 @@ class RTCVideoRenderer extends ValueNotifier<RTCVideoValue>
     if (mirror) {
       element.style.transform = 'scaleX(-1)';
     }
 
-    element
-      ..style.objectFit = _objectFit
-      ..style.border = 'none'
-      ..style.width = '100%'
-      ..style.height = '100%';
+    if (useHtmlElementView) {
+      element
+        ..style.objectFit = _objectFit
+        ..style.border = 'none'
+        ..style.width = '100%'
+        ..style.height = '100%';
+    } else {
+      element.style.pointerEvents = "none";
+      element.style.opacity = "0";
+      element.style.position = "absolute";
+    }
   }
 
   @override
@@ -326,4 +354,9 @@ class RTCVideoRenderer extends ValueNotifier<RTCVideoValue>
 
   @override
   Function? onFirstFrameRendered;
+
+  @override
+  Future<void> setVolume(double volume) async {
+    _audioElement?.volume = volume.clamp(0.0, 1.0);
+  }
 }
diff --git a/lib/src/web/rtc_video_view_impl.dart b/lib/src/web/rtc_video_view_impl.dart
index 9ef8ff1461..fab26ca0b8 100644
--- a/lib/src/web/rtc_video_view_impl.dart
+++ b/lib/src/web/rtc_video_view_impl.dart
@@ -1,8 +1,13 @@
 import 'dart:async';
+import 'dart:js_interop';
+import 'dart:js_interop_unsafe';
+import 'dart:ui' as ui;
+import 'dart:ui_web' as ui_web;
 
 import 'package:flutter/material.dart';
 
 import 'package:dart_webrtc/dart_webrtc.dart';
+import 'package:web/web.dart' as web;
 import 'package:webrtc_interface/webrtc_interface.dart';
 
 import 'rtc_video_renderer_impl.dart';
@@ -41,17 +46,97 @@ class RTCVideoViewState extends State<RTCVideoView> {
         widget.objectFit == RTCVideoViewObjectFit.RTCVideoViewObjectFitContain
             ? 'contain'
             : 'cover';
+
+    videoElement =
+        web.document.getElementById("video_${videoRenderer.viewType}")
+            as web.HTMLVideoElement?;
+    frameCallback(0.toJS, 0.toJS);
   }
 
   void _onRendererListener() {
     if (mounted) setState(() {});
   }
 
+  int? callbackID;
+
+  void getFrame(web.HTMLVideoElement element) {
+    callbackID =
+        element.requestVideoFrameCallbackWithFallback(frameCallback.toJS);
+  }
+
+  void cancelFrame(web.HTMLVideoElement element) {
+    if (callbackID != null) {
+      element.cancelVideoFrameCallbackWithFallback(callbackID!);
+    }
+  }
+
+  void frameCallback(JSAny now, JSAny metadata) {
+    final web.HTMLVideoElement? element = videoElement;
+    if (element != null) {
+      // only capture frames if video is playing (optimization for RAF)
+      if (element.readyState > 2) {
+        capture().then((_) async {
+          getFrame(element);
+        });
+      } else {
+        getFrame(element);
+      }
+    } else {
+      if (mounted) {
+        Future.delayed(Duration(milliseconds: 100)).then((_) {
+          frameCallback(0.toJS, 0.toJS);
+        });
+      }
+    }
+  }
+
+  ui.Image? capturedFrame;
+  num? lastFrameTime;
+
+  Future<void> capture() async {
+    final element = videoElement!;
+    if (lastFrameTime != element.currentTime) {
+      lastFrameTime = element.currentTime;
+      try {
+        final ui.Image img = await ui_web.createImageFromTextureSource(element,
+            width: element.videoWidth,
+            height: element.videoHeight,
+            transferOwnership: true);
+
+        if (mounted) {
+          setState(() {
+            capturedFrame?.dispose();
+            capturedFrame = img;
+          });
+        }
+      } on web.DOMException catch (err) {
+        lastFrameTime = null;
+        if (err.name == 'InvalidStateError') {
+          // We don't have enough data yet, continue on
+        } else {
+          rethrow;
+        }
+      }
+    }
+  }
+
   @override
   void dispose() {
     if (mounted) {
       super.dispose();
     }
+    capturedFrame?.dispose();
+    if (videoElement != null) {
+      cancelFrame(videoElement!);
+    }
+  }
+
+  Size? size;
+
+  void updateElement() {
+    if (videoElement != null && size != null) {
+      videoElement!.width = size!.width.toInt();
+      videoElement!.height = size!.height.toInt();
+    }
   }
 
   @override
@@ -65,8 +150,40 @@ class RTCVideoViewState extends State<RTCVideoView> {
             : 'cover';
   }
 
+  web.HTMLVideoElement? videoElement;
+
   Widget buildVideoElementView() {
-    return HtmlElementView(viewType: videoRenderer.viewType);
+    if (useHtmlElementView) {
+      return HtmlElementView(viewType: videoRenderer.viewType);
+    } else {
+      return LayoutBuilder(builder: (context, constraints) {
+        if (videoElement != null && size != constraints.biggest) {
+          size = constraints.biggest;
+          updateElement();
+        }
+
+        return Stack(children: [
+          if (capturedFrame != null)
+            Positioned.fill(
+                child: FittedBox(
+                    fit: switch (widget.objectFit) {
+                      RTCVideoViewObjectFit.RTCVideoViewObjectFitContain =>
+                        BoxFit.contain,
+                      RTCVideoViewObjectFit.RTCVideoViewObjectFitCover =>
+                        BoxFit.cover,
+                    },
+                    child: SizedBox(
+                        width: capturedFrame!.width.toDouble(),
+                        height: capturedFrame!.height.toDouble(),
+                        child: CustomPaint(
+                            willChange: true,
+                            painter: _ImageFlipPainter(
+                              capturedFrame!,
+                              widget.mirror,
+                            )))))
+        ]);
+      });
+    }
   }
 
   @override
@@ -86,3 +203,53 @@ class RTCVideoViewState extends State<RTCVideoView> {
     );
   }
 }
+
+typedef _VideoFrameRequestCallback = JSFunction;
+
+extension _HTMLVideoElementRequestAnimationFrame on web.HTMLVideoElement {
+  int requestVideoFrameCallbackWithFallback(
+      _VideoFrameRequestCallback callback) {
+    if (hasProperty('requestVideoFrameCallback'.toJS).toDart) {
+      return requestVideoFrameCallback(callback);
+    } else {
+      return web.window.requestAnimationFrame((double num) {
+        callback.callAsFunction(this, 0.toJS, 0.toJS);
+      }.toJS);
+    }
+  }
+
+  void cancelVideoFrameCallbackWithFallback(int callbackID) {
+    if (hasProperty('requestVideoFrameCallback'.toJS).toDart) {
+      cancelVideoFrameCallback(callbackID);
+    } else {
+      web.window.cancelAnimationFrame(callbackID);
+    }
+  }
+
+  external int requestVideoFrameCallback(_VideoFrameRequestCallback callback);
+  external void cancelVideoFrameCallback(int callbackID);
+}
+
+class _ImageFlipPainter extends CustomPainter {
+  _ImageFlipPainter(this.image, this.flip);
+
+  final ui.Image image;
+  final bool flip;
+
+  @override
+  void paint(Canvas canvas, Size size) {
+    if (flip) {
+      canvas.scale(-1, 1);
+      canvas.drawImage(image, Offset(-size.width, 0),
+          Paint()..filterQuality = ui.FilterQuality.high);
+    } else {
+      canvas.drawImage(
+          image, Offset(0, 0), Paint()..filterQuality = ui.FilterQuality.high);
+    }
+  }
+
+  @override
+  bool shouldRepaint(CustomPainter oldDelegate) {
+    return false;
+  }
+}
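Usage note: the texture-based rendering path above becomes the default on web; the previous HtmlElementView path stays available behind the WEBRTC_USE_HTML_ELEMENT_VIEW compile-time flag. Since the flag is read with bool.fromEnvironment, it would presumably be enabled the standard way via a dart-define, e.g.:

// flutter run -d chrome --dart-define=WEBRTC_USE_HTML_ELEMENT_VIEW=true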
diff --git a/macos/flutter_webrtc.podspec b/macos/flutter_webrtc.podspec
index 673097f0c7..ed7cedc3ba 100644
--- a/macos/flutter_webrtc.podspec
+++ b/macos/flutter_webrtc.podspec
@@ -3,7 +3,7 @@
 #
 Pod::Spec.new do |s|
   s.name             = 'flutter_webrtc'
-  s.version          = '1.0.0'
+  s.version          = '1.1.0'
   s.summary          = 'Flutter WebRTC plugin for macOS.'
   s.description      = <<-DESC
 A new flutter plugin project.
@@ -15,6 +15,6 @@ A new flutter plugin project.
   s.source_files = ['Classes/**/*']
   s.dependency 'FlutterMacOS'
-  s.dependency 'WebRTC-SDK', '137.7151.02'
+  s.dependency 'WebRTC-SDK', '137.7151.03'
   s.osx.deployment_target = '10.15'
 end
diff --git a/pubspec.yaml b/pubspec.yaml
index 3fa077509b..b04bf7ca1c 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -1,6 +1,6 @@
 name: flutter_webrtc
 description: Flutter WebRTC plugin for iOS/Android/Desktop/Web, based on GoogleWebRTC.
-version: 1.0.0
+version: 1.1.0
 homepage: https://github.com/cloudwebrtc/flutter-webrtc
 environment:
   sdk: ">=3.3.0 <4.0.0"
@@ -11,6 +11,7 @@ dependencies:
   dart_webrtc: ^1.5.3+hotfix.3
   flutter:
     sdk: flutter
+  logger: ^2.0.2+1
   path_provider: ^2.0.2
   web: ^1.0.0
   webrtc_interface: ^1.2.2+hotfix.2
diff --git a/third_party/CMakeLists.txt b/third_party/CMakeLists.txt
index 5afd943325..203ecd25e1 100644
--- a/third_party/CMakeLists.txt
+++ b/third_party/CMakeLists.txt
@@ -1,7 +1,7 @@
 include(ExternalProject)
 
 set(ZIPFILE "${CMAKE_CURRENT_LIST_DIR}/downloads/libwebrtc.zip")
-set(DOWNLOAD_URL "https://github.com/flutter-webrtc/flutter-webrtc/releases/download/v1.0.0/libwebrtc.zip")
+set(DOWNLOAD_URL "https://github.com/flutter-webrtc/flutter-webrtc/releases/download/v1.1.0/libwebrtc.zip")
 
 if(NOT EXISTS "${ZIPFILE}")
   message(NOTICE "download: ${DOWNLOAD_URL}")