it = decodeBufferList.iterator();
- while (it.hasNext()) {
- DecodeDataBuffer tmp = it.next();
- if (tmp.getUid() == uid) {
- byte[] buf = new byte[bufferLength];
- tmp.getByteBuffer().limit(bufferLength);
- tmp.getByteBuffer().get(buf);
- tmp.getByteBuffer().flip();
-
- observer.onRenderVideoFrame(uid, buf, videoFrameType, width, height, bufferLength, yStride, uStride, vStride, rotation, renderTimeMs);
-
- tmp.getByteBuffer().put(buf);
- tmp.getByteBuffer().flip();
-
- if (beRenderVideoShot) {
- if (uid == renderVideoShotUid) {
- beRenderVideoShot = false;
-
- getVideoSnapshot(width, height, rotation, bufferLength, buf, renderFilePath, yStride, uStride, vStride);
- }
+ ByteBuffer tmp = decodeBufferList.get(uid);
+ if (tmp != null) {
+ byte[] buf = new byte[bufferLength];
+ tmp.limit(bufferLength);
+ tmp.get(buf);
+ tmp.flip();
+
+ observer.onRenderVideoFrame(uid, buf, videoFrameType, width, height, bufferLength, yStride, uStride, vStride, rotation, renderTimeMs);
+
+ tmp.put(buf);
+ tmp.flip();
+
+ if (beRenderVideoShot) {
+ if (uid == renderVideoShotUid) {
+ beRenderVideoShot = false;
+
+ getVideoSnapshot(width, height, rotation, bufferLength, buf, renderFilePath, yStride, uStride, vStride);
}
}
}
@@ -237,7 +250,8 @@ private void getVideoSnapshot(int width, int height, int rotation, int bufferLen
byte[] bytes = baos.toByteArray();
try {
baos.close();
- } catch (IOException e) {
+ }
+ catch (IOException e) {
e.printStackTrace();
}
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
@@ -253,14 +267,16 @@ private void getVideoSnapshot(int width, int height, int rotation, int bufferLen
try {
file.createNewFile();
- } catch (IOException e) {
+ }
+ catch (IOException e) {
e.printStackTrace();
}
FileOutputStream fos = null;
try {
fos = new FileOutputStream(file);
- } catch (FileNotFoundException e) {
+ }
+ catch (FileNotFoundException e) {
e.printStackTrace();
}
@@ -271,7 +287,8 @@ private void getVideoSnapshot(int width, int height, int rotation, int bufferLen
try {
fos.close();
- } catch (IOException e) {
+ }
+ catch (IOException e) {
e.printStackTrace();
}
}
@@ -289,7 +306,6 @@ private void swapYU12toYUV420SemiPlanar(byte[] yu12bytes, byte[] i420bytes, int
public void releaseBuffer() {
byteBufferCapture.clear();
- byteBufferRender.clear();
byteBufferAudioRecord.clear();
byteBufferAudioPlay.clear();
byteBufferBeforeAudioMix.clear();
diff --git a/Android/APIExample/lib-raw-data/src/main/java/io/agora/advancedvideo/rawdata/MediaDataVideoObserver.java b/Android/APIExample/lib-raw-data/src/main/java/io/agora/advancedvideo/rawdata/MediaDataVideoObserver.java
index 0393dd206..9d41eedbf 100644
--- a/Android/APIExample/lib-raw-data/src/main/java/io/agora/advancedvideo/rawdata/MediaDataVideoObserver.java
+++ b/Android/APIExample/lib-raw-data/src/main/java/io/agora/advancedvideo/rawdata/MediaDataVideoObserver.java
@@ -7,4 +7,6 @@ public interface MediaDataVideoObserver {
void onCaptureVideoFrame(byte[] data, int frameType, int width, int height, int bufferLength, int yStride, int uStride, int vStride, int rotation, long renderTimeMs);
void onRenderVideoFrame(int uid, byte[] data, int frameType, int width, int height, int bufferLength, int yStride, int uStride, int vStride, int rotation, long renderTimeMs);
+
+ void onPreEncodeVideoFrame(byte[] data, int frameType, int width, int height, int bufferLength, int yStride, int uStride, int vStride, int rotation, long renderTimeMs);
}
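
A minimal sketch of an observer implementation may help when reading the new callback; the LoggingVideoObserver class below is hypothetical and only uses the method signatures declared in MediaDataVideoObserver above.

import android.util.Log;

import io.agora.advancedvideo.rawdata.MediaDataVideoObserver;

public class LoggingVideoObserver implements MediaDataVideoObserver {
    private static final String TAG = "LoggingVideoObserver";

    @Override
    public void onCaptureVideoFrame(byte[] data, int frameType, int width, int height, int bufferLength,
                                    int yStride, int uStride, int vStride, int rotation, long renderTimeMs) {
        // Local camera frame, delivered as raw bytes.
        Log.d(TAG, "capture " + width + "x" + height + " rotation=" + rotation);
    }

    @Override
    public void onPreEncodeVideoFrame(byte[] data, int frameType, int width, int height, int bufferLength,
                                      int yStride, int uStride, int vStride, int rotation, long renderTimeMs) {
        // New callback added by this patch: the frame as it looks just before encoding.
        Log.d(TAG, "pre-encode " + width + "x" + height);
    }

    @Override
    public void onRenderVideoFrame(int uid, byte[] data, int frameType, int width, int height, int bufferLength,
                                   int yStride, int uStride, int vStride, int rotation, long renderTimeMs) {
        // Remote frame about to be rendered, keyed by the sender's uid.
        Log.d(TAG, "render uid=" + uid + " " + width + "x" + height);
    }
}
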
diff --git a/Android/APIExample/lib-raw-data/src/main/java/io/agora/advancedvideo/rawdata/MediaPreProcessing.java b/Android/APIExample/lib-raw-data/src/main/java/io/agora/advancedvideo/rawdata/MediaPreProcessing.java
index 1940704b2..668e9cfa3 100644
--- a/Android/APIExample/lib-raw-data/src/main/java/io/agora/advancedvideo/rawdata/MediaPreProcessing.java
+++ b/Android/APIExample/lib-raw-data/src/main/java/io/agora/advancedvideo/rawdata/MediaPreProcessing.java
@@ -19,6 +19,15 @@ public interface ProgressCallback {
* use this parameter for the following purposes:*/
void onCaptureVideoFrame(int videoFrameType, int width, int height, int bufferLength, int yStride, int uStride, int vStride, int rotation, long renderTimeMs);
+ /**
+ * Occurs each time the SDK receives a video frame before encoding.
+ * @param videoFrameType include FRAME_TYPE_YUV420, FRAME_TYPE_YUV422, FRAME_TYPE_RGBA
+ * @param rotation the rotation of this frame before rendering the video. Supports 0, 90,
+ * 180, 270 degrees clockwise.
+ * @param renderTimeMs The timestamp (ms) of the video frame before encoding. It is mandatory.
+ */
+ void onPreEncodeVideoFrame(int videoFrameType, int width, int height, int bufferLength, int yStride, int uStride, int vStride, int rotation, long renderTimeMs);
+
/**Occurs each time the SDK receives a video frame captured by the local camera.
* @param uid ID of the remote user who sends the current video frame.*/
void onRenderVideoFrame(int uid, int videoFrameType, int width, int height, int bufferLength, int yStride, int uStride, int vStride, int rotation, long renderTimeMs);
diff --git a/Android/APIExample/lib-screensharing/build.gradle b/Android/APIExample/lib-screensharing/build.gradle
new file mode 100644
index 000000000..5f7ffb803
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/build.gradle
@@ -0,0 +1,33 @@
+apply plugin: 'com.android.library'
+
+android {
+ compileSdkVersion 29
+ buildToolsVersion "29.0.2"
+
+ defaultConfig {
+ minSdkVersion 19
+ targetSdkVersion 29
+ versionCode 1
+ versionName "1.0"
+
+ testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
+
+ }
+
+ buildTypes {
+ release {
+ minifyEnabled false
+ proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+ }
+ }
+
+}
+
+dependencies {
+ implementation fileTree(dir: 'libs', include: ['*.jar'])
+ implementation 'androidx.appcompat:appcompat:1.1.0'
+ testImplementation 'junit:junit:4.12'
+ androidTestImplementation 'com.android.support.test:runner:1.0.2'
+ androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
+ api project(path: ':lib-component')
+}
diff --git a/Android/APIExample/lib-screensharing/proguard-rules.pro b/Android/APIExample/lib-screensharing/proguard-rules.pro
new file mode 100644
index 000000000..f1b424510
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
diff --git a/Android/APIExample/lib-screensharing/src/main/AndroidManifest.xml b/Android/APIExample/lib-screensharing/src/main/AndroidManifest.xml
new file mode 100644
index 000000000..7f8c6347f
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/AndroidManifest.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Android/APIExample/lib-screensharing/src/main/aidl/io/agora/rtc/ss/aidl/INotification.aidl b/Android/APIExample/lib-screensharing/src/main/aidl/io/agora/rtc/ss/aidl/INotification.aidl
new file mode 100644
index 000000000..76cf4d111
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/aidl/io/agora/rtc/ss/aidl/INotification.aidl
@@ -0,0 +1,9 @@
+// INotification.aidl
+package io.agora.rtc.ss.aidl;
+
+// Declare any non-default types here with import statements
+
+interface INotification {
+ void onError(int error);
+ void onTokenWillExpire();
+}
diff --git a/Android/APIExample/lib-screensharing/src/main/aidl/io/agora/rtc/ss/aidl/IScreenSharing.aidl b/Android/APIExample/lib-screensharing/src/main/aidl/io/agora/rtc/ss/aidl/IScreenSharing.aidl
new file mode 100644
index 000000000..d843e3ccf
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/aidl/io/agora/rtc/ss/aidl/IScreenSharing.aidl
@@ -0,0 +1,14 @@
+// IScreenSharing.aidl
+package io.agora.rtc.ss.aidl;
+
+import io.agora.rtc.ss.aidl.INotification;
+
+// Declare any non-default types here with import statements
+
+interface IScreenSharing {
+ void registerCallback(INotification callback);
+ void unregisterCallback(INotification callback);
+ void startShare();
+ void stopShare();
+ void renewToken(String token);
+}
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/Constant.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/Constant.java
new file mode 100644
index 000000000..236e7961b
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/Constant.java
@@ -0,0 +1,13 @@
+package io.agora.rtc.ss;
+
+public class Constant {
+ public static final String CHANNEL_NAME = "channel";
+ public static final String UID = "uid";
+ public static final String WIDTH = "width";
+ public static final String HEIGHT = "height";
+ public static final String FRAME_RATE = "frame_rate";
+ public static final String BITRATE = "bit_rate";
+ public static final String ORIENTATION_MODE = "orientation_mode";
+ public static final String APP_ID = "app_id";
+ public static final String ACCESS_TOKEN = "access_token";
+}
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/ScreenSharingClient.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/ScreenSharingClient.java
new file mode 100644
index 000000000..7e72062dd
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/ScreenSharingClient.java
@@ -0,0 +1,141 @@
+package io.agora.rtc.ss;
+
+import android.annotation.TargetApi;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.util.Log;
+
+import io.agora.rtc.ss.aidl.INotification;
+import io.agora.rtc.ss.aidl.IScreenSharing;
+import io.agora.rtc.ss.impl.ScreenSharingService;
+import io.agora.rtc.video.VideoEncoderConfiguration;
+
+public class ScreenSharingClient {
+ private static final String TAG = ScreenSharingClient.class.getSimpleName();
+ private static IScreenSharing mScreenShareSvc;
+ private IStateListener mStateListener;
+ private static volatile ScreenSharingClient mInstance;
+
+// private ScreenSharingClient() {
+// }
+
+ public static ScreenSharingClient getInstance() {
+ if (mInstance == null) {
+ synchronized (ScreenSharingClient.class) {
+ if (mInstance == null) {
+ mInstance = new ScreenSharingClient();
+ }
+ }
+ }
+
+ return mInstance;
+ }
+
+ private ServiceConnection mScreenShareConn = new ServiceConnection() {
+ public void onServiceConnected(ComponentName className, IBinder service) {
+ mScreenShareSvc = IScreenSharing.Stub.asInterface(service);
+
+ try {
+ mScreenShareSvc.registerCallback(mNotification);
+ mScreenShareSvc.startShare();
+ } catch (RemoteException e) {
+ e.printStackTrace();
+ Log.e(TAG, Log.getStackTraceString(e));
+ }
+
+ }
+
+ public void onServiceDisconnected(ComponentName className) {
+ mScreenShareSvc = null;
+ }
+ };
+
+ private INotification mNotification = new INotification.Stub() {
+ /**
+ * This is called by the remote service to tell us that an error has happened.
+ * Note that IPC calls are dispatched through a thread
+ * pool running in each process, so the code executing here will
+ * NOT be running in our main thread like most other things -- so,
+ * if we need to update the UI, we use a Handler to hop over there.
+ */
+ public void onError(int error) {
+ Log.e(TAG, "screen sharing service error happened: " + error);
+ mStateListener.onError(error);
+ }
+
+ public void onTokenWillExpire() {
+ Log.d(TAG, "access token for screen sharing service will expire soon");
+ mStateListener.onTokenWillExpire();
+ }
+ };
+
+ @TargetApi(21)
+ public void start(Context context, String appId, String token, String channelName, int uid, VideoEncoderConfiguration vec) {
+ if (mScreenShareSvc == null) {
+ Intent intent = new Intent(context, ScreenSharingService.class);
+ intent.putExtra(Constant.APP_ID, appId);
+ intent.putExtra(Constant.ACCESS_TOKEN, token);
+ intent.putExtra(Constant.CHANNEL_NAME, channelName);
+ intent.putExtra(Constant.UID, uid);
+ intent.putExtra(Constant.WIDTH, vec.dimensions.width);
+ intent.putExtra(Constant.HEIGHT, vec.dimensions.height);
+ intent.putExtra(Constant.FRAME_RATE, vec.frameRate);
+ intent.putExtra(Constant.BITRATE, vec.bitrate);
+ intent.putExtra(Constant.ORIENTATION_MODE, vec.orientationMode.getValue());
+ context.bindService(intent, mScreenShareConn, Context.BIND_AUTO_CREATE);
+ } else {
+ try {
+ mScreenShareSvc.startShare();
+ } catch (RemoteException e) {
+ e.printStackTrace();
+ Log.e(TAG, Log.getStackTraceString(e));
+ }
+ }
+
+ }
+
+ @TargetApi(21)
+ public void stop(Context context) {
+ if (mScreenShareSvc != null) {
+ try {
+ mScreenShareSvc.stopShare();
+ mScreenShareSvc.unregisterCallback(mNotification);
+ } catch (RemoteException e) {
+ e.printStackTrace();
+ Log.e(TAG, Log.getStackTraceString(e));
+ } finally {
+ mScreenShareSvc = null;
+ }
+ }
+ context.unbindService(mScreenShareConn);
+ }
+
+ @TargetApi(21)
+ public void renewToken(String token) {
+ if (mScreenShareSvc != null) {
+ try {
+ mScreenShareSvc.renewToken(token);
+ } catch (RemoteException e) {
+ e.printStackTrace();
+ Log.e(TAG, Log.getStackTraceString(e));
+ }
+ } else {
+ Log.e(TAG, "screen sharing service not exist");
+ }
+ }
+
+ @TargetApi(21)
+ public void setListener(IStateListener listener) {
+ mStateListener = listener;
+ }
+
+ public interface IStateListener {
+ void onError(int error);
+
+ void onTokenWillExpire();
+ }
+}
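
For reference, a caller would typically wire this client up roughly as follows. This is a hedged sketch: ScreenShareHelper, its log tags and the chosen encoder values are illustrative, while the VideoEncoderConfiguration constants are the stock ones from the Agora RTC SDK.

import android.content.Context;
import android.util.Log;

import io.agora.rtc.ss.ScreenSharingClient;
import io.agora.rtc.video.VideoEncoderConfiguration;

public class ScreenShareHelper {
    // appId/token/channelName are placeholders supplied by the caller.
    public static void startSharing(Context context, String appId, String token, String channelName) {
        final ScreenSharingClient client = ScreenSharingClient.getInstance();
        client.setListener(new ScreenSharingClient.IStateListener() {
            @Override
            public void onError(int error) {
                Log.e("ScreenShareHelper", "screen share error: " + error);
            }

            @Override
            public void onTokenWillExpire() {
                // Fetch a fresh token from your server, then call client.renewToken(newToken).
                Log.w("ScreenShareHelper", "screen share token about to expire");
            }
        });

        VideoEncoderConfiguration vec = new VideoEncoderConfiguration(
                VideoEncoderConfiguration.VD_1280x720,
                VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_15,
                VideoEncoderConfiguration.STANDARD_BITRATE,
                VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE);

        client.start(context, appId, token, channelName, 0, vec); // uid 0 lets the SDK assign one
    }

    public static void stopSharing(Context context) {
        ScreenSharingClient.getInstance().stop(context);
    }
}
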
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/AVFrameBase.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/AVFrameBase.java
new file mode 100644
index 000000000..748ad64b1
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/AVFrameBase.java
@@ -0,0 +1,9 @@
+package io.agora.rtc.ss.gles;
+
+public class AVFrameBase {
+ public long dts;
+ public long pts;
+
+ public AVFrameBase() {
+ }
+}
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/EglCore.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/EglCore.java
new file mode 100644
index 000000000..31f9e6bb4
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/EglCore.java
@@ -0,0 +1,331 @@
+package io.agora.rtc.ss.gles;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.os.Build;
+import android.util.Log;
+import android.view.Surface;
+
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public final class EglCore {
+ private static final String TAG = EglCore.class.getSimpleName();
+
+ /**
+ * Constructor flag: surface must be recordable. This discourages EGL from using a
+ * pixel format that cannot be converted efficiently to something usable by the video
+ * encoder.
+ */
+ public static final int FLAG_RECORDABLE = 0x01;
+
+ /**
+ * Constructor flag: ask for GLES3, fall back to GLES2 if not available. Without this
+ * flag, GLES2 is used.
+ */
+ public static final int FLAG_TRY_GLES3 = 0x02;
+
+ // Android-specific extension.
+ private static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+ private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
+ private EGLConfig mEGLConfig = null;
+ private int mGlVersion = -1;
+
+ public EglCore() {
+ this(null, 0);
+ }
+
+ /**
+ * Prepares EGL display and context.
+ *
+ *
+ * @param sharedContext The context to share, or null if sharing is not desired.
+ * @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
+ */
+ public EglCore(EGLContext sharedContext, int flags) {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("EGL already set up");
+ }
+
+ if (sharedContext == null) {
+ sharedContext = EGL14.EGL_NO_CONTEXT;
+ }
+
+ mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+ mEGLDisplay = null;
+ throw new RuntimeException("unable to initialize EGL14");
+ }
+
+ // Try to get a GLES3 context, if requested.
+ if ((flags & FLAG_TRY_GLES3) != 0) {
+ //Log.OffscreenSurface(TAG, "Trying GLES 3");
+ EGLConfig config = getConfig(flags, 3);
+ if (config != null) {
+ int[] attrib3_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
+ EGL14.EGL_NONE
+ };
+ EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
+ attrib3_list, 0);
+
+ if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
+ //Log.OffscreenSurface(TAG, "Got GLES 3 config");
+ mEGLConfig = config;
+ mEGLContext = context;
+ mGlVersion = 3;
+ }
+ }
+ }
+ if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed
+ //Log.OffscreenSurface(TAG, "Trying GLES 2");
+ EGLConfig config = getConfig(flags, 2);
+ if (config == null) {
+ throw new RuntimeException("Unable to find a suitable EGLConfig");
+ }
+ int[] attrib2_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+ EGL14.EGL_NONE
+ };
+ EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
+ attrib2_list, 0);
+ checkEglError("eglCreateContext");
+ mEGLConfig = config;
+ mEGLContext = context;
+ mGlVersion = 2;
+ }
+
+ // Confirm with query.
+ int[] values = new int[1];
+ EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
+ values, 0);
+ Log.d(TAG, "EGLContext created, client version " + values[0]);
+ }
+
+ /**
+ * Finds a suitable EGLConfig.
+ *
+ * @param flags Bit flags from constructor.
+ * @param version Must be 2 or 3.
+ */
+ private EGLConfig getConfig(int flags, int version) {
+ int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
+ if (version >= 3) {
+ renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
+ }
+
+ // The actual surface is generally RGBA or RGBX, so situationally omitting alpha
+ // doesn't really help. It can also lead to a huge performance hit on glReadPixels()
+ // when reading into a GL_RGBA buffer.
+ int[] attribList = {
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_ALPHA_SIZE, 8,
+ //EGL14.EGL_DEPTH_SIZE, 16,
+ //EGL14.EGL_STENCIL_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, renderableType,
+ EGL14.EGL_NONE, 0, // placeholder for recordable [@-3]
+ EGL14.EGL_NONE
+ };
+ if ((flags & FLAG_RECORDABLE) != 0) {
+ attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
+ attribList[attribList.length - 2] = 1;
+ }
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+ numConfigs, 0)) {
+ Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
+ return null;
+ }
+ return configs[0];
+ }
+
+ /**
+ * Discards all resources held by this class, notably the EGL context. This must be
+ * called from the thread where the context was created.
+ *
+ * On completion, no context will be current.
+ */
+ public void release() {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ // Android is unusual in that it uses a reference-counted EGLDisplay. So for
+ // every eglInitialize() we need an eglTerminate().
+ EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+ EGL14.EGL_NO_CONTEXT);
+ EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(mEGLDisplay);
+ }
+
+ mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ mEGLContext = EGL14.EGL_NO_CONTEXT;
+ mEGLConfig = null;
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ // We're limited here -- finalizers don't run on the thread that holds
+ // the EGL state, so if a surface or context is still current on another
+ // thread we can't fully release it here. Exceptions thrown from here
+ // are quietly discarded. Complain in the log file.
+ Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
+ release();
+ }
+ } finally {
+ super.finalize();
+ }
+ }
+
+ /**
+ * Destroys the specified surface. Note the EGLSurface won't actually be destroyed if it's
+ * still current in a context.
+ */
+ public void releaseSurface(EGLSurface eglSurface) {
+ EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
+ }
+
+ /**
+ * Creates an EGL surface associated with a Surface.
+ *
+ * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
+ */
+ public EGLSurface createWindowSurface(Object surface) {
+ if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+ throw new RuntimeException("invalid surface: " + surface);
+ }
+
+ // Create a window surface, and attach it to the Surface we received.
+ int[] surfaceAttribs = {
+ EGL14.EGL_NONE
+ };
+ EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
+ surfaceAttribs, 0);
+ checkEglError("eglCreateWindowSurface");
+ if (eglSurface == null) {
+ throw new RuntimeException("surface was null");
+ }
+ return eglSurface;
+ }
+
+ /**
+ * Creates an EGL surface associated with an offscreen buffer.
+ */
+ public EGLSurface createOffscreenSurface(int width, int height) {
+ int[] surfaceAttribs = {
+ EGL14.EGL_WIDTH, width,
+ EGL14.EGL_HEIGHT, height,
+ EGL14.EGL_NONE
+ };
+ EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig,
+ surfaceAttribs, 0);
+ checkEglError("eglCreatePbufferSurface");
+ if (eglSurface == null) {
+ throw new RuntimeException("surface was null");
+ }
+ return eglSurface;
+ }
+
+ /**
+ * Makes our EGL context current, using the supplied surface for both "draw" and "read".
+ */
+ public void makeCurrent(EGLSurface eglSurface) {
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ // called makeCurrent() before create?
+ Log.d(TAG, "NOTE: makeCurrent w/o display");
+ }
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ /**
+ * Makes our EGL context current, using the supplied "draw" and "read" surfaces.
+ */
+ public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ // called makeCurrent() before create?
+ Log.d(TAG, "NOTE: makeCurrent w/o display");
+ }
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent(draw,read) failed");
+ }
+ }
+
+ /**
+ * Makes no context current.
+ */
+ public void makeNothingCurrent() {
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+ EGL14.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ /**
+ * Calls eglSwapBuffers. Use this to "publish" the current frame.
+ *
+ * @return false on failure
+ */
+ public boolean swapBuffers(EGLSurface eglSurface) {
+ return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
+ }
+
+ /**
+ * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
+ */
+ public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
+ EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
+ }
+
+ /**
+ * Returns true if our context and the specified surface are current.
+ */
+ public boolean isCurrent(EGLSurface eglSurface) {
+ return mEGLContext.equals(EGL14.eglGetCurrentContext()) &&
+ eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
+ }
+
+ /**
+ * Performs a simple surface query.
+ */
+ public int querySurface(EGLSurface eglSurface, int what) {
+ int[] value = new int[1];
+ EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
+ return value[0];
+ }
+
+ /**
+ * Returns the GLES version this context is configured for (currently 2 or 3).
+ */
+ public int getGlVersion() {
+ return mGlVersion;
+ }
+
+ public static void logCurrent(String msg) {
+ EGLDisplay display = EGL14.eglGetCurrentDisplay();
+ EGLContext context = EGL14.eglGetCurrentContext();
+ EGLSurface surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
+ Log.i("EglCore", "Current EGL (" + msg + "): display=" + display + ", context=" + context + ", surface=" + surface);
+ }
+
+ private void checkEglError(String msg) {
+ int error;
+ if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+ }
+ }
+}
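
The classes that follow (EglSurfaceBase, WindowSurface, GLRender) build on EglCore. A minimal sketch of the intended lifecycle, assuming a dedicated GL thread and an offscreen pbuffer surface (EglCoreDemo is illustrative):

import android.opengl.EGLSurface;
import android.opengl.GLES20;

import io.agora.rtc.ss.gles.EglCore;

public class EglCoreDemo {
    public static void renderOnce() {
        // Create a GLES context, preferring GLES3 and falling back to GLES2.
        EglCore eglCore = new EglCore(null, EglCore.FLAG_TRY_GLES3);
        EGLSurface surface = eglCore.createOffscreenSurface(1280, 720);
        try {
            eglCore.makeCurrent(surface);          // bind context + surface to this thread
            GLES20.glClearColor(0f, 0f, 0f, 1f);   // ...issue GLES calls here...
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        } finally {
            eglCore.makeNothingCurrent();
            eglCore.releaseSurface(surface);
            eglCore.release();                     // must run on the thread that created the context
        }
    }
}
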
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/EglSurfaceBase.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/EglSurfaceBase.java
new file mode 100644
index 000000000..dc9c2e144
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/EglSurfaceBase.java
@@ -0,0 +1,179 @@
+package io.agora.rtc.ss.gles;
+
+import android.annotation.TargetApi;
+import android.graphics.Bitmap;
+import android.opengl.EGL14;
+import android.opengl.EGLSurface;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.util.Log;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Common base class for EGL surfaces.
+ *
+ * There can be multiple surfaces associated with a single context.
+ */
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class EglSurfaceBase {
+ private static final String TAG = "EglSurfaceBase";
+
+ // EglCore object we're associated with. It may be associated with multiple surfaces.
+ protected EglCore mEglCore;
+
+ private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
+ private int mWidth = -1;
+ private int mHeight = -1;
+
+ protected EglSurfaceBase(EglCore eglCore) {
+ mEglCore = eglCore;
+ }
+
+ public void createWindowSurface(Object surface) {
+ if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
+ throw new IllegalStateException("surface already created");
+ }
+ mEGLSurface = mEglCore.createWindowSurface(surface);
+
+ // Don't cache width/height here, because the size of the underlying surface can change
+ // out from under us (see e.g. HardwareScalerActivity).
+ // mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
+ // mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
+ }
+
+ /**
+ * Creates an off-screen surface.
+ */
+ public void createOffscreenSurface(int width, int height) {
+ if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
+ throw new IllegalStateException("surface already created");
+ }
+ mEGLSurface = mEglCore.createOffscreenSurface(width, height);
+ mWidth = width;
+ mHeight = height;
+ }
+
+ /**
+ * Returns the surface's width, in pixels.
+ *
+ * If this is called on a window surface, and the underlying surface is in the process
+ * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
+ * callback). The size should match after the next buffer swap.
+ */
+ public int getWidth() {
+ if (mWidth < 0) {
+ return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
+ } else {
+ return mWidth;
+ }
+ }
+
+ /**
+ * Returns the surface's height, in pixels.
+ */
+ public int getHeight() {
+ if (mHeight < 0) {
+ return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
+ } else {
+ return mHeight;
+ }
+ }
+
+ /**
+ * Release the EGL surface.
+ */
+ public void releaseEglSurface() {
+ mEglCore.releaseSurface(mEGLSurface);
+ mEGLSurface = EGL14.EGL_NO_SURFACE;
+ mWidth = mHeight = -1;
+ }
+
+ /**
+ * Makes our EGL context and surface current.
+ */
+ public void makeCurrent() {
+ mEglCore.makeCurrent(mEGLSurface);
+ }
+
+ /**
+ * Makes our EGL context and surface current for drawing, using the supplied surface
+ * for reading.
+ */
+ public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
+ mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
+ }
+
+ /**
+ * Calls eglSwapBuffers. Use this to "publish" the current frame.
+ *
+ * @return false on failure
+ */
+ public boolean swapBuffers() {
+ boolean result = mEglCore.swapBuffers(mEGLSurface);
+ if (!result) {
+ Log.d(TAG, "WARNING: swapBuffers() failed");
+ }
+ return result;
+ }
+
+ /**
+ * Sends the presentation time stamp to EGL.
+ *
+ * @param nsecs Timestamp, in nanoseconds.
+ */
+ public void setPresentationTime(long nsecs) {
+ mEglCore.setPresentationTime(mEGLSurface, nsecs);
+ }
+
+ /**
+ * Saves the EGL surface to a file.
+ *
+ * Expects that this object's EGL surface is current.
+ */
+ public void saveFrame(File file) throws IOException {
+ if (!mEglCore.isCurrent(mEGLSurface)) {
+ throw new RuntimeException("Expected EGL context/surface is not current");
+ }
+
+ // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
+ // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
+ // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
+ // Bitmap "copy pixels" method wants the same format GL provides.
+ //
+ // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
+ // here often.
+ //
+ // Making this even more interesting is the upside-down nature of GL, which means
+ // our output will look upside down relative to what appears on screen if the
+ // typical GL conventions are used.
+
+ String filename = file.toString();
+
+ int width = getWidth();
+ int height = getHeight();
+ ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
+ buf.order(ByteOrder.LITTLE_ENDIAN);
+ GLES20.glReadPixels(0, 0, width, height,
+ GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
+ GlUtil.checkGlError("glReadPixels");
+ buf.rewind();
+
+ BufferedOutputStream bos = null;
+ try {
+ bos = new BufferedOutputStream(new FileOutputStream(filename));
+ Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ bmp.copyPixelsFromBuffer(buf);
+ bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
+ bmp.recycle();
+ } finally {
+ if (bos != null) bos.close();
+ }
+ Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
+ }
+}
\ No newline at end of file
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/GLRender.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/GLRender.java
new file mode 100644
index 000000000..f171329c6
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/GLRender.java
@@ -0,0 +1,429 @@
+package io.agora.rtc.ss.gles;
+
+import android.graphics.Bitmap;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLContext;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.os.Build;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Message;
+import android.util.Log;
+import android.view.TextureView;
+
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+public class GLRender {
+ private static final String TAG = "GLRender";
+ private static final boolean DEBUG_ENABLED = true;
+
+ public static final int STATE_IDLE = 0;
+ public static final int STATE_READY = 1;
+ public static final int STATE_RELEASED = 2;
+
+ private static final int MSG_TYPE_SURFACE_CREATED = 0;
+ private static final int MSG_TYPE_SURFACE_CHANGED = 1;
+ private static final int MSG_TYPE_DRAW_FRAME = 2;
+ private static final int MSG_TYPE_QUIT = 3;
+
+ private HandlerThread mGLHandlerThread;
+ private Handler mGLHandler;
+
+ private TextureView mTextureView;
+ private EglCore mEglCore;
+ private WindowSurface mWindowSurface;
+ private EGLContext mEGLContext;
+ private GLSurfaceView mGLSurfaceView;
+
+ private AtomicInteger mState;
+ private long mThreadId;
+
+ private LinkedList<GLRenderListener> mGLRenderListenerList;
+ private final Object mRenderListenerLock = new Object();
+
+ private LinkedList<Runnable> mEventTaskList;
+ private final Object mEventLock = new Object();
+
+ private LinkedList<Runnable> mGLDrawTaskList;
+ private final Object mDrawLock = new Object();
+
+ private Runnable runnableDrawFrame = new Runnable() {
+ public void run() {
+ doDrawFrame();
+ }
+ };
+
+ private GLSurfaceView.Renderer mGLRenderer = new GLSurfaceView.Renderer() {
+ public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+ surfaceCreated(true);
+ }
+
+ public void onSurfaceChanged(GL10 gl, int width, int height) {
+ surfaceChanged(width, height);
+ }
+
+ public void onDrawFrame(GL10 gl) {
+ drawFrame();
+ }
+ };
+
+ private TextureView.SurfaceTextureListener mTextureListener = new TextureView.SurfaceTextureListener() {
+ public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
+ Log.d(TAG, "onSurfaceTextureAvailable " + surface + " " + width + " " + height);
+ initHandlerThread();
+
+ Message msg = Message.obtain(mGLHandler, MSG_TYPE_SURFACE_CREATED, surface);
+ mGLHandler.sendMessage(msg);
+ msg = Message.obtain(mGLHandler, MSG_TYPE_SURFACE_CHANGED, width, height);
+ mGLHandler.sendMessage(msg);
+ }
+
+ public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
+ Log.d(TAG, "onSurfaceTextureSizeChanged " + surface + " " + width + " " + height);
+ Message msg = Message.obtain(mGLHandler, MSG_TYPE_SURFACE_CHANGED, width, height);
+ mGLHandler.sendMessage(msg);
+ }
+
+ public boolean onSurfaceTextureDestroyed(SurfaceTexture st) {
+ Log.d(TAG, "onSurfaceTextureDestroyed " + st);
+ quit(st);
+ return false;
+ }
+
+ public void onSurfaceTextureUpdated(SurfaceTexture st) {
+ }
+ };
+
+ public GLRender() {
+ doInit(EGL14.EGL_NO_CONTEXT);
+ }
+
+ public GLRender(EGLContext ctx) {
+ doInit(ctx);
+ }
+
+ private void doInit(EGLContext ctx) {
+ mState = new AtomicInteger(STATE_RELEASED);
+ mGLRenderListenerList = new LinkedList<>();
+ mEventTaskList = new LinkedList<>();
+ mGLDrawTaskList = new LinkedList<>();
+ mEGLContext = ctx;
+ }
+
+ public void init(int width, int height) {
+ mState.set(STATE_IDLE);
+ initHandlerThread();
+
+ Message msg = Message.obtain(mGLHandler, MSG_TYPE_SURFACE_CREATED, width, height);
+ mGLHandler.sendMessage(msg);
+
+ msg = Message.obtain(mGLHandler, MSG_TYPE_SURFACE_CHANGED, width, height);
+ mGLHandler.sendMessage(msg);
+ }
+
+ public void update(int width, int height) {
+ Message msg = Message.obtain(mGLHandler, MSG_TYPE_SURFACE_CHANGED, width, height);
+ mGLHandler.sendMessage(msg);
+ }
+
+ public void init(GLSurfaceView sv) {
+ mState.set(STATE_IDLE);
+ sv.setEGLContextClientVersion(2); // GLES 2.0
+ sv.setRenderer(mGLRenderer);
+ sv.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+ mGLSurfaceView = sv;
+ }
+
+ public void init(TextureView tv) {
+ mState.set(STATE_IDLE);
+ tv.setSurfaceTextureListener(mTextureListener);
+ mTextureView = tv;
+ }
+
+ public void addListener(GLRender.GLRenderListener listener) {
+ synchronized (mRenderListenerLock) {
+ if (!mGLRenderListenerList.contains(listener)) {
+ mGLRenderListenerList.add(listener);
+ }
+ }
+ }
+
+ public void removeListener(GLRenderListener listener) {
+ synchronized (mRenderListenerLock) {
+ mGLRenderListenerList.remove(listener);
+ }
+ }
+
+ public int getState() {
+ return mState.get();
+ }
+
+ public EGLContext getEGLContext() {
+ return mEGLContext;
+ }
+
+ public boolean isGLRenderThread() {
+ return mThreadId == Thread.currentThread().getId();
+ }
+
+ public void onPause() {
+ if (mGLSurfaceView != null) {
+ mState.set(STATE_RELEASED);
+ mGLSurfaceView.queueEvent(new Runnable() {
+ public void run() {
+ quit();
+ }
+ });
+ mGLSurfaceView.onPause();
+ }
+ }
+
+ public void onResume() {
+ if (mState.get() == STATE_RELEASED) {
+ mState.set(STATE_IDLE);
+ }
+
+ if (mGLSurfaceView != null) {
+ mGLSurfaceView.onResume();
+ }
+ }
+
+ public void requestRender() {
+ if (mGLSurfaceView != null) {
+ mGLSurfaceView.requestRender();
+ }
+
+ if (mGLHandler != null) {
+ mGLHandler.sendEmptyMessage(MSG_TYPE_DRAW_FRAME);
+ }
+ }
+
+ public void queueEvent(Runnable runnable) {
+ if (mState.get() == STATE_IDLE) {
+ Log.d(TAG, "glContext not ready, queue event: " + runnable);
+ synchronized (mEventLock) {
+ mEventTaskList.add(runnable);
+ }
+ } else if (mState.get() == STATE_READY) {
+ if (mGLSurfaceView != null) {
+ mGLSurfaceView.queueEvent(runnable);
+ mGLSurfaceView.queueEvent(runnableDrawFrame);
+ } else if (mGLHandler != null) {
+ mGLHandler.post(runnable);
+ mGLHandler.post(runnableDrawFrame);
+ }
+ } else {
+ Log.d(TAG, "glContext lost, drop event: " + runnable);
+ }
+ }
+
+ public void queueDrawFrameAppends(Runnable runnable) {
+ if (mState.get() == STATE_READY) {
+ synchronized (mDrawLock) {
+ mGLDrawTaskList.add(runnable);
+ }
+ }
+ }
+
+ public void quit() {
+ if (mTextureView == null && mGLSurfaceView == null && mGLHandlerThread != null) {
+ mState.set(STATE_RELEASED);
+ quit(null);
+ }
+ }
+
+ private void surfaceCreated(boolean reInitCtx) {
+ mState.set(STATE_READY);
+ mThreadId = Thread.currentThread().getId();
+
+ GLES20.glEnable(GLES20.GL_BLEND);
+ GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
+
+ if (reInitCtx && Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
+ mEGLContext = EGL14.eglGetCurrentContext();
+ }
+
+ synchronized (mRenderListenerLock) {
+ Iterator<GLRenderListener> it = mGLRenderListenerList.iterator();
+
+ while (it.hasNext()) {
+ GLRender.GLRenderListener listener = it.next();
+ listener.onReady();
+ }
+ }
+ }
+
+ private void surfaceChanged(int width, int height) {
+ GLES20.glViewport(0, 0, width, height);
+
+ synchronized (mRenderListenerLock) {
+ Iterator<GLRenderListener> it = mGLRenderListenerList.iterator();
+
+ while (it.hasNext()) {
+ GLRender.GLRenderListener listener = it.next();
+ listener.onSizeChanged(width, height);
+ }
+ }
+ }
+
+ private void drawFrame() {
+ Iterator<?> it;
+ synchronized (mEventLock) {
+ it = mEventTaskList.iterator();
+
+ while (true) {
+ if (!it.hasNext()) {
+ mEventTaskList.clear();
+ break;
+ }
+
+ Runnable runnable = (Runnable) it.next();
+ runnable.run();
+ }
+ }
+
+ synchronized (mRenderListenerLock) {
+ it = mGLRenderListenerList.iterator();
+
+ while (true) {
+ if (!it.hasNext()) {
+ break;
+ }
+
+ GLRender.GLRenderListener listener = (GLRender.GLRenderListener) it.next();
+ listener.onDrawFrame();
+ }
+ }
+
+ doDrawFrame();
+ }
+
+ private void release() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
+ mEGLContext = EGL14.EGL_NO_CONTEXT;
+ }
+
+ mState.set(STATE_RELEASED);
+ synchronized (mRenderListenerLock) {
+ Iterator<GLRenderListener> it = mGLRenderListenerList.iterator();
+
+ while (it.hasNext()) {
+ GLRenderListener listener = it.next();
+ listener.onReleased();
+ }
+ }
+ }
+
+ private void doDrawFrame() {
+ while (true) {
+ Runnable runnable;
+ synchronized (mDrawLock) {
+ if (mGLDrawTaskList.isEmpty()) {
+ return;
+ }
+
+ runnable = mGLDrawTaskList.getFirst();
+ mGLDrawTaskList.removeFirst();
+ }
+
+ runnable.run();
+ }
+ }
+
+ private void prepareGlSurface(SurfaceTexture st, int width, int height) {
+ mEglCore = new EglCore(mEGLContext, 0);
+
+ if (st != null) {
+ mWindowSurface = new WindowSurface(mEglCore, st);
+ } else {
+ mWindowSurface = new WindowSurface(mEglCore, width, height);
+ }
+
+ mWindowSurface.makeCurrent();
+ GLES20.glViewport(0, 0, mWindowSurface.getWidth(), mWindowSurface.getHeight());
+ }
+
+ private void releaseGlSurface(SurfaceTexture st) {
+ if (st != null) {
+ st.release();
+ }
+
+ if (mWindowSurface != null) {
+ mWindowSurface.release();
+ mWindowSurface = null;
+ }
+
+ if (mEglCore != null) {
+ mEglCore.release();
+ mEglCore = null;
+ }
+ }
+
+ private void initHandlerThread() {
+ if (mGLHandlerThread == null) {
+ mGLHandlerThread = new HandlerThread("MyGLThread");
+ mGLHandlerThread.start();
+ mGLHandler = new Handler(mGLHandlerThread.getLooper(), new Handler.Callback() {
+ public boolean handleMessage(Message msg) {
+ switch (msg.what) {
+ case MSG_TYPE_SURFACE_CREATED:
+ prepareGlSurface((SurfaceTexture) msg.obj, msg.arg1, msg.arg2);
+ surfaceCreated(true);
+ break;
+ case MSG_TYPE_SURFACE_CHANGED:
+ surfaceChanged(msg.arg1, msg.arg2);
+ break;
+ case MSG_TYPE_DRAW_FRAME:
+ drawFrame();
+ mWindowSurface.swapBuffers();
+ break;
+ case MSG_TYPE_QUIT:
+ release();
+ releaseGlSurface((SurfaceTexture) msg.obj);
+ mGLHandlerThread.quit();
+ }
+
+ return true;
+ }
+ });
+ }
+ }
+
+ private void quit(SurfaceTexture st) {
+ if (mGLHandlerThread != null) {
+ mGLHandler.removeCallbacksAndMessages(null);
+ Message msg = Message.obtain(mGLHandler, MSG_TYPE_QUIT, st);
+ mGLHandler.sendMessage(msg);
+
+ try {
+ mGLHandlerThread.join();
+ } catch (InterruptedException e) {
+ Log.d(TAG, "quit " + Log.getStackTraceString(e));
+ } finally {
+ mGLHandlerThread = null;
+ mGLHandler = null;
+ }
+ }
+ }
+
+ public interface ScreenshotListener {
+ void onBitmapAvailable(Bitmap screenshot);
+ }
+
+ public interface GLRenderListener {
+ void onReady();
+
+ void onSizeChanged(int width, int height);
+
+ void onDrawFrame();
+
+ void onReleased();
+ }
+}
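
GLRender can also be driven without a TextureView or GLSurfaceView by calling init(width, height), which starts the internal handler thread. A small illustrative sketch (GLRenderDemo and its listener body are hypothetical):

import android.util.Log;

import io.agora.rtc.ss.gles.GLRender;

public class GLRenderDemo {
    public static GLRender startOffscreenRender() {
        GLRender render = new GLRender();
        render.addListener(new GLRender.GLRenderListener() {
            @Override
            public void onReady() {
                Log.d("GLRenderDemo", "GL context ready on render thread");
            }

            @Override
            public void onSizeChanged(int width, int height) {
                Log.d("GLRenderDemo", "viewport " + width + "x" + height);
            }

            @Override
            public void onDrawFrame() {
                // Issue per-frame GLES draw calls here.
            }

            @Override
            public void onReleased() {
                Log.d("GLRenderDemo", "GL context released");
            }
        });
        render.init(1280, 720);   // sends MSG_TYPE_SURFACE_CREATED + MSG_TYPE_SURFACE_CHANGED
        render.requestRender();   // schedules MSG_TYPE_DRAW_FRAME on the GL thread
        return render;            // call render.quit() when done to tear the GL thread down
    }
}
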
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/GlUtil.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/GlUtil.java
new file mode 100644
index 000000000..42bac06b3
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/GlUtil.java
@@ -0,0 +1,35 @@
+package io.agora.rtc.ss.gles;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.util.Log;
+
+public class GlUtil {
+ private static final String TAG = "GlUtil";
+
+ public static int createOESTextureObject() {
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+ checkGlError("glGenTextures");
+
+ int textureId = textures[0];
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
+ checkGlError("glBindTexture " + textureId);
+
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameter");
+ return textureId;
+ }
+
+ public static void checkGlError(String tag) {
+ int error = GLES20.glGetError();
+ if (error != GLES20.GL_NO_ERROR) {
+ String msg = tag + ": glError 0x" + Integer.toHexString(error);
+ Log.e(TAG, msg);
+ throw new RuntimeException(msg);
+ }
+ }
+}
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/ImgTexFormat.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/ImgTexFormat.java
new file mode 100644
index 000000000..cd36fa30a
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/ImgTexFormat.java
@@ -0,0 +1,24 @@
+package io.agora.rtc.ss.gles;
+
+public class ImgTexFormat {
+ public static final int COLOR_FORMAT_EXTERNAL_OES = 3;
+
+ public final int mColorFormat;
+ public final int mWidth;
+ public final int mHeight;
+
+ public ImgTexFormat(int cf, int width, int height) {
+ this.mColorFormat = cf;
+ this.mWidth = width;
+ this.mHeight = height;
+ }
+
+ @Override
+ public String toString() {
+ return "ImgTexFormat{" +
+ "mColorFormat=" + mColorFormat +
+ ", mWidth=" + mWidth +
+ ", mHeight=" + mHeight +
+ '}';
+ }
+}
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/ImgTexFrame.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/ImgTexFrame.java
new file mode 100644
index 000000000..3f7950446
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/ImgTexFrame.java
@@ -0,0 +1,36 @@
+package io.agora.rtc.ss.gles;
+
+import android.opengl.Matrix;
+
+import java.util.Arrays;
+
+public class ImgTexFrame extends AVFrameBase {
+ public static final int NO_TEXTURE = -1;
+ public static final float[] DEFAULT_MATRIX = new float[16];
+ public ImgTexFormat mFormat;
+ public int mTextureId = NO_TEXTURE;
+ public final float[] mTexMatrix;
+
+ public ImgTexFrame(ImgTexFormat format, int textureId, float[] matrix, long ts) {
+ this.mFormat = format;
+ this.mTextureId = textureId;
+ this.pts = ts;
+ this.dts = ts;
+
+ if (matrix != null && matrix.length == 16) {
+ this.mTexMatrix = matrix;
+ } else {
+ this.mTexMatrix = DEFAULT_MATRIX;
+ Matrix.setIdentityM(this.mTexMatrix, 0);
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "ImgTexFrame{" +
+ "mFormat=" + mFormat +
+ ", mTextureId=" + mTextureId +
+ ", mTexMatrix=" + Arrays.toString(mTexMatrix) +
+ '}';
+ }
+}
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/SinkConnector.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/SinkConnector.java
new file mode 100644
index 000000000..8093e2c2d
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/SinkConnector.java
@@ -0,0 +1,24 @@
+package io.agora.rtc.ss.gles;
+
+public abstract class SinkConnector<T> {
+ private volatile boolean mConnected = false;
+
+ public SinkConnector() {
+ }
+
+ protected void onConnected() {
+ this.mConnected = true;
+ }
+
+ protected synchronized void onDisconnect() {
+ this.mConnected = false;
+ }
+
+ public boolean isConnected() {
+ return this.mConnected;
+ }
+
+ public abstract void onFormatChanged(Object format);
+
+ public abstract void onFrameAvailable(T frame);
+}
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/SrcConnector.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/SrcConnector.java
new file mode 100644
index 000000000..eb4d91a4f
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/SrcConnector.java
@@ -0,0 +1,62 @@
+package io.agora.rtc.ss.gles;
+
+import java.util.Iterator;
+import java.util.LinkedList;
+
+public class SrcConnector<T> {
+ private LinkedList<SinkConnector<T>> plugList = new LinkedList<>();
+ private Object mFormat;
+
+ public SrcConnector() {
+ }
+
+ public synchronized boolean isConnected() {
+ return !this.plugList.isEmpty();
+ }
+
+ public synchronized void connect(SinkConnector<T> sink) {
+ if (!this.plugList.contains(sink)) {
+ this.plugList.add(sink);
+ sink.onConnected();
+ if (mFormat != null) {
+ sink.onFormatChanged(mFormat);
+ }
+ }
+ }
+
+ public synchronized void onFormatChanged(Object format) {
+ mFormat = format;
+ Iterator<SinkConnector<T>> it = this.plugList.iterator();
+ while (it.hasNext()) {
+ SinkConnector<T> pin = it.next();
+ pin.onFormatChanged(format);
+ }
+ }
+
+ public synchronized void onFrameAvailable(T frame) {
+ Iterator<SinkConnector<T>> it = this.plugList.iterator();
+ while (it.hasNext()) {
+ SinkConnector<T> sink = it.next();
+ sink.onFrameAvailable(frame);
+ }
+ }
+
+ public synchronized void disconnect() {
+ this.disconnect(null);
+ }
+
+ public synchronized void disconnect(SinkConnector<T> sink) {
+ if (sink != null) {
+ sink.onDisconnect();
+ this.plugList.remove(sink);
+ } else {
+ Iterator<SinkConnector<T>> it = this.plugList.iterator();
+ while (it.hasNext()) {
+ SinkConnector<T> pin = it.next();
+ pin.onDisconnect();
+ }
+ this.plugList.clear();
+ }
+
+ }
+}
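
SrcConnector and SinkConnector form a simple one-to-many source/sink pipe; ScreenCapture below exposes a SrcConnector carrying ImgTexFrame. A short sketch of the pattern, assuming the generic parameters reconstructed above (ConnectorDemo, the texture id and timestamp are placeholders):

import android.util.Log;

import io.agora.rtc.ss.gles.ImgTexFormat;
import io.agora.rtc.ss.gles.ImgTexFrame;
import io.agora.rtc.ss.gles.SinkConnector;
import io.agora.rtc.ss.gles.SrcConnector;

public class ConnectorDemo {
    public static void demo(int oesTextureId, long timestampMs) {
        SrcConnector<ImgTexFrame> src = new SrcConnector<>();

        SinkConnector<ImgTexFrame> sink = new SinkConnector<ImgTexFrame>() {
            @Override
            public void onFormatChanged(Object format) {
                Log.d("ConnectorDemo", "format changed: " + format);
            }

            @Override
            public void onFrameAvailable(ImgTexFrame frame) {
                Log.d("ConnectorDemo", "texture " + frame.mTextureId + " pts " + frame.pts);
            }
        };

        src.connect(sink);  // sink now receives the current format (if any) and future frames

        ImgTexFormat format = new ImgTexFormat(ImgTexFormat.COLOR_FORMAT_EXTERNAL_OES, 1280, 720);
        src.onFormatChanged(format);
        src.onFrameAvailable(new ImgTexFrame(format, oesTextureId, null, timestampMs));

        src.disconnect();   // detach all sinks when done
    }
}
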
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/WindowSurface.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/WindowSurface.java
new file mode 100644
index 000000000..31460ced2
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/gles/WindowSurface.java
@@ -0,0 +1,63 @@
+package io.agora.rtc.ss.gles;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.os.Build;
+import android.view.Surface;
+
+/**
+ * Recordable EGL window surface.
+ *
+ * It's good practice to explicitly quit() the surface, preferably from a "finally" block.
+ */
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class WindowSurface extends EglSurfaceBase {
+ private Surface mSurface;
+
+ public WindowSurface(EglCore eglCore, int width, int height) {
+ super(eglCore);
+ this.createOffscreenSurface(width, height);
+ }
+
+ public WindowSurface(EglCore eglCore, Surface surface) {
+ super(eglCore);
+ this.createWindowSurface(surface);
+ this.mSurface = surface;
+ }
+
+ public WindowSurface(EglCore eglCore, SurfaceTexture texture) {
+ super(eglCore);
+ this.createWindowSurface(texture);
+ }
+
+ public void release() {
+ this.releaseEglSurface();
+
+ if (this.mSurface != null) {
+ this.mSurface.release();
+ this.mSurface = null;
+ }
+ }
+
+ /**
+ * Recreate the EGLSurface, using the new EglBase. The caller should have already
+ * freed the old EGLSurface with releaseEglSurface().
+ *
+ * This is useful when we want to update the EGLSurface associated with a Surface.
+ * For example, if we want to share with a different EGLContext, which can only
+ * be done by tearing down and recreating the context. (That's handled by the caller;
+ * this just creates a new EGLSurface for the Surface we were handed earlier.)
+ *
+ * If the previous EGLSurface isn't fully destroyed, e.g. it's still current on a
+ * context somewhere, the create call will fail with complaints from the Surface
+ * about already being connected.
+ */
+ public void recreate(EglCore newEglCore) {
+ if (this.mSurface == null) {
+ throw new RuntimeException("not yet implemented for SurfaceTexture");
+ } else {
+ this.mEglCore = newEglCore; // switch to new context
+ this.createWindowSurface(this.mSurface); // create new surface
+ }
+ }
+}
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/NotificationHelper.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/NotificationHelper.java
new file mode 100644
index 000000000..f54495e98
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/NotificationHelper.java
@@ -0,0 +1,52 @@
+package io.agora.rtc.ss.impl;
+
+import android.annotation.TargetApi;
+import android.app.Notification;
+import android.app.NotificationChannel;
+import android.app.NotificationManager;
+import android.content.Context;
+import android.os.Build;
+
+import androidx.annotation.RequiresApi;
+
+public class NotificationHelper {
+
+ public static String generateChannelId(Context ctx, int notification) {
+ String channelId;
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ channelId = NotificationHelper.createNotificationChannel(ctx, notification);
+ } else {
+ // If earlier version channel ID is not used
+ // https://developer.android.com/reference/android/support/v4/app/NotificationCompat.Builder.html#NotificationCompat.Builder(android.content.Context)
+ channelId = "";
+ }
+ return channelId;
+ }
+
+ @RequiresApi(Build.VERSION_CODES.O)
+ @TargetApi(Build.VERSION_CODES.O)
+ private static String createNotificationChannel(Context ctx, int notification) {
+
+
+ String channelId;
+ String channelName;
+
+ NotificationChannel chan;
+
+ switch (notification) {
+ default:
+ channelId = "generic_noti";
+ channelName = "Generic";
+
+ chan = new NotificationChannel(channelId,
+ channelName, NotificationManager.IMPORTANCE_NONE);
+ break;
+
+ }
+
+ chan.setLockscreenVisibility(Notification.VISIBILITY_PRIVATE);
+ NotificationManager service = (NotificationManager) ctx.getSystemService(Context.NOTIFICATION_SERVICE);
+ service.createNotificationChannel(chan);
+ return channelId;
+ }
+}
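
generateChannelId() is presumably consumed when the sharing service promotes itself to a foreground service; a hedged sketch of that usage with androidx NotificationCompat follows (the Service subclass, title, icon and notification id are placeholders, not the actual ScreenSharingService implementation):

import android.app.Notification;
import android.app.Service;

import androidx.core.app.NotificationCompat;

import io.agora.rtc.ss.impl.NotificationHelper;

// Illustrative only: shows how a Service subclass could consume generateChannelId().
public abstract class ForegroundNotificationExample extends Service {
    protected void goForeground() {
        String channelId = NotificationHelper.generateChannelId(this, 0);
        Notification notification = new NotificationCompat.Builder(this, channelId)
                .setContentTitle("Screen sharing in progress")
                .setSmallIcon(android.R.drawable.ic_menu_camera)
                .setOngoing(true)
                .build();
        startForeground(1, notification);
    }
}
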
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/ScreenCapture.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/ScreenCapture.java
new file mode 100644
index 000000000..48a5c90b8
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/ScreenCapture.java
@@ -0,0 +1,517 @@
+package io.agora.rtc.ss.impl;
+
+import android.annotation.TargetApi;
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.graphics.SurfaceTexture;
+import android.hardware.display.DisplayManager;
+import android.hardware.display.VirtualDisplay;
+import android.media.projection.MediaProjection;
+import android.media.projection.MediaProjectionManager;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Message;
+import android.util.Log;
+import android.view.Surface;
+import android.view.Window;
+
+import java.lang.ref.WeakReference;
+import java.util.Locale;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import io.agora.rtc.ss.gles.GLRender;
+import io.agora.rtc.ss.gles.GlUtil;
+import io.agora.rtc.ss.gles.ImgTexFormat;
+import io.agora.rtc.ss.gles.ImgTexFrame;
+import io.agora.rtc.ss.gles.SrcConnector;
+
+/**
+ * Captures video frames from the screen.
+ */
+@TargetApi(Build.VERSION_CODES.LOLLIPOP)
+public class ScreenCapture implements SurfaceTexture.OnFrameAvailableListener {
+
+ private static final boolean DEBUG_ENABLED = true;
+
+ private static final String TAG = ScreenCapture.class.getSimpleName();
+
+ public static final int MEDIA_PROJECTION_REQUEST_CODE = 1001;
+
+ private Context mContext;
+ private OnScreenCaptureListener mOnScreenCaptureListener;
+ public MediaProjectionManager mMediaProjectManager; // mMediaProjectionManager
+ private MediaProjection mMediaProjection; // mMediaProjection
+ private VirtualDisplay mVirtualDisplay; // mVirtualDisplay
+
+ private int mWidth = 1280; // mWidth
+ private int mHeight = 720; // mHeight
+
+ public final static int SCREEN_STATE_IDLE = 0;
+ public final static int SCREEN_STATE_INITIALIZING = 1;
+ public final static int SCREEN_STATE_INITIALIZED = 2;
+ public final static int SCREEN_STATE_STOPPING = 3;
+ public final static int SCREEN_STATE_CAPTURING = 4;
+
+ public final static int SCREEN_ERROR_SYSTEM_UNSUPPORTED = -1;
+ public final static int SCREEN_ERROR_PERMISSION_DENIED = -2;
+
+ public final static int SCREEN_RECORD_STARTED = 4;
+ public final static int SCREEN_RECORD_FAILED = 5;
+
+ private final static int MSG_SCREEN_START_SCREEN_ACTIVITY = 1;
+ private final static int MSG_SCREEN_INIT_PROJECTION = 2;
+ private final static int MSG_SCREEN_START = 3;
+ private final static int MSG_SCREEN_RELEASE = 4;
+ private final static int MSG_SCREEN_QUIT = 5;
+
+ private final static int RELEASE_SCREEN_THREAD = 1;
+
+ private AtomicInteger mState;
+
+ private GLRender mGLRender;
+ private int mTextureId;
+ private Surface mSurface;
+ private SurfaceTexture mSurfaceTexture;
+ private boolean mTexInited = false;
+ private ImgTexFormat mImgTexFormat;
+
+ private Handler mMainHandler;
+ private HandlerThread mScreenSetupThread;
+ private Handler mScreenSetupHandler;
+
+ private int mScreenDensity;
+
+ // fill extra frame
+ private Runnable mFillFrameRunnable;
+
+ private final static boolean TRACE = true;
+ // Performance trace
+ private long mLastTraceTime;
+ private long mFrameDrawed;
+
+ /**
+ * Source pin that transfers ImgTexFrame, used for the GPU path and preview
+ */
+ public SrcConnector<ImgTexFrame> mImgTexSrcConnector;
+
+ public ScreenCapture(Context context, GLRender render, int density) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
+ throw new RuntimeException("Need API level " + Build.VERSION_CODES.LOLLIPOP);
+ }
+
+ if (context == null || render == null) {
+ throw new IllegalArgumentException("the context or render must not be null");
+ }
+
+ mContext = context;
+ mGLRender = render;
+ mScreenDensity = density;
+
+ mGLRender.addListener(mGLRenderListener);
+ mImgTexSrcConnector = new SrcConnector<>();
+ mMainHandler = new MainHandler(this);
+ mState = new AtomicInteger(SCREEN_STATE_IDLE);
+ mFillFrameRunnable = new Runnable() {
+ @Override
+ public void run() {
+ if (mState.get() == SCREEN_STATE_CAPTURING) {
+ mGLRender.requestRender();
+ mMainHandler.postDelayed(mFillFrameRunnable, 100);
+ }
+ }
+ };
+
+ initScreenSetupThread();
+ }
+
+ /**
+ * Starts screen recording.
+ * Can only be called when the capture state is SCREEN_STATE_IDLE.
+ */
+ public boolean start() {
+ if (DEBUG_ENABLED) {
+ Log.d(TAG, "start");
+ }
+
+ if (mState.get() != SCREEN_STATE_IDLE) {
+ return false;
+ }
+
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
+ Message msg = mMainHandler.obtainMessage(SCREEN_RECORD_FAILED, SCREEN_ERROR_SYSTEM_UNSUPPORTED, 0);
+ mMainHandler.sendMessage(msg);
+ return false;
+ }
+
+ mState.set(SCREEN_STATE_INITIALIZING);
+ mScreenSetupHandler.removeMessages(MSG_SCREEN_START_SCREEN_ACTIVITY);
+ mScreenSetupHandler.sendEmptyMessage(MSG_SCREEN_START_SCREEN_ACTIVITY);
+ return true;
+ }
+
+ /**
+ * Stops screen recording.
+ */
+ public void stop() {
+ if (DEBUG_ENABLED) {
+ Log.d(TAG, "stop");
+ }
+
+ if (mState.get() == SCREEN_STATE_IDLE) {
+ return;
+ }
+
+ // stop fill frame
+ mMainHandler.removeCallbacks(mFillFrameRunnable);
+
+ Message msg = new Message();
+ msg.what = MSG_SCREEN_RELEASE;
+ msg.arg1 = ~RELEASE_SCREEN_THREAD;
+
+ mState.set(SCREEN_STATE_STOPPING);
+ mScreenSetupHandler.removeMessages(MSG_SCREEN_RELEASE);
+ mScreenSetupHandler.sendMessage(msg);
+ }
+
+ public void release() {
+ // stop fill frame
+ if (mMainHandler != null) {
+ mMainHandler.removeCallbacks(mFillFrameRunnable);
+ }
+
+ if (mState.get() == SCREEN_STATE_IDLE) {
+ mScreenSetupHandler.removeMessages(MSG_SCREEN_QUIT);
+ mScreenSetupHandler.sendEmptyMessage(MSG_SCREEN_QUIT);
+ quitThread();
+ return;
+ }
+
+ Message msg = new Message();
+ msg.what = MSG_SCREEN_RELEASE;
+ msg.arg1 = RELEASE_SCREEN_THREAD;
+
+ mState.set(SCREEN_STATE_STOPPING);
+ mScreenSetupHandler.removeMessages(MSG_SCREEN_RELEASE);
+ mScreenSetupHandler.sendMessage(msg);
+
+ quitThread();
+ }
+
+ /**
+ * Registers a listener for screen capture status changes.
+ *
+ * @param listener listener notified when capture starts or an error occurs
+ */
+ public void setOnScreenCaptureListener(OnScreenCaptureListener listener) {
+ mOnScreenCaptureListener = listener;
+ }
+
+ @Override
+ public void onFrameAvailable(SurfaceTexture st) {
+ if (mState.get() != SCREEN_STATE_CAPTURING) {
+ return;
+ }
+ mGLRender.requestRender();
+ if (mMainHandler != null) {
+ mMainHandler.removeCallbacks(mFillFrameRunnable);
+ mMainHandler.postDelayed(mFillFrameRunnable, 100);
+ }
+ }
+
+ private void initTexFormat() {
+ mImgTexFormat = new ImgTexFormat(ImgTexFormat.COLOR_FORMAT_EXTERNAL_OES, mWidth, mHeight);
+ mImgTexSrcConnector.onFormatChanged(mImgTexFormat);
+ }
+
+ public final void initProjection(int requestCode, int resultCode, Intent intent) {
+ if (DEBUG_ENABLED) {
+ Log.d(TAG, "initProjection");
+ }
+
+ if (requestCode != MEDIA_PROJECTION_REQUEST_CODE) {
+ if (DEBUG_ENABLED) {
+ Log.d(TAG, "Unknown request code: " + requestCode);
+ }
+ } else if (resultCode != Activity.RESULT_OK) {
+ Log.e(TAG, "Screen Cast Permission Denied, resultCode: " + resultCode);
+ Message msg = mMainHandler.obtainMessage(SCREEN_RECORD_FAILED,
+ SCREEN_ERROR_PERMISSION_DENIED, 0);
+ mMainHandler.sendMessage(msg);
+ stop();
+ } else {
+ // get media projection and virtual display
+ mMediaProjection = mMediaProjectManager.getMediaProjection(resultCode, intent);
+
+ if (mSurface != null) {
+ startScreenCapture();
+ } else {
+ mState.set(SCREEN_STATE_INITIALIZED);
+ }
+ }
+ }
+
+ private GLRender.GLRenderListener mGLRenderListener = new GLRender.GLRenderListener() {
+ @Override
+ public void onReady() {
+ Log.d(TAG, "onReady");
+ }
+
+ @Override
+ public void onSizeChanged(int width, int height) {
+ Log.d(TAG, "onSizeChanged : " + width + "*" + height);
+ mWidth = width;
+ mHeight = height;
+
+ mTexInited = false;
+
+ if (mVirtualDisplay != null) {
+ mVirtualDisplay.release();
+ mVirtualDisplay = null;
+ }
+
+ mTextureId = GlUtil.createOESTextureObject();
+ if (mSurfaceTexture != null) {
+ mSurfaceTexture.release();
+ }
+
+ if (mSurface != null) {
+ mSurface.release();
+ }
+ mSurfaceTexture = new SurfaceTexture(mTextureId);
+ mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
+ mSurface = new Surface(mSurfaceTexture);
+
+ mSurfaceTexture.setOnFrameAvailableListener(ScreenCapture.this);
+
+ if (mState.get() >= SCREEN_STATE_INITIALIZED && mVirtualDisplay == null) {
+ mScreenSetupHandler.removeMessages(MSG_SCREEN_START);
+ mScreenSetupHandler.sendEmptyMessage(MSG_SCREEN_START);
+ }
+ }
+
+ @Override
+ public void onDrawFrame() {
+ long pts = System.nanoTime() / 1000 / 1000;
+ try {
+ mSurfaceTexture.updateTexImage();
+ } catch (Exception e) {
+ Log.e(TAG, "updateTexImage failed, ignore");
+ return;
+ }
+
+ if (!mTexInited) {
+ mTexInited = true;
+ initTexFormat();
+ }
+
+ float[] texMatrix = new float[16];
+ mSurfaceTexture.getTransformMatrix(texMatrix);
+ ImgTexFrame frame = new ImgTexFrame(mImgTexFormat, mTextureId, texMatrix, pts);
+ try {
+ mImgTexSrcConnector.onFrameAvailable(frame);
+ } catch (Exception e) {
+ e.printStackTrace();
+ Log.e(TAG, "Draw frame failed, ignore");
+ }
+
+ if (TRACE) {
+ mFrameDrawed++;
+ long tm = System.currentTimeMillis();
+ long tmDiff = tm - mLastTraceTime;
+ if (tmDiff >= 5000) {
+ float fps = mFrameDrawed * 1000.f / tmDiff;
+ Log.d(TAG, "screen fps: " + String.format(Locale.getDefault(), "%.2f", fps));
+ mFrameDrawed = 0;
+ mLastTraceTime = tm;
+ }
+ }
+ }
+
+ @Override
+ public void onReleased() {
+
+ }
+ };
+
+ private void startScreenCapture() {
+ mVirtualDisplay = mMediaProjection.createVirtualDisplay("ScreenCapture",
+ mWidth, mHeight, mScreenDensity, DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, mSurface,
+ null, null);
+
+ mState.set(SCREEN_STATE_CAPTURING);
+ Message msg = mMainHandler.obtainMessage(SCREEN_RECORD_STARTED, 0, 0);
+ mMainHandler.sendMessage(msg);
+ }
+
+ private static class MainHandler extends Handler {
+ private final WeakReference<ScreenCapture> weakCapture;
+
+ public MainHandler(ScreenCapture screenCapture) {
+ super();
+ this.weakCapture = new WeakReference<>(screenCapture);
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ ScreenCapture screenCapture = weakCapture.get();
+ if (screenCapture == null) {
+ return;
+ }
+ switch (msg.what) {
+ case SCREEN_RECORD_STARTED:
+ if (screenCapture.mOnScreenCaptureListener != null) {
+ screenCapture.mOnScreenCaptureListener.onStarted();
+ }
+ break;
+ case SCREEN_RECORD_FAILED:
+ if (screenCapture.mOnScreenCaptureListener != null) {
+ screenCapture.mOnScreenCaptureListener.onError(msg.arg1);
+ }
+ break;
+ default:
+ break;
+
+ }
+ }
+ }
+
+ private void initScreenSetupThread() {
+ mScreenSetupThread = new HandlerThread("screen_setup_thread", Thread.NORM_PRIORITY);
+ mScreenSetupThread.start();
+ mScreenSetupHandler = new Handler(mScreenSetupThread.getLooper()) {
+ @Override
+ public void handleMessage(Message msg) {
+ switch (msg.what) {
+ case MSG_SCREEN_START_SCREEN_ACTIVITY: {
+ doScreenSetup();
+ break;
+ }
+ case MSG_SCREEN_INIT_PROJECTION: {
+ initProjection(msg.arg1, msg.arg2, mProjectionIntent);
+ break;
+ }
+ case MSG_SCREEN_START: {
+ startScreenCapture();
+ break;
+ }
+ case MSG_SCREEN_RELEASE: {
+ doScreenRelease(msg.arg1);
+ break;
+ }
+ case MSG_SCREEN_QUIT: {
+ mScreenSetupThread.quit();
+ break;
+ }
+ }
+ }
+ };
+ }
+
+ private void quitThread() {
+ try {
+ mScreenSetupThread.join();
+ } catch (InterruptedException e) {
+ Log.d(TAG, "quitThread " + Log.getStackTraceString(e));
+ } finally {
+ mScreenSetupThread = null;
+ }
+
+ if (mMainHandler != null) {
+ mMainHandler.removeCallbacksAndMessages(null);
+ mMainHandler = null;
+ }
+ }
+
+ private void doScreenSetup() {
+ if (DEBUG_ENABLED) {
+ Log.d(TAG, "doScreenSetup");
+ }
+
+ if (mMediaProjectManager == null) {
+ mMediaProjectManager = (MediaProjectionManager) mContext.getSystemService(
+ Context.MEDIA_PROJECTION_SERVICE);
+ }
+
+ Intent intent = new Intent(mContext, ScreenCapture.ScreenCaptureAssistantActivity.class);
+ intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
+ ScreenCapture.ScreenCaptureAssistantActivity.mScreenCapture = this;
+ mContext.startActivity(intent);
+ }
+
+ private void doScreenRelease(int isQuit) {
+ if (DEBUG_ENABLED) {
+ Log.d(TAG, "doScreenRelease");
+ }
+
+ mState.set(SCREEN_STATE_IDLE);
+
+ if (mVirtualDisplay != null) {
+ mVirtualDisplay.release();
+ }
+
+ if (mMediaProjection != null) {
+ mMediaProjection.stop();
+ }
+
+ mVirtualDisplay = null;
+ mMediaProjection = null;
+
+ if (isQuit == RELEASE_SCREEN_THREAD) {
+ mScreenSetupHandler.sendEmptyMessage(MSG_SCREEN_QUIT);
+ }
+ }
+
+ public Intent mProjectionIntent;
+
+ public static class ScreenCaptureAssistantActivity extends Activity {
+ public static ScreenCapture mScreenCapture;
+
+ public void onCreate(Bundle bundle) {
+ super.onCreate(bundle);
+ requestWindowFeature(Window.FEATURE_NO_TITLE);
+ if (mScreenCapture.mMediaProjectManager == null) {
+ mScreenCapture.mMediaProjectManager =
+ (MediaProjectionManager) this.getSystemService(Context.MEDIA_PROJECTION_SERVICE);
+ }
+
+ this.startActivityForResult(
+ mScreenCapture.mMediaProjectManager.createScreenCaptureIntent(),
+ ScreenCapture.MEDIA_PROJECTION_REQUEST_CODE);
+ }
+
+ public void onActivityResult(int requestCode, int resultCode, Intent intent) {
+ if (mScreenCapture != null && mScreenCapture.mState.get() != SCREEN_STATE_IDLE) {
+ Message msg = new Message();
+ msg.what = MSG_SCREEN_INIT_PROJECTION;
+ msg.arg1 = requestCode;
+ msg.arg2 = resultCode;
+ mScreenCapture.mProjectionIntent = intent;
+ mScreenCapture.mScreenSetupHandler.removeMessages(MSG_SCREEN_INIT_PROJECTION);
+ mScreenCapture.mScreenSetupHandler.sendMessage(msg);
+ }
+ mScreenCapture = null;
+ finish();
+ }
+ }
+
+ public interface OnScreenCaptureListener {
+
+ /**
+ * Notify screen capture started.
+ */
+ void onStarted();
+
+ /**
+ * Notify that an error occurred during screen capturing.
+ *
+ * @param err error code.
+ * @see #SCREEN_ERROR_SYSTEM_UNSUPPORTED
+ * @see #SCREEN_ERROR_PERMISSION_DENIED
+ */
+ void onError(int err);
+ }
+
+}
+
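For reference, a minimal host-side sketch of driving this class is below. The variable names context, windowManager, and TAG are illustrative placeholders; the ScreenCapture, GLRender, and listener calls are the ones defined in the file above, and the wiring mirrors what ScreenSharingService does later in this patch.

    // Hypothetical host snippet showing the expected call order.
    GLRender glRender = new GLRender();
    DisplayMetrics metrics = new DisplayMetrics();
    windowManager.getDefaultDisplay().getMetrics(metrics);

    ScreenCapture capture = new ScreenCapture(context, glRender, metrics.densityDpi);
    capture.setOnScreenCaptureListener(new ScreenCapture.OnScreenCaptureListener() {
        @Override
        public void onStarted() {
            Log.d(TAG, "screen capture started");
        }

        @Override
        public void onError(int err) {
            Log.e(TAG, "screen capture error: " + err);
        }
    });

    // Size the offscreen GL surface; onSizeChanged() in ScreenCapture then creates
    // the SurfaceTexture that the virtual display renders into.
    glRender.init(metrics.widthPixels, metrics.heightPixels);

    // start() launches ScreenCaptureAssistantActivity to request the MediaProjection
    // permission; captured frames are delivered through mImgTexSrcConnector.
    capture.start();

    // ... later, when sharing ends:
    capture.stop();
    capture.release();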
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/ScreenCaptureSource.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/ScreenCaptureSource.java
new file mode 100644
index 000000000..7e9b4599f
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/ScreenCaptureSource.java
@@ -0,0 +1,51 @@
+package io.agora.rtc.ss.impl;
+
+import io.agora.rtc.mediaio.IVideoFrameConsumer;
+import io.agora.rtc.mediaio.IVideoSource;
+import io.agora.rtc.mediaio.MediaIO;
+import io.agora.rtc.video.AgoraVideoFrame;
+
+public class ScreenCaptureSource implements IVideoSource {
+
+ private IVideoFrameConsumer mConsumer;
+
+ @Override
+ public boolean onInitialize(IVideoFrameConsumer observer) {
+ mConsumer = observer;
+ return true;
+ }
+
+ @Override
+ public int getBufferType() {
+ return AgoraVideoFrame.BUFFER_TYPE_TEXTURE;
+ }
+
+ @Override
+ public int getCaptureType() {
+ return MediaIO.CaptureType.SCREEN.intValue();
+ }
+
+ @Override
+ public int getContentHint() {
+ return MediaIO.ContentHint.NONE.intValue();
+ }
+
+ @Override
+ public void onDispose() {
+ mConsumer = null;
+ }
+
+ @Override
+ public void onStop() {
+ }
+
+ @Override
+ public boolean onStart() {
+ return true;
+ }
+
+ public IVideoFrameConsumer getConsumer() {
+ return mConsumer;
+ }
+
+}
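ScreenCaptureSource is deliberately passive: it only stores the IVideoFrameConsumer handed to it in onInitialize, and the capture pipeline pushes OES texture frames into that consumer. A hedged sketch of that hand-off follows the call ScreenSharingService makes below; the rtcEngine and frame (an ImgTexFrame) variables are assumed to exist in the caller.

    // Register the custom source, then feed each captured texture frame to the
    // consumer that the engine supplied via onInitialize().
    ScreenCaptureSource source = new ScreenCaptureSource();
    rtcEngine.setVideoSource(source);

    IVideoFrameConsumer consumer = source.getConsumer();
    if (consumer != null) { // null until the engine has called onInitialize()
        consumer.consumeTextureFrame(frame.mTextureId,
                AgoraVideoFrame.FORMAT_TEXTURE_OES,
                frame.mFormat.mWidth, frame.mFormat.mHeight,
                0 /* rotation */, frame.pts, frame.mTexMatrix);
    }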
diff --git a/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/ScreenSharingService.java b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/ScreenSharingService.java
new file mode 100644
index 000000000..0509fa42b
--- /dev/null
+++ b/Android/APIExample/lib-screensharing/src/main/java/io/agora/rtc/ss/impl/ScreenSharingService.java
@@ -0,0 +1,384 @@
+package io.agora.rtc.ss.impl;
+
+import android.app.Notification;
+import android.app.Service;
+import android.content.Context;
+import android.content.Intent;
+import android.content.res.Configuration;
+import android.os.Build;
+import android.os.IBinder;
+import android.os.Process;
+import android.os.RemoteCallbackList;
+import android.os.RemoteException;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.view.WindowManager;
+
+import androidx.core.app.NotificationCompat;
+
+import io.agora.rtc.Constants;
+import io.agora.rtc.IRtcEngineEventHandler;
+import io.agora.rtc.RtcEngine;
+import io.agora.rtc.models.ChannelMediaOptions;
+import io.agora.rtc.ss.Constant;
+import io.agora.rtc.ss.R;
+import io.agora.rtc.ss.aidl.INotification;
+import io.agora.rtc.ss.aidl.IScreenSharing;
+import io.agora.rtc.ss.gles.GLRender;
+import io.agora.rtc.ss.gles.ImgTexFrame;
+import io.agora.rtc.ss.gles.SinkConnector;
+import io.agora.rtc.video.AgoraVideoFrame;
+import io.agora.rtc.video.CameraCapturerConfiguration;
+import io.agora.rtc.video.VideoEncoderConfiguration;
+
+public class ScreenSharingService extends Service {
+
+ private static final String LOG_TAG = ScreenSharingService.class.getSimpleName();
+
+ private ScreenCapture mScreenCapture;
+ private GLRender mScreenGLRender;
+ private RtcEngine mRtcEngine;
+ private Context mContext;
+ private ScreenCaptureSource mSCS;
+
+ private final RemoteCallbackList<INotification> mCallbacks = new RemoteCallbackList<>();
+
+ private final IScreenSharing.Stub mBinder = new IScreenSharing.Stub() {
+ public void registerCallback(INotification cb) {
+ if (cb != null) mCallbacks.register(cb);
+ }
+
+ public void unregisterCallback(INotification cb) {
+ if (cb != null) mCallbacks.unregister(cb);
+ }
+
+ public void startShare() {
+ startCapture();
+ }
+
+ public void stopShare() {
+ stopCapture();
+ }
+
+ public void renewToken(String token) {
+ refreshToken(token);
+ }
+ };
+
+ private void initModules() {
+ WindowManager wm = (WindowManager) getApplicationContext().getSystemService(Context.WINDOW_SERVICE);
+ DisplayMetrics metrics = new DisplayMetrics();
+ wm.getDefaultDisplay().getMetrics(metrics);
+
+ if (mScreenGLRender == null) {
+ mScreenGLRender = new GLRender();
+ }
+ if (mScreenCapture == null) {
+ mScreenCapture = new ScreenCapture(mContext, mScreenGLRender, metrics.densityDpi);
+ }
+
+ mScreenCapture.mImgTexSrcConnector.connect(new SinkConnector<ImgTexFrame>() {
+ @Override
+ public void onFormatChanged(Object obj) {
+ Log.d(LOG_TAG, "onFormatChanged " + obj.toString());
+ }
+
+ @Override
+ public void onFrameAvailable(ImgTexFrame frame) {
+ Log.d(LOG_TAG, "onFrameAvailable " + frame.toString() + " " + frame.pts);
+
+ if (mRtcEngine == null) {
+ return;
+ }
+
+ mSCS.getConsumer().consumeTextureFrame(frame.mTextureId, AgoraVideoFrame.FORMAT_TEXTURE_OES, frame.mFormat.mWidth,
+ frame.mFormat.mHeight, 0, frame.pts, frame.mTexMatrix);
+ }
+ });
+
+ mScreenCapture.setOnScreenCaptureListener(new ScreenCapture.OnScreenCaptureListener() {
+ @Override
+ public void onStarted() {
+ Log.d(LOG_TAG, "Screen Record Started");
+ }
+
+ @Override
+ public void onError(int err) {
+ Log.d(LOG_TAG, "onError " + err);
+ switch (err) {
+ case ScreenCapture.SCREEN_ERROR_SYSTEM_UNSUPPORTED:
+ break;
+ case ScreenCapture.SCREEN_ERROR_PERMISSION_DENIED:
+ break;
+ }
+ }
+ });
+
+ DisplayMetrics outMetrics = new DisplayMetrics();
+ wm.getDefaultDisplay().getMetrics(outMetrics);
+ int screenWidth = outMetrics.widthPixels;
+ int screenHeight = outMetrics.heightPixels;
+
+ initOffscreenPreview(screenWidth, screenHeight);
+ }
+
+ private void deInitModules() {
+ if (mRtcEngine != null) {
+ mRtcEngine.leaveChannel();
+ RtcEngine.destroy();
+ mRtcEngine = null;
+ }
+
+ if (mScreenCapture != null) {
+ mScreenCapture.release();
+ mScreenCapture = null;
+ }
+
+ if (mScreenGLRender != null) {
+ mScreenGLRender.quit();
+ mScreenGLRender = null;
+ }
+ }
+
+ @Override
+ public void onConfigurationChanged(Configuration newConfig) {
+ super.onConfigurationChanged(newConfig);
+ WindowManager wm = (WindowManager) getApplicationContext().getSystemService(Context.WINDOW_SERVICE);
+ DisplayMetrics outMetrics = new DisplayMetrics();
+ wm.getDefaultDisplay().getMetrics(outMetrics);
+ int screenWidth = outMetrics.widthPixels;
+ int screenHeight = outMetrics.heightPixels;
+
+ Log.d(LOG_TAG, "onConfigurationChanged " + newConfig.orientation + " " + screenWidth + " " + screenHeight);
+ updateOffscreenPreview(screenWidth, screenHeight);
+ }
+
+ /**
+ * Init offscreen preview.
+ *
+ * @param width offscreen width
+ * @param height offscreen height
+ * @throws IllegalArgumentException
+ */
+ public void initOffscreenPreview(int width, int height) throws IllegalArgumentException {
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException("Invalid offscreen resolution");
+ }
+
+ mScreenGLRender.init(width, height);
+ }
+
+ /**
+ * Update offscreen preview.
+ *
+ * @param width offscreen width
+ * @param height offscreen height
+ * @throws IllegalArgumentException
+ */
+ public void updateOffscreenPreview(int width, int height) throws IllegalArgumentException {
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException("Invalid offscreen resolution");
+ }
+
+ mScreenGLRender.update(width, height);
+ }
+
+ private void startCapture() {
+ mScreenCapture.start();
+ startForeground(55431, getForeNotification());
+ }
+
+ private Notification getForeNotification() {
+ Notification notification;
+ String eventTitle = getResources().getString(R.string.app_name);
+ NotificationCompat.Builder builder = new NotificationCompat.Builder(this, NotificationHelper.generateChannelId(getApplication(), 55431))
+ .setContentTitle(eventTitle)
+ .setContentText(eventTitle);
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+ builder.setColor(getResources().getColor(android.R.color.black));
+ }
+ notification = builder.build();
+ notification.flags |= Notification.FLAG_ONGOING_EVENT;
+
+ return notification;
+ }
+
+ private void stopCapture() {
+ stopForeground(true);
+ mScreenCapture.stop();
+ }
+
+ private void refreshToken(String token) {
+ if (mRtcEngine != null) {
+ mRtcEngine.renewToken(token);
+ } else {
+ Log.e(LOG_TAG, "rtc engine is null");
+ }
+ }
+
+ @Override
+ public void onCreate() {
+ mContext = getApplicationContext();
+ initModules();
+ }
+
+ @Override
+ public IBinder onBind(Intent intent) {
+ setUpEngine(intent);
+ setUpVideoConfig(intent);
+ joinChannel(intent);
+ return mBinder;
+ }
+
+ @Override
+ public void onDestroy() {
+ super.onDestroy();
+ deInitModules();
+ }
+
+ private void joinChannel(Intent intent) {
+
+ ChannelMediaOptions option = new ChannelMediaOptions();
+ option.autoSubscribeAudio = true;
+ option.autoSubscribeVideo = true;
+ mRtcEngine.joinChannel(intent.getStringExtra(Constant.ACCESS_TOKEN), intent.getStringExtra(Constant.CHANNEL_NAME),
+ "ss_" + Process.myPid(), intent.getIntExtra(Constant.UID, 0), option);
+ }
+
+ private void setUpEngine(Intent intent) {
+ String appId = intent.getStringExtra(Constant.APP_ID);
+ try {
+ mRtcEngine = RtcEngine.create(getApplicationContext(), appId, new IRtcEngineEventHandler() {
+ @Override
+ public void onJoinChannelSuccess(String channel, int uid, int elapsed) {
+ Log.d(LOG_TAG, "onJoinChannelSuccess " + channel + " " + elapsed);
+ }
+
+ @Override
+ public void onWarning(int warn) {
+ Log.d(LOG_TAG, "onWarning " + warn);
+ }
+
+ @Override
+ public void onError(int err) {
+ Log.d(LOG_TAG, "onError " + err);
+ }
+
+ @Override
+ public void onRequestToken() {
+ final int N = mCallbacks.beginBroadcast();
+ for (int i = 0; i < N; i++) {
+ try {
+ mCallbacks.getBroadcastItem(i).onError(Constants.ERR_INVALID_TOKEN);
+ } catch (RemoteException e) {
+ // The RemoteCallbackList will take care of removing
+ // the dead object for us.
+ }
+ }
+ mCallbacks.finishBroadcast();
+ }
+
+ @Override
+ public void onTokenPrivilegeWillExpire(String token) {
+ final int N = mCallbacks.beginBroadcast();
+ for (int i = 0; i < N; i++) {
+ try {
+ mCallbacks.getBroadcastItem(i).onTokenWillExpire();
+ } catch (RemoteException e) {
+ // The RemoteCallbackList will take care of removing
+ // the dead object for us.
+ }
+ }
+ mCallbacks.finishBroadcast();
+ }
+
+ @Override
+ public void onConnectionStateChanged(int state, int reason) {
+ switch (state) {
+ case Constants.CONNECTION_STATE_FAILED :
+ final int N = mCallbacks.beginBroadcast();
+ for (int i = 0; i < N; i++) {
+ try {
+ mCallbacks.getBroadcastItem(i).onError(Constants.CONNECTION_STATE_FAILED);
+ } catch (RemoteException e) {
+ // The RemoteCallbackList will take care of removing
+ // the dead object for us.
+ }
+ }
+ mCallbacks.finishBroadcast();
+ break;
+ default :
+ break;
+ }
+ }
+ });
+ } catch (Exception e) {
+ Log.e(LOG_TAG, Log.getStackTraceString(e));
+
+ throw new RuntimeException("NEED TO check rtc sdk init fatal error\n" + Log.getStackTraceString(e));
+ }
+
+ mRtcEngine.setLogFile("/sdcard/ss_svr.log");
+ mRtcEngine.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING);
+ mRtcEngine.enableVideo();
+
+ if (mRtcEngine.isTextureEncodeSupported()) {
+ mSCS = new ScreenCaptureSource();
+ mRtcEngine.setVideoSource(mSCS);
+ } else {
+ throw new RuntimeException("Can not work on device do not supporting texture" + mRtcEngine.isTextureEncodeSupported());
+ }
+
+ mRtcEngine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER);
+
+ mRtcEngine.muteAllRemoteAudioStreams(true);
+ mRtcEngine.muteAllRemoteVideoStreams(true);
+ mRtcEngine.disableAudio();
+ }
+
+ private void setUpVideoConfig(Intent intent) {
+ int width = intent.getIntExtra(Constant.WIDTH, 0);
+ int height = intent.getIntExtra(Constant.HEIGHT, 0);
+ int frameRate = intent.getIntExtra(Constant.FRAME_RATE, 15);
+ int bitRate = intent.getIntExtra(Constant.BITRATE, 0);
+ int orientationMode = intent.getIntExtra(Constant.ORIENTATION_MODE, 0);
+ VideoEncoderConfiguration.FRAME_RATE fr;
+ VideoEncoderConfiguration.ORIENTATION_MODE om;
+
+ switch (frameRate) {
+ case 1 :
+ fr = VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_1;
+ break;
+ case 7 :
+ fr = VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_7;
+ break;
+ case 10 :
+ fr = VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_10;
+ break;
+ case 15 :
+ fr = VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_15;
+ break;
+ case 24 :
+ fr = VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_24;
+ break;
+ case 30 :
+ fr = VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_30;
+ break;
+ default :
+ fr = VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_15;
+ break;
+ }
+
+ switch (orientationMode) {
+ case 1 :
+ om = VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_FIXED_LANDSCAPE;
+ break;
+ case 2 :
+ om = VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_FIXED_PORTRAIT;
+ break;
+ default :
+ om = VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE;
+ break;
+ }
+
+ mRtcEngine.setVideoEncoderConfiguration(new VideoEncoderConfiguration(
+ new VideoEncoderConfiguration.VideoDimensions(width, height), fr, bitRate, om));
+ }
+}
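Clients are expected to talk to this service only through the IScreenSharing AIDL binder returned by onBind, and the service reads every engine parameter from the binding intent. A client-side sketch is below; appId, token, channelName, notification, context, and TAG are placeholders, while the extra keys and binder methods are the ones this file defines and reads.

    // Illustrative client: pass the extras that onBind()/setUpEngine()/joinChannel()
    // consume, then drive sharing through the returned IScreenSharing stub.
    Intent intent = new Intent(context, ScreenSharingService.class);
    intent.putExtra(Constant.APP_ID, appId);
    intent.putExtra(Constant.ACCESS_TOKEN, token);
    intent.putExtra(Constant.CHANNEL_NAME, channelName);
    intent.putExtra(Constant.UID, 0);
    intent.putExtra(Constant.WIDTH, 1280);
    intent.putExtra(Constant.HEIGHT, 720);
    intent.putExtra(Constant.FRAME_RATE, 15);
    intent.putExtra(Constant.BITRATE, 0);
    intent.putExtra(Constant.ORIENTATION_MODE, 0);

    ServiceConnection connection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName name, IBinder binder) {
            IScreenSharing screenSharing = IScreenSharing.Stub.asInterface(binder);
            try {
                screenSharing.registerCallback(notification); // an INotification implementation
                screenSharing.startShare();
            } catch (RemoteException e) {
                Log.e(TAG, "failed to start screen sharing", e);
            }
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            // The service process died; capture stops with it.
        }
    };

    context.bindService(intent, connection, Context.BIND_AUTO_CREATE);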
diff --git a/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/AgoraBase.h b/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/AgoraBase.h
index 29407b5d8..ecb0c5a3b 100644
--- a/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/AgoraBase.h
+++ b/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/AgoraBase.h
@@ -135,7 +135,7 @@ enum WARN_CODE_TYPE
*/
WARN_LOOKUP_CHANNEL_TIMEOUT = 104,
/** **DEPRECATED** 105: The server rejects the request to look up the channel. The server cannot process this request or the request is illegal.
-
+
Deprecated as of v2.4.1. Use CONNECTION_CHANGED_REJECTED_BY_SERVER(10) in the \ref agora::rtc::IRtcEngineEventHandler::onConnectionStateChanged "onConnectionStateChanged" callback instead.
*/
WARN_LOOKUP_CHANNEL_REJECTED = 105,
@@ -150,7 +150,7 @@ enum WARN_CODE_TYPE
/** 111: A timeout occurs when switching to the live video.
*/
WARN_SWITCH_LIVE_VIDEO_TIMEOUT = 111,
- /** 118: A timeout occurs when setting the client role in the live broadcast profile.
+ /** 118: A timeout occurs when setting the client role in the live interactive streaming profile.
*/
WARN_SET_CLIENT_ROLE_TIMEOUT = 118,
/** 121: The ticket to open the channel is invalid.
@@ -159,26 +159,34 @@ enum WARN_CODE_TYPE
/** 122: Try connecting to another server.
*/
WARN_OPEN_CHANNEL_TRY_NEXT_VOS = 122,
- /** 131: The channel connection cannot be recovered. */
+ /** 131: The channel connection cannot be recovered.
+ */
WARN_CHANNEL_CONNECTION_UNRECOVERABLE = 131,
+ /** 132: The IP address has changed.
+ */
WARN_CHANNEL_CONNECTION_IP_CHANGED = 132,
+ /** 133: The port has changed.
+ */
WARN_CHANNEL_CONNECTION_PORT_CHANGED = 133,
+ /** 134: A socket error occurs. Try to rejoin the channel.
+ */
+ WARN_CHANNEL_SOCKET_ERROR = 134,
/** 701: An error occurs in opening the audio mixing file.
*/
WARN_AUDIO_MIXING_OPEN_ERROR = 701,
- /** 1014: Audio Device Module: a warning occurs in the playback device.
+ /** 1014: Audio Device Module: A warning occurs in the playback device.
*/
WARN_ADM_RUNTIME_PLAYOUT_WARNING = 1014,
/** 1016: Audio Device Module: a warning occurs in the recording device.
*/
WARN_ADM_RUNTIME_RECORDING_WARNING = 1016,
- /** 1019: Audio Device Module: no valid audio data is collected.
+ /** 1019: Audio Device Module: no valid audio data is recorded.
*/
WARN_ADM_RECORD_AUDIO_SILENCE = 1019,
- /** 1020: Audio Device Module: the playback device fails.
+ /** 1020: Audio device module: The audio playback frequency is abnormal, which may cause audio freezes. This abnormality is caused by high CPU usage. Agora recommends stopping other apps.
*/
WARN_ADM_PLAYOUT_MALFUNCTION = 1020,
- /** 1021: Audio Device Module: the recording device fails.
+ /** 1021: Audio device module: The audio recording frequency is abnormal, which may cause audio freezes. This abnormality is caused by high CPU usage. Agora recommends stopping other apps.
*/
WARN_ADM_RECORD_MALFUNCTION = 1021,
/** 1025: The audio playback or recording is interrupted by system events (such as a phone call).
@@ -191,15 +199,14 @@ enum WARN_CODE_TYPE
* AVAudioSessionCategoryPlayAndRecord.
*/
WARN_ADM_IOS_CATEGORY_NOT_PLAYANDRECORD = 1029,
-
- WARN_ADM_IOS_SAMPLERATE_CHANGE = 1030,
-
- /** 1031: Audio Device Module: the recorded audio voice is too low.
+ /** 1031: Audio Device Module: The recorded audio voice is too low.
*/
WARN_ADM_RECORD_AUDIO_LOWLEVEL = 1031,
- /** 1032: Audio Device Module: the playback audio voice is too low.
+ /** 1032: Audio Device Module: The playback audio voice is too low.
*/
WARN_ADM_PLAYOUT_AUDIO_LOWLEVEL = 1032,
+ /** 1033: Audio device module: The audio recording device is occupied.
+ */
WARN_ADM_RECORD_AUDIO_IS_ACTIVE = 1033,
/** 1040: Audio device module: An exception occurs with the audio drive.
* Solutions:
@@ -213,36 +220,36 @@ enum WARN_CODE_TYPE
* audio.
*/
WARN_ADM_INCONSISTENT_AUDIO_DEVICE = 1042,
- /** 1051: (Communication profile only) audio Processing Module: howling is detected.
+ /** 1051: (Communication profile only) Audio processing module: A howling sound is detected when recording the audio data.
*/
WARN_APM_HOWLING = 1051,
- /** 1052: Audio Device Module: the device is in the glitch state.
+ /** 1052: Audio Device Module: The device is in the glitch state.
*/
WARN_ADM_GLITCH_STATE = 1052,
- /** 1053: Audio Device Module: the underlying audio settings have changed.
+ /** 1053: Audio Processing Module: A residual echo is detected, which may be caused by belated scheduling of system threads or signal overflow.
*/
- WARN_ADM_IMPROPER_SETTINGS = 1053,
+ WARN_APM_RESIDUAL_ECHO = 1053,
/// @cond
WARN_ADM_WIN_CORE_NO_RECORDING_DEVICE = 1322,
/// @endcond
- /** 1323: Audio device module: No available playback device.
+ /** 1323: Audio device module: No available playback device.
* Solution: Plug in the audio device.
*/
WARN_ADM_WIN_CORE_NO_PLAYOUT_DEVICE = 1323,
- /** Audio device module: The capture device is released improperly.
- * Solutions:
+ /** Audio device module: The capture device is released improperly.
+ * Solutions:
* - Disable or re-enable the audio device.
* - Re-enable your device.
* - Update the sound card drive.
*/
WARN_ADM_WIN_CORE_IMPROPER_CAPTURE_RELEASE = 1324,
- /** 1610: Super-resolution warning: the original video dimensions of the remote user exceed 640 * 480.
+ /** 1610: The original resolution of the remote video is beyond the range where the super-resolution algorithm can be applied.
*/
WARN_SUPER_RESOLUTION_STREAM_OVER_LIMITATION = 1610,
- /** 1611: Super-resolution warning: another user is using super resolution.
+ /** 1611: Another user is already using the super-resolution algorithm.
*/
WARN_SUPER_RESOLUTION_USER_COUNT_OVER_LIMITATION = 1611,
- /** 1612: The device is not supported.
+ /** 1612: The device does not support the super-resolution algorithm.
*/
WARN_SUPER_RESOLUTION_DEVICE_NOT_SUPPORTED = 1612,
/// @cond
@@ -305,7 +312,15 @@ enum ERROR_CODE_TYPE
/** 15: No network buffers are available. This is for internal SDK internal use only, and it does not return to the application through any method or callback.
*/
ERR_NET_NOBUFS = 15,
- /** 17: The request to join the channel is rejected. This error usually occurs when the user is already in the channel, and still calls the method to join the channel, for example, \ref agora::rtc::IRtcEngine::joinChannel "joinChannel".
+ /** 17: The request to join the channel is rejected.
+ *
+ * - This error usually occurs when the user is already in the channel, and still calls the method to join the
+ * channel, for example, \ref agora::rtc::IRtcEngine::joinChannel "joinChannel".
+ * - This error usually occurs when the user tries to join a channel
+ * during \ref agora::rtc::IRtcEngine::startEchoTest "startEchoTest". Once you
+ * call \ref agora::rtc::IRtcEngine::startEchoTest "startEchoTest", you need to
+ * call \ref agora::rtc::IRtcEngine::stopEchoTest "stopEchoTest" before joining a channel.
+ * - The user tries to join the channel with a token that is expired.
*/
ERR_JOIN_CHANNEL_REJECTED = 17,
/** 18: The request to leave the channel is rejected.
@@ -334,18 +349,21 @@ enum ERROR_CODE_TYPE
/** 102: The specified channel name is invalid. Please try to rejoin the channel with a valid channel name.
*/
ERR_INVALID_CHANNEL_NAME = 102,
+ /** 103: Fails to get server resources in the specified region. Please try to specify another region when calling \ref agora::rtc::IRtcEngine::initialize "initialize".
+ */
+ ERR_NO_SERVER_RESOURCES = 103,
/** **DEPRECATED** 109: Deprecated as of v2.4.1. Use CONNECTION_CHANGED_TOKEN_EXPIRED(9) in the \ref agora::rtc::IRtcEngineEventHandler::onConnectionStateChanged "onConnectionStateChanged" callback instead.
-
+
The token expired due to one of the following reasons:
-
+
- Authorized Timestamp expired: The timestamp is represented by the number of seconds elapsed since 1/1/1970. The user can use the Token to access the Agora service within 24 hours after the Token is generated. If the user does not access the Agora service after 24 hours, this Token is no longer valid.
- Call Expiration Timestamp expired: The timestamp is the exact time when a user can no longer use the Agora service (for example, when a user is forced to leave an ongoing call). When a value is set for the Call Expiration Timestamp, it does not mean that the token will expire, but that the user will be banned from the channel.
*/
ERR_TOKEN_EXPIRED = 109,
/** **DEPRECATED** 110: Deprecated as of v2.4.1. Use CONNECTION_CHANGED_INVALID_TOKEN(8) in the \ref agora::rtc::IRtcEngineEventHandler::onConnectionStateChanged "onConnectionStateChanged" callback instead.
-
+
The token is invalid due to one of the following reasons:
-
+
- The App Certificate for the project is enabled in Console, but the user is still using the App ID. Once the App Certificate is enabled, the user must use a token.
- The uid is mandatory, and users must set the same uid as the one set in the \ref agora::rtc::IRtcEngine::joinChannel "joinChannel" method.
*/
@@ -356,7 +374,7 @@ enum ERROR_CODE_TYPE
/** 112: The internet connection is lost. This applies to the Agora Web SDK only.
*/
ERR_CONNECTION_LOST = 112, // only used in web sdk
- /** 113: The user is not in the channel when calling the \ref agora::rtc::IRtcEngine::sendStreamMessage "sendStreamMessage" or \ref agora::rtc::IRtcEngine::getUserInfoByUserAccount "getUserInfoByUserAccount" method.
+ /** 113: The user is not in the channel when calling the method.
*/
ERR_NOT_IN_CHANNEL = 113,
/** 114: The size of the sent data is over 1024 bytes when the user calls the \ref agora::rtc::IRtcEngine::sendStreamMessage "sendStreamMessage" method.
@@ -377,7 +395,7 @@ enum ERROR_CODE_TYPE
/** 120: Decryption fails. The user may have used a different encryption password to join the channel. Check your settings or try rejoining the channel.
*/
ERR_DECRYPTION_FAILED = 120,
- /** 123: The client is banned by the server.
+ /** 123: The user is banned by the server. This error occurs when the user is kicked off the channel from the server.
*/
ERR_CLIENT_IS_BANNED_BY_SERVER = 123,
/** 124: Incorrect watermark file parameter.
@@ -469,7 +487,7 @@ enum ERROR_CODE_TYPE
*/
ERR_START_CALL = 1002,
/** **DEPRECATED** 1003: Fails to start the camera.
-
+
Deprecated as of v2.4.1. Use LOCAL_VIDEO_STREAM_ERROR_CAPTURE_FAILURE(4) in the \ref agora::rtc::IRtcEngineEventHandler::onConnectionStateChanged "onConnectionStateChanged" callback instead.
*/
ERR_START_CAMERA = 1003,
@@ -512,11 +530,11 @@ enum ERROR_CODE_TYPE
/** 1018: Audio Device Module: Fails to record.
*/
ERR_ADM_RECORD_AUDIO_FAILED = 1018,
- /** 1022: Audio Device Module: An error occurs in initializing the
+ /** 1022: Audio Device Module: An error occurs in initializing the
* loopback device.
*/
ERR_ADM_INIT_LOOPBACK = 1022,
- /** 1023: Audio Device Module: An error occurs in starting the loopback
+ /** 1023: Audio Device Module: An error occurs in starting the loopback
* device.
*/
ERR_ADM_START_LOOPBACK = 1023,
@@ -524,37 +542,37 @@ enum ERROR_CODE_TYPE
* recording permission is granted.
*/
ERR_ADM_NO_PERMISSION = 1027,
- /** 1033: Audio device module: The device is occupied.
+ /** 1033: Audio device module: The device is occupied.
*/
ERR_ADM_RECORD_AUDIO_IS_ACTIVE = 1033,
/** 1101: Audio device module: A fatal exception occurs.
*/
ERR_ADM_ANDROID_JNI_JAVA_RESOURCE = 1101,
- /** 1108: Audio device module: The recording frequency is lower than 50.
- * 0 indicates that the recording is not yet started. We recommend
+ /** 1108: Audio device module: The recording frequency is lower than 50.
+ * 0 indicates that the recording is not yet started. We recommend
* checking your recording permission.
*/
ERR_ADM_ANDROID_JNI_NO_RECORD_FREQUENCY = 1108,
- /** 1109: The playback frequency is lower than 50. 0 indicates that the
- * playback is not yet started. We recommend checking if you have created
- * too many AudioTrack instances.
+ /** 1109: The playback frequency is lower than 50. 0 indicates that the
+ * playback is not yet started. We recommend checking if you have created
+ * too many AudioTrack instances.
*/
ERR_ADM_ANDROID_JNI_NO_PLAYBACK_FREQUENCY = 1109,
- /** 1111: Audio device module: AudioRecord fails to start up. A ROM system
- * error occurs. We recommend the following options to debug:
+ /** 1111: Audio device module: AudioRecord fails to start up. A ROM system
+ * error occurs. We recommend the following options to debug:
* - Restart your App.
- * - Restart your cellphone.
+ * - Restart your cellphone.
* - Check your recording permission.
*/
ERR_ADM_ANDROID_JNI_JAVA_START_RECORD = 1111,
- /** 1112: Audio device module: AudioTrack fails to start up. A ROM system
- * error occurs. We recommend the following options to debug:
+ /** 1112: Audio device module: AudioTrack fails to start up. A ROM system
+ * error occurs. We recommend the following options to debug:
* - Restart your App.
- * - Restart your cellphone.
+ * - Restart your cellphone.
* - Check your playback permission.
*/
ERR_ADM_ANDROID_JNI_JAVA_START_PLAYBACK = 1112,
- /** 1115: Audio device module: AudioRecord returns error. The SDK will
+ /** 1115: Audio device module: AudioRecord returns error. The SDK will
* automatically restart AudioRecord. */
ERR_ADM_ANDROID_JNI_JAVA_RECORD_ERROR = 1115,
/** **DEPRECATED** */
@@ -567,109 +585,109 @@ enum ERROR_CODE_TYPE
ERR_ADM_ANDROID_OPENSL_CREATE_AUDIO_PLAYER = 1157,
/** **DEPRECATED** */
ERR_ADM_ANDROID_OPENSL_START_PLAYER_THREAD = 1160,
- /** 1201: Audio device module: The current device does not support audio
+ /** 1201: Audio device module: The current device does not support audio
* input, possibly because you have mistakenly configured the audio session
- * category, or because some other app is occupying the input device. We
+ * category, or because some other app is occupying the input device. We
* recommend terminating all background apps and re-joining the channel. */
ERR_ADM_IOS_INPUT_NOT_AVAILABLE = 1201,
/** 1206: Audio device module: Cannot activate the Audio Session.*/
ERR_ADM_IOS_ACTIVATE_SESSION_FAIL = 1206,
- /** 1210: Audio device module: Fails to initialize the audio device,
+ /** 1210: Audio device module: Fails to initialize the audio device,
* normally because the audio device parameters are wrongly set.*/
ERR_ADM_IOS_VPIO_INIT_FAIL = 1210,
- /** 1213: Audio device module: Fails to re-initialize the audio device,
+ /** 1213: Audio device module: Fails to re-initialize the audio device,
* normally because the audio device parameters are wrongly set.*/
ERR_ADM_IOS_VPIO_REINIT_FAIL = 1213,
- /** 1214: Fails to re-start up the Audio Unit, possibly because the audio
+ /** 1214: Fails to re-start up the Audio Unit, possibly because the audio
* session category is not compatible with the settings of the Audio Unit.
*/
ERR_ADM_IOS_VPIO_RESTART_FAIL = 1214,
- /// @cond
+
ERR_ADM_IOS_SET_RENDER_CALLBACK_FAIL = 1219,
- /// @endcond
+
/** **DEPRECATED** */
ERR_ADM_IOS_SESSION_SAMPLERATR_ZERO = 1221,
- /** 1301: Audio device module: An audio driver abnomality or a
- * compatibility issue occurs. Solutions: Disable and restart the audio
+ /** 1301: Audio device module: An audio driver abnormality or a
+ * compatibility issue occurs. Solutions: Disable and restart the audio
* device, or reboot the system.*/
ERR_ADM_WIN_CORE_INIT = 1301,
- /** 1303: Audio device module: A recording driver abnomality or a
- * compatibility issue occurs. Solutions: Disable and restart the audio
+ /** 1303: Audio device module: A recording driver abnormality or a
+ * compatibility issue occurs. Solutions: Disable and restart the audio
* device, or reboot the system. */
ERR_ADM_WIN_CORE_INIT_RECORDING = 1303,
- /** 1306: Audio device module: A playout driver abnomality or a
- * compatibility issue occurs. Solutions: Disable and restart the audio
+ /** 1306: Audio device module: A playout driver abnormality or a
+ * compatibility issue occurs. Solutions: Disable and restart the audio
* device, or reboot the system. */
ERR_ADM_WIN_CORE_INIT_PLAYOUT = 1306,
- /** 1307: Audio device module: No audio device is available. Solutions:
+ /** 1307: Audio device module: No audio device is available. Solutions:
* Plug in a proper audio device. */
ERR_ADM_WIN_CORE_INIT_PLAYOUT_NULL = 1307,
- /** 1309: Audio device module: An audio driver abnomality or a
- * compatibility issue occurs. Solutions: Disable and restart the audio
+ /** 1309: Audio device module: An audio driver abnormality or a
+ * compatibility issue occurs. Solutions: Disable and restart the audio
* device, or reboot the system. */
ERR_ADM_WIN_CORE_START_RECORDING = 1309,
- /** 1311: Audio device module: Insufficient system memory or poor device
+ /** 1311: Audio device module: Insufficient system memory or poor device
* performance. Solutions: Reboot the system or replace the device.
*/
ERR_ADM_WIN_CORE_CREATE_REC_THREAD = 1311,
- /** 1314: Audio device module: An audio driver abnormality occurs.
+ /** 1314: Audio device module: An audio driver abnormality occurs.
* Solutions:
* - Disable and then re-enable the audio device.
* - Reboot the system.
* - Upgrade your audio card driver.*/
ERR_ADM_WIN_CORE_CAPTURE_NOT_STARTUP = 1314,
- /** 1319: Audio device module: Insufficient system memory or poor device
+ /** 1319: Audio device module: Insufficient system memory or poor device
* performance. Solutions: Reboot the system or replace the device. */
ERR_ADM_WIN_CORE_CREATE_RENDER_THREAD = 1319,
- /** 1320: Audio device module: An audio driver abnormality occurs.
+ /** 1320: Audio device module: An audio driver abnormality occurs.
* Solutions:
* - Disable and then re-enable the audio device.
* - Reboot the system.
* - Replace the device. */
ERR_ADM_WIN_CORE_RENDER_NOT_STARTUP = 1320,
- /** 1322: Audio device module: No audio sampling device is available.
+ /** 1322: Audio device module: No audio sampling device is available.
* Solutions: Plug in a proper recording device. */
ERR_ADM_WIN_CORE_NO_RECORDING_DEVICE = 1322,
- /** 1323: Audio device module: No audio playout device is available.
+ /** 1323: Audio device module: No audio playout device is available.
* Solutions: Plug in a proper playback device.*/
ERR_ADM_WIN_CORE_NO_PLAYOUT_DEVICE = 1323,
- /** 1351: Audio device module: An audio driver abnormality or a
+ /** 1351: Audio device module: An audio driver abnormality or a
* compatibility issue occurs. Solutions:
* - Disable and then re-enable the audio device.
* - Reboot the system.
* - Upgrade your audio card driver. */
ERR_ADM_WIN_WAVE_INIT = 1351,
- /** 1353: Audio device module: An audio driver abnormality occurs.
+ /** 1353: Audio device module: An audio driver abnormality occurs.
* Solutions:
* - Disable and then re-enable the audio device.
* - Reboot the system.
* - Upgrade your audio card driver. */
ERR_ADM_WIN_WAVE_INIT_RECORDING = 1353,
- /** 1354: Audio device module: An audio driver abnormality occurs.
+ /** 1354: Audio device module: An audio driver abnormality occurs.
* Solutions:
* - Disable and then re-enable the audio device.
* - Reboot the system.
* - Upgrade your audio card driver. */
ERR_ADM_WIN_WAVE_INIT_MICROPHONE = 1354,
- /** 1355: Audio device module: An audio driver abnormality occurs.
+ /** 1355: Audio device module: An audio driver abnormality occurs.
* Solutions:
* - Disable and then re-enable the audio device.
* - Reboot the system.
* - Upgrade your audio card driver. */
ERR_ADM_WIN_WAVE_INIT_PLAYOUT = 1355,
- /** 1356: Audio device module: An audio driver abnormality occurs.
+ /** 1356: Audio device module: An audio driver abnormality occurs.
* Solutions:
* - Disable and then re-enable the audio device.
* - Reboot the system.
* - Upgrade your audio card driver. */
ERR_ADM_WIN_WAVE_INIT_SPEAKER = 1356,
- /** 1357: Audio device module: An audio driver abnormality occurs.
+ /** 1357: Audio device module: An audio driver abnormality occurs.
* Solutions:
* - Disable and then re-enable the audio device.
* - Reboot the system.
* - Upgrade your audio card driver. */
ERR_ADM_WIN_WAVE_START_RECORDING = 1357,
- /** 1358: Audio device module: An audio driver abnormality occurs.
+ /** 1358: Audio device module: An audio driver abnormality occurs.
* Solutions:
* - Disable and then re-enable the audio device.
* - Reboot the system.
@@ -689,7 +707,7 @@ enum ERROR_CODE_TYPE
// VDM error code starts from 1500
/** **DEPRECATED** 1502: Video Device Module: The camera in use.
-
+
Deprecated as of v2.4.1. Use LOCAL_VIDEO_STREAM_ERROR_DEVICE_BUSY(3) in the \ref agora::rtc::IRtcEngineEventHandler::onConnectionStateChanged "onConnectionStateChanged" callback instead.
*/
ERR_VDM_WIN_DEVICE_IN_USE = 1502,
diff --git a/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraMediaEngine.h b/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraMediaEngine.h
index 2ebf3abab..e44fb6c43 100644
--- a/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraMediaEngine.h
+++ b/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraMediaEngine.h
@@ -44,11 +44,11 @@ class IAudioFrameObserver {
/** The sample rate.
*/
int samplesPerSec; //sampling rate
- /** The data buffer of the audio frame. When the audio frame uses a stereo channel, the data buffer is interleaved.
+ /** The data buffer of the audio frame. When the audio frame uses a stereo channel, the data buffer is interleaved.
The size of the data buffer is as follows: `buffer` = `samples` × `channels` × `bytesPerSample`.
*/
void* buffer; //data buffer
- /** The timestamp of the external audio frame. You can use this parameter for the following purposes:
+ /** The timestamp (ms) of the external audio frame. You can use this parameter for the following purposes:
- Restore the order of the captured audio frame.
- Synchronize audio and video frames in video-related scenarios, including where external video sources are used.
*/
@@ -99,20 +99,20 @@ class IAudioFrameObserver {
virtual bool onPlaybackAudioFrameBeforeMixing(unsigned int uid,
AudioFrame& audioFrame) = 0;
/** Determines whether to receive audio data from multiple channels.
-
+
@since v3.0.1
After you register the audio frame observer, the SDK triggers this callback every time it captures an audio frame.
- In the multi-channel scenario, if you want to get audio data from multiple channels,
- set the return value of this callback as true. After that, the SDK triggers the
- \ref IAudioFrameObserver::onPlaybackAudioFrameBeforeMixingEx "onPlaybackAudioFrameBeforeMixingEx" callback to send you the before-mixing
+ In the multi-channel scenario, if you want to get audio data from multiple channels,
+ set the return value of this callback as true. After that, the SDK triggers the
+ \ref IAudioFrameObserver::onPlaybackAudioFrameBeforeMixingEx "onPlaybackAudioFrameBeforeMixingEx" callback to send you the before-mixing
audio data from various channels. You can also get the channel ID of each audio frame.
-
+
@note
- - Once you set the return value of this callback as true, the SDK triggers
- only the \ref IAudioFrameObserver::onPlaybackAudioFrameBeforeMixingEx "onPlaybackAudioFrameBeforeMixingEx" callback
- to send the before-mixing audio frame. \ref IAudioFrameObserver::onPlaybackAudioFrameBeforeMixing "onPlaybackAudioFrameBeforeMixing" is not triggered.
+ - Once you set the return value of this callback as true, the SDK triggers
+ only the \ref IAudioFrameObserver::onPlaybackAudioFrameBeforeMixingEx "onPlaybackAudioFrameBeforeMixingEx" callback
+ to send the before-mixing audio frame. \ref IAudioFrameObserver::onPlaybackAudioFrameBeforeMixing "onPlaybackAudioFrameBeforeMixing" is not triggered.
In the multi-channel scenario, Agora recommends setting the return value as true.
- If you set the return value of this callback as false, the SDK triggers only the `onPlaybackAudioFrameBeforeMixing` callback to send the audio data.
@return
@@ -120,11 +120,11 @@ class IAudioFrameObserver {
- `false`: Do not receive audio data from multiple channels.
*/
virtual bool isMultipleChannelFrameWanted() { return false; }
-
+
/** Gets the before-mixing playback audio frame from multiple channels.
- After you successfully register the audio frame observer, if you set the return
- value of isMultipleChannelFrameWanted as true, the SDK triggers this callback each
+ After you successfully register the audio frame observer, if you set the return
+ value of \ref IAudioFrameObserver::isMultipleChannelFrameWanted "isMultipleChannelFrameWanted" as true, the SDK triggers this callback each
time it receives a before-mixing audio frame from any of the channel.
@param channelId The channel ID of this audio frame.
@@ -207,7 +207,7 @@ class IVideoFrameObserver {
/** Set the rotation of this frame before rendering the video. Supports 0, 90, 180, 270 degrees clockwise.
*/
int rotation; // rotation of this frame (0, 90, 180, 270)
- /** The timestamp of the external audio frame. It is mandatory. You can use this parameter for the following purposes:
+ /** The timestamp (ms) of the external audio frame. It is mandatory. You can use this parameter for the following purposes:
- Restore the order of the captured audio frame.
- Synchronize audio and video frames in video-related scenarios, including scenarios where external video sources are used.
@note This timestamp is for rendering the video stream, and not for capturing the video stream.
@@ -219,13 +219,14 @@ class IVideoFrameObserver {
public:
/** Occurs each time the SDK receives a video frame captured by the local camera.
*
- * After you successfully register the video frame observer, the SDK triggers this callback each time a video frame is received. In this callback,
+ * After you successfully register the video frame observer, the SDK triggers this callback each time a video frame is received. In this callback,
* you can get the video data captured by the local camera. You can then pre-process the data according to your scenarios.
*
* After pre-processing, you can send the processed video data back to the SDK by setting the `videoFrame` parameter in this callback.
*
* @note
- * This callback does not support sending processed RGBA video data back to the SDK.
+ * - This callback does not support sending processed RGBA video data back to the SDK.
+ * - The video data that this callback gets has not been pre-processed, without the watermark, the cropped content, the rotation, and the image enhancement.
*
* @param videoFrame Pointer to VideoFrame.
* @return Whether or not to ignore the current video frame if the pre-processing fails:
@@ -234,9 +235,9 @@ class IVideoFrameObserver {
*/
virtual bool onCaptureVideoFrame(VideoFrame& videoFrame) = 0;
/** @since v3.0.0
- *
+ *
* Occurs each time the SDK receives a video frame before encoding.
- *
+ *
* After you successfully register the video frame observer, the SDK triggers this callback each time when it receives a video frame. In this callback, you can get the video data before encoding. You can then process the data according to your particular scenarios.
*
* After processing, you can send the processed video data back to the SDK by setting the `VideoFrame` parameter in this callback.
@@ -253,10 +254,10 @@ class IVideoFrameObserver {
*/
virtual bool onPreEncodeVideoFrame(VideoFrame& videoFrame) { return true; }
/** Occurs each time the SDK receives a video frame sent by the remote user.
- *
+ *
* After you successfully register the video frame observer and isMultipleChannelFrameWanted return false, the SDK triggers this callback each time a video frame is received.
* In this callback, you can get the video data sent by the remote user. You can then post-process the data according to your scenarios.
- *
+ *
* After post-processing, you can send the processed data back to the SDK by setting the `videoFrame` parameter in this callback.
*
* @note
@@ -269,11 +270,11 @@ class IVideoFrameObserver {
* - false: Ignore the current video frame, and do not send it back to the SDK.
*/
virtual bool onRenderVideoFrame(unsigned int uid, VideoFrame& videoFrame) = 0;
- /** Occurs each time the SDK receives a video frame and prompts you to set the video format.
+ /** Occurs each time the SDK receives a video frame and prompts you to set the video format.
*
* YUV420 is the default video format. If you want to receive other video formats, register this callback in the IVideoFrameObserver class.
*
- * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame.
+ * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame.
* You need to set your preferred video data in the return value of this callback.
*
* @return Sets the video format: #VIDEO_FRAME_TYPE
@@ -281,13 +282,13 @@ class IVideoFrameObserver {
* - #FRAME_TYPE_RGBA (2): RGBA
*/
virtual VIDEO_FRAME_TYPE getVideoFormatPreference() { return FRAME_TYPE_YUV420; }
- /** Occurs each time the SDK receives a video frame and prompts you whether or not to rotate the captured video according to the rotation member in the VideoFrame class.
+ /** Occurs each time the SDK receives a video frame and prompts you whether or not to rotate the captured video according to the rotation member in the VideoFrame class.
*
* The SDK does not rotate the captured video by default. If you want to rotate the captured video according to the rotation member in the VideoFrame class, register this callback in the IVideoFrameObserver class.
*
* After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. You need to set whether or not to rotate the video frame in the return value of this callback.
*
- * @note
+ * @note
* This callback applies to RGBA video data only.
*
* @return Sets whether or not to rotate the captured video:
@@ -296,13 +297,13 @@ class IVideoFrameObserver {
*/
virtual bool getRotationApplied() { return false; }
/** Occurs each time the SDK receives a video frame and prompts you whether or not to mirror the captured video.
- *
+ *
* The SDK does not mirror the captured video by default. Register this callback in the IVideoFrameObserver class if you want to mirror the captured video.
*
- * After you successfully register the video frame observer, the SDK triggers this callback each time a video frame is received.
+ * After you successfully register the video frame observer, the SDK triggers this callback each time a video frame is received.
* You need to set whether or not to mirror the captured video in the return value of this callback.
- *
- * @note
+ *
+ * @note
* This callback applies to RGBA video data only.
*
* @return Sets whether or not to mirror the captured video:
@@ -311,7 +312,7 @@ class IVideoFrameObserver {
*/
virtual bool getMirrorApplied() { return false; }
/** @since v3.0.0
-
+
Sets whether to output the acquired video frame smoothly.
If you want the video frames acquired from \ref IVideoFrameObserver::onRenderVideoFrame "onRenderVideoFrame" to be more evenly spaced, you can register the `getSmoothRenderingEnabled` callback in the `IVideoFrameObserver` class and set its return value as `true`.
@@ -344,34 +345,34 @@ class IVideoFrameObserver {
*
*/
virtual uint32_t getObservedFramePosition() { return static_cast<uint32_t>(POSITION_POST_CAPTURER | POSITION_PRE_RENDERER); }
-
+
/** Determines whether to receive video data from multiple channels.
- After you register the video frame observer, the SDK triggers this callback
+ After you register the video frame observer, the SDK triggers this callback
every time it captures a video frame.
- In the multi-channel scenario, if you want to get video data from multiple channels,
- set the return value of this callback as true. After that, the SDK triggers the
- onRenderVideoFrameEx callback to send you
+ In the multi-channel scenario, if you want to get video data from multiple channels,
+ set the return value of this callback as true. After that, the SDK triggers the
+ \ref IVideoFrameObserver::onRenderVideoFrameEx "onRenderVideoFrameEx" callback to send you
the video data from various channels. You can also get the channel ID of each video frame.
@note
- - Once you set the return value of this callback as true, the SDK triggers only the `onRenderVideoFrameEx` callback to
- send the video frame. onRenderVideoFrame will not be triggered. In the multi-channel scenario, Agora recommends setting the return value as true.
+ - Once you set the return value of this callback as true, the SDK triggers only the `onRenderVideoFrameEx` callback to
+ send the video frame. \ref IVideoFrameObserver::onRenderVideoFrame "onRenderVideoFrame" will not be triggered. In the multi-channel scenario, Agora recommends setting the return value as true.
- If you set the return value of this callback as false, the SDK triggers only the `onRenderVideoFrame` callback to send the video data.
- @return
+ @return
- `true`: Receive video data from multiple channels.
- `false`: Do not receive video data from multiple channels.
*/
virtual bool isMultipleChannelFrameWanted() { return false; }
/** Gets the video frame from multiple channels.
-
- After you successfully register the video frame observer, if you set the return value of
- isMultipleChannelFrameWanted as true, the SDK triggers this callback each time it receives a video frame
+
+ After you successfully register the video frame observer, if you set the return value of
+ \ref IVideoFrameObserver::isMultipleChannelFrameWanted "isMultipleChannelFrameWanted" as true, the SDK triggers this callback each time it receives a video frame
from any of the channel.
- You can process the video data retrieved from this callback according to your scenario, and send the
+ You can process the video data retrieved from this callback according to your scenario, and send the
processed data back to the SDK using the `videoFrame` parameter in this callback.
@note This callback does not support sending RGBA video data back to the SDK.
@@ -456,7 +457,7 @@ class IVideoFrame {
/** Retrieves the height of the frame.
*/
virtual int height() const = 0;
- /** Retrieves the timestamp (90 ms) of the frame.
+ /** Retrieves the timestamp (ms) of the frame.
*/
virtual unsigned int timestamp() const = 0;
/** Retrieves the render time (ms).
@@ -540,6 +541,8 @@ struct ExternalVideoFrame
};
/** The video pixel format.
+ *
+ * @note The SDK does not support the alpha channel, and discards any alpha value passed to the SDK.
*/
enum VIDEO_PIXEL_FORMAT
{
@@ -602,9 +605,17 @@ struct ExternalVideoFrame
/** [Raw data related parameter] The clockwise rotation of the video frame. You can set the rotation angle as 0, 90, 180, or 270. The default value is 0.
*/
int rotation;
- /** Timestamp of the incoming video frame (ms). An incorrect timestamp results in frame loss or unsynchronized audio and video.
+ /** Timestamp (ms) of the incoming video frame. An incorrect timestamp results in frame loss or unsynchronized audio and video.
*/
long long timestamp;
+
+ ExternalVideoFrame()
+ :cropLeft(0)
+ ,cropTop(0)
+ ,cropRight(0)
+ ,cropBottom(0)
+ ,rotation(0)
+ {}
};
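With the default constructor above, a typical push of one raw I420 frame could look like the following sketch. The buffer pointer, dimensions, and timestamp helper are illustrative, and the enum names are assumed from the 3.x `ExternalVideoFrame` definition:

```cpp
// Sketch only: push one externally captured I420 frame.
agora::media::ExternalVideoFrame frame;                 // crop/rotation default to 0
frame.type   = agora::media::ExternalVideoFrame::VIDEO_BUFFER_RAW_DATA;
frame.format = agora::media::ExternalVideoFrame::VIDEO_PIXEL_I420;
frame.buffer = i420Buffer;            // app-owned Y/U/V data (hypothetical pointer)
frame.stride = 640;                   // line stride of the Y plane, in pixels
frame.height = 360;
frame.timestamp = nowMs();            // hypothetical millisecond clock helper
mediaEngine->pushVideoFrame(&frame);  // requires setExternalVideoSource(true, false) first
```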
class IMediaEngine {
@@ -615,26 +626,32 @@ class IMediaEngine {
This method is used to register an audio frame observer object (register a callback). This method is required to register callbacks when the engine is required to provide an \ref IAudioFrameObserver::onRecordAudioFrame "onRecordAudioFrame" or \ref IAudioFrameObserver::onPlaybackAudioFrame "onPlaybackAudioFrame" callback.
- @param observer Audio frame observer object instance. If NULL is passed in, the registration is canceled.
+ @note Ensure that you call this method before joining a channel.
+
+ @param observer Audio frame observer object instance. See IAudioFrameObserver. Set the value as NULL to release the
+ audio observer object. Agora recommends calling `registerAudioFrameObserver(NULL)` after receiving the \ref agora::rtc::IRtcEngineEventHandler::onLeaveChannel "onLeaveChannel" callback.
+
@return
- 0: Success.
- < 0: Failure.
*/
virtual int registerAudioFrameObserver(IAudioFrameObserver* observer) = 0;
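A hedged sketch of the register/release pattern recommended above. It assumes `mediaEngine` was obtained through `IRtcEngine::queryInterface` with `AGORA_IID_MEDIA_ENGINE`, and `audioObserver` is an app-side implementation of `IAudioFrameObserver`:

```cpp
// Sketch only: register before joining, release after leaving.
mediaEngine->registerAudioFrameObserver(&audioObserver);  // before joinChannel
// ... later, from the onLeaveChannel callback:
mediaEngine->registerAudioFrameObserver(NULL);            // release the observer
```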
/** Registers a video frame observer object.
-
- You need to implement the IVideoFrameObserver class in this method, and register callbacks according to your scenarios.
-
- After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received.
-
- @note When handling the video data returned in the callbacks, pay attention to the changes in the `width` and `height` parameters,
- which may be adapted under the following circumstances:
- - When the network condition deteriorates, the video resolution decreases incrementally.
- - If the user adjusts the video profile, the resolution of the video returned in the callbacks also changes.
- @param observer Video frame observer object instance. If NULL is passed in, the registration is canceled.
- @return
- - 0: Success.
- - < 0: Failure.
+ *
+ * You need to implement the IVideoFrameObserver class in this method, and register callbacks according to your scenarios.
+ *
+ * After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received.
+ *
+ * @note
+ * - When handling the video data returned in the callbacks, pay attention to the changes in the `width` and `height` parameters,
+ * which may be adapted under the following circumstances:
+ * - When the network condition deteriorates, the video resolution decreases incrementally.
+ * - If the user adjusts the video profile, the resolution of the video returned in the callbacks also changes.
+ * - Ensure that you call this method before joining a channel.
+ * @param observer Video frame observer object instance. If NULL is passed in, the registration is canceled.
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
*/
virtual int registerVideoFrameObserver(IVideoFrameObserver* observer) = 0;
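The same pattern applies to the video observer; a brief sketch, reusing the hypothetical observer class sketched earlier:

```cpp
// Sketch only: register the video observer before joining a channel.
MultiChannelVideoObserver videoObserver;
mediaEngine->registerVideoFrameObserver(&videoObserver);
// ...
mediaEngine->registerVideoFrameObserver(NULL);  // cancel the registration
```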
/** **DEPRECATED** */
@@ -666,36 +683,35 @@ class IMediaEngine {
*/
virtual int pushAudioFrame(IAudioFrameObserver::AudioFrame* frame) = 0;
/** Pulls the remote audio data.
- *
- * Before calling this method, call the
- * \ref agora::rtc::IRtcEngine::setExternalAudioSink
- * "setExternalAudioSink(enabled: true)" method to enable and set the
+ *
+ * Before calling this method, call the
+ * \ref agora::rtc::IRtcEngine::setExternalAudioSink
+ * "setExternalAudioSink(enabled: true)" method to enable and set the
* external audio sink.
- *
- * After a successful method call, the app pulls the decoded and mixed
+ *
+ * After a successful method call, the app pulls the decoded and mixed
* audio data for playback.
- *
+ *
* @note
- * - Once you call the \ref agora::media::IMediaEngine::pullAudioFrame
- * "pullAudioFrame" method successfully, the app will not retrieve any audio
- * data from the
- * \ref agora::media::IAudioFrameObserver::onPlaybackAudioFrame
+ * - Once you call the \ref agora::media::IMediaEngine::pullAudioFrame
+ * "pullAudioFrame" method successfully, the app will not retrieve any audio
+ * data from the
+ * \ref agora::media::IAudioFrameObserver::onPlaybackAudioFrame
* "onPlaybackAudioFrame" callback.
- * - The difference between the
- * \ref agora::media::IAudioFrameObserver::onPlaybackAudioFrame
- * "onPlaybackAudioFrame" callback and the
- * \ref agora::media::IMediaEngine::pullAudioFrame "pullAudioFrame" method is as
+ * - The difference between the
+ * \ref agora::media::IAudioFrameObserver::onPlaybackAudioFrame
+ * "onPlaybackAudioFrame" callback and the
+ * \ref agora::media::IMediaEngine::pullAudioFrame "pullAudioFrame" method is as
* follows:
- * - `onPlaybackAudioFrame`: The SDK sends the audio data to the app once
- * every 10 ms. Any delay in processing the audio frames may result in audio
- * jitter.
- * - `pullAudioFrame`: The app pulls the remote audio data. After setting the
- * audio data parameters, the SDK adjusts the frame buffer and avoids
+ * - `onPlaybackAudioFrame`: The SDK sends the audio data to the app through this callback.
+ * Any delay in processing the audio frames may result in audio jitter.
+ * - `pullAudioFrame`: The app pulls the remote audio data. After setting the
+ * audio data parameters, the SDK adjusts the frame buffer and avoids
* problems caused by jitter in the external audio playback.
- *
- * @param frame Pointers to the audio frame.
+ *
+ * @param frame Pointers to the audio frame.
* See: \ref IAudioFrameObserver::AudioFrame "AudioFrame".
- *
+ *
* @return
* - 0: Success.
* - < 0: Failure.
@@ -703,6 +719,8 @@ class IMediaEngine {
virtual int pullAudioFrame(IAudioFrameObserver::AudioFrame* frame) = 0;
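A hedged sketch of the pull model described above. It assumes `setExternalAudioSink(true, 48000, 2)` was called beforehand and that `AudioFrame` carries the fields defined in the 3.x `IAudioFrameObserver`; the PCM buffer is app-owned and illustrative:

```cpp
// Sketch only: pull 10 ms of decoded, mixed audio for external playback.
agora::media::IAudioFrameObserver::AudioFrame frame;
frame.type           = agora::media::IAudioFrameObserver::FRAME_TYPE_PCM16;
frame.samplesPerSec  = 48000;
frame.channels       = 2;
frame.bytesPerSample = 2;
frame.samples        = 48000 / 100;   // 10 ms per pull
frame.buffer         = pcmBuffer;     // hypothetical pre-allocated buffer
if (mediaEngine->pullAudioFrame(&frame) == 0) {
    // hand frame.buffer to the external audio device
}
```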
/** Configures the external video source.
+ @note Ensure that you call this method before joining a channel.
+
@param enable Sets whether to use the external video source:
- true: Use the external video source.
- false: (Default) Do not use the external video source.
@@ -720,7 +738,7 @@ class IMediaEngine {
@param frame Video frame to be pushed. See \ref ExternalVideoFrame "ExternalVideoFrame".
- @note In the Communication profile, this method does not support video frames in the Texture format.
+ @note In the `COMMUNICATION` profile, this method does not support video frames in the Texture format.
@return
- 0: Success.
diff --git a/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraRtcChannel.h b/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraRtcChannel.h
index bbb0aa12d..f354183dc 100644
--- a/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraRtcChannel.h
+++ b/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraRtcChannel.h
@@ -16,7 +16,7 @@ struct ChannelMediaOptions {
- true: (Default) Subscribe.
- false: Do not subscribe.
- This member serves a similar function to the \ref agora::rtc::IChannel::muteAllRemoteAudioStreams "muteAllRemoteAudioStreams" method. After joining the channel,
+ This member serves a similar function to the \ref agora::rtc::IChannel::muteAllRemoteAudioStreams "muteAllRemoteAudioStreams" method. After joining the channel,
you can call the `muteAllRemoteAudioStreams` method to set whether to subscribe to audio streams in the channel.
*/
bool autoSubscribeAudio;
@@ -24,7 +24,7 @@ struct ChannelMediaOptions {
- true: (Default) Subscribe.
- false: Do not subscribe.
- This member serves a similar function to the \ref agora::rtc::IChannel::muteAllRemoteVideoStreams "muteAllRemoteVideoStreams" method. After joining the channel,
+ This member serves a similar function to the \ref agora::rtc::IChannel::muteAllRemoteVideoStreams "muteAllRemoteVideoStreams" method. After joining the channel,
you can call the `muteAllRemoteVideoStreams` method to set whether to subscribe to video streams in the channel.
*/
bool autoSubscribeVideo;
@@ -53,7 +53,7 @@ class IChannelEventHandler
(void)msg;
}
/** Reports the error code of `IChannel`.
-
+
@param rtcChannel IChannel
@param err The error code: #ERROR_CODE_TYPE
@param msg The error message.
@@ -78,7 +78,7 @@ class IChannelEventHandler
(void)elapsed;
}
/** Occurs when a user rejoins the channel after being disconnected due to network problems.
-
+
@param rtcChannel IChannel
@param uid The user ID.
@param elapsed Time elapsed (ms) from the local user starting to reconnect until this callback is triggered.
@@ -102,12 +102,12 @@ class IChannelEventHandler
(void)rtcChannel;
(void)stats;
}
- /** Occurs when the user role switches in a live broadcast. For example, from a host to an audience or vice versa.
+ /** Occurs when the user role switches in the live interactive streaming. For example, from a host to an audience or vice versa.
This callback notifies the application of a user role switch when the application calls the \ref IChannel::setClientRole "setClientRole" method.
The SDK triggers this callback when the local user switches the user role by calling the \ref IChannel::setClientRole "setClientRole" method after joining the channel.
-
+
@param rtcChannel IChannel
@param oldRole Role that the user switches from: #CLIENT_ROLE_TYPE.
@param newRole Role that the user switches to: #CLIENT_ROLE_TYPE.
@@ -117,10 +117,10 @@ class IChannelEventHandler
(void)oldRole;
(void)newRole;
}
- /** Occurs when a remote user (Communication)/ host (Live Broadcast) joins the channel.
+ /** Occurs when a remote user (`COMMUNICATION`)/ host (`LIVE_BROADCASTING`) joins the channel.
- - Communication profile: This callback notifies the application that another user joins the channel. If other users are already in the channel, the SDK also reports to the application on the existing users.
- - Live-broadcast profile: This callback notifies the application that the host joins the channel. If other hosts are already in the channel, the SDK also reports to the application on the existing hosts. We recommend limiting the number of hosts to 17.
+ - `COMMUNICATION` profile: This callback notifies the application that another user joins the channel. If other users are already in the channel, the SDK also reports to the application on the existing users.
+ - `LIVE_BROADCASTING` profile: This callback notifies the application that the host joins the channel. If other hosts are already in the channel, the SDK also reports to the application on the existing hosts. We recommend limiting the number of hosts to 17.
The SDK triggers this callback under one of the following circumstances:
- A remote user/host joins the channel by calling the \ref agora::rtc::IChannel::joinChannel "joinChannel" method.
@@ -128,11 +128,11 @@ class IChannelEventHandler
- A remote user/host rejoins the channel after a network interruption.
- The host injects an online media stream into the channel by calling the \ref agora::rtc::IChannel::addInjectStreamUrl "addInjectStreamUrl" method.
- @note In the Live-broadcast profile:
+ @note In the `LIVE_BROADCASTING` profile:
- The host receives this callback when another host joins the channel.
- The audience in the channel receives this callback when a new host joins the channel.
- When a web application joins the channel, the SDK triggers this callback as long as the web application publishes streams.
-
+
@param rtcChannel IChannel
@param uid User ID of the user or host joining the channel.
@param elapsed Time delay (ms) from the local user calling the \ref IChannel::joinChannel "joinChannel" method until the SDK triggers this callback.
@@ -142,7 +142,7 @@ class IChannelEventHandler
(void)uid;
(void)elapsed;
}
- /** Occurs when a remote user (Communication)/host (Live Broadcast) leaves the channel.
+ /** Occurs when a remote user (`COMMUNICATION`)/host (`LIVE_BROADCASTING`) leaves the channel.
Reasons why the user is offline:
@@ -161,14 +161,14 @@ class IChannelEventHandler
/** Occurs when the SDK cannot reconnect to Agora's edge server 10 seconds after its connection to the server is interrupted.
The SDK triggers this callback when it cannot connect to the server 10 seconds after calling the \ref IChannel::joinChannel "joinChannel" method, whether or not it is in the channel.
-
+
This callback is different from \ref agora::rtc::IRtcEngineEventHandler::onConnectionInterrupted "onConnectionInterrupted":
- The SDK triggers the `onConnectionInterrupted` callback when it loses connection with the server for more than four seconds after it successfully joins the channel.
- The SDK triggers the `onConnectionLost` callback when it loses connection with the server for more than 10 seconds, whether or not it joins the channel.
If the SDK fails to rejoin the channel 20 minutes after being disconnected from Agora's edge server, the SDK stops rejoining the channel.
-
+
@param rtcChannel IChannel
*/
virtual void onConnectionLost(IChannel *rtcChannel) {
@@ -178,8 +178,9 @@ class IChannelEventHandler
After a token is specified by calling the \ref IChannel::joinChannel "joinChannel" method, if the SDK losses connection with the Agora server due to network issues, the token may expire after a certain period of time and a new token may be required to reconnect to the server.
- This callback notifies the app to generate a new token and call `joinChannel` to rejoin the channel with the new token.
-
+ Once you receive this callback, generate a new token on your app server, and call
+ \ref agora::rtc::IChannel::renewToken "renewToken" to pass the new token to the SDK.
+
@param rtcChannel IChannel
*/
virtual void onRequestToken(IChannel *rtcChannel) {
@@ -196,10 +197,10 @@ class IChannelEventHandler
(void)rtcChannel;
(void)token;
}
- /** Reports the statistics of the current call.
-
+ /** Reports the statistics of the current call.
+
The SDK triggers this callback once every two seconds after the user joins the channel.
-
+
@param rtcChannel IChannel
@param stats Statistics of the RtcEngine: RtcStats.
*/
@@ -213,7 +214,7 @@ class IChannelEventHandler
@param rtcChannel IChannel
@param uid User ID. The network quality of the user with this @p uid is reported. If @p uid is 0, the local network quality is reported.
- @param txQuality Uplink transmission quality rating of the user in terms of the transmission bitrate, packet loss rate, average RTT (Round-Trip Time), and jitter of the uplink network. @p txQuality is a quality rating helping you understand how well the current uplink network conditions can support the selected VideoEncoderConfiguration. For example, a 1000 Kbps uplink network may be adequate for video frames with a resolution of 640 * 480 and a frame rate of 15 fps in the Live-broadcast profile, but may be inadequate for resolutions higher than 1280 * 720. See #QUALITY_TYPE.
+ @param txQuality Uplink transmission quality rating of the user in terms of the transmission bitrate, packet loss rate, average RTT (Round-Trip Time), and jitter of the uplink network. @p txQuality is a quality rating helping you understand how well the current uplink network conditions can support the selected VideoEncoderConfiguration. For example, a 1000 Kbps uplink network may be adequate for video frames with a resolution of 640 * 480 and a frame rate of 15 fps in the `LIVE_BROADCASTING` profile, but may be inadequate for resolutions higher than 1280 * 720. See #QUALITY_TYPE.
@param rxQuality Downlink network quality rating of the user in terms of the packet loss rate, average RTT, and jitter of the downlink network. See #QUALITY_TYPE.
*/
virtual void onNetworkQuality(IChannel *rtcChannel, uid_t uid, int txQuality, int rxQuality) {
@@ -250,10 +251,10 @@ class IChannelEventHandler
(void)stats;
}
/** Occurs when the remote audio state changes.
-
+
This callback indicates the state change of the remote audio stream.
- @note This callback does not work properly when the number of users (in the Communication profile) or broadcasters (in the Live-broadcast profile) in the channel exceeds 17.
-
+ @note This callback does not work properly when the number of users (in the `COMMUNICATION` profile) or hosts (in the `LIVE_BROADCASTING` profile) in the channel exceeds 17.
+
@param rtcChannel IChannel
@param uid ID of the remote user whose audio state changes.
@param state State of the remote audio. See #REMOTE_AUDIO_STATE.
@@ -270,16 +271,118 @@ class IChannelEventHandler
(void)reason;
(void)elapsed;
}
- /** Reports which user is the loudest speaker.
- If the user enables the audio volume indication by calling the \ref IRtcEngine::enableAudioVolumeIndication(int, int, bool) "enableAudioVolumeIndication" method, this callback returns the @p uid of the active speaker detected by the audio volume detection module of the SDK.
+ /** Occurs when the audio publishing state changes.
+ *
+ * @since v3.1.0
+ *
+ * This callback indicates the publishing state change of the local audio stream.
+ *
+ * @param rtcChannel IChannel
+ * @param oldState The previous publishing state. For details, see #STREAM_PUBLISH_STATE.
+ * @param newState The current publishing state. For details, see #STREAM_PUBLISH_STATE.
+ * @param elapseSinceLastState The time elapsed (ms) from the previous state to the current state.
+ */
+ virtual void onAudioPublishStateChanged(IChannel *rtcChannel, STREAM_PUBLISH_STATE oldState, STREAM_PUBLISH_STATE newState, int elapseSinceLastState) {
+ (void)rtcChannel;
+ (void)oldState;
+ (void)newState;
+ (void)elapseSinceLastState;
+ }
+
+ /** Occurs when the video publishing state changes.
+ *
+ * @since v3.1.0
+ *
+ * This callback indicates the publishing state change of the local video stream.
+ *
+ * @param rtcChannel IChannel
+ * @param oldState The previous publishing state. For details, see #STREAM_PUBLISH_STATE.
+ * @param newState The current publishing state. For details, see #STREAM_PUBLISH_STATE.
+ * @param elapseSinceLastState The time elapsed (ms) from the previous state to the current state.
+ */
+ virtual void onVideoPublishStateChanged(IChannel *rtcChannel, STREAM_PUBLISH_STATE oldState, STREAM_PUBLISH_STATE newState, int elapseSinceLastState) {
+ (void)rtcChannel;
+ (void)oldState;
+ (void)newState;
+ (void)elapseSinceLastState;
+ }
+
+ /** Occurs when the audio subscribing state changes.
+ *
+ * @since v3.1.0
+ *
+ * This callback indicates the subscribing state change of a remote audio stream.
+ *
+ * @param rtcChannel IChannel
+ * @param uid The ID of the remote user.
+ * @param oldState The previous subscribing state. For details, see #STREAM_SUBSCRIBE_STATE.
+ * @param newState The current subscribing state. For details, see #STREAM_SUBSCRIBE_STATE.
+ * @param elapseSinceLastState The time elapsed (ms) from the previous state to the current state.
+ */
+ virtual void onAudioSubscribeStateChanged(IChannel *rtcChannel, uid_t uid, STREAM_SUBSCRIBE_STATE oldState, STREAM_SUBSCRIBE_STATE newState, int elapseSinceLastState) {
+ (void)rtcChannel;
+ (void)uid;
+ (void)oldState;
+ (void)newState;
+ (void)elapseSinceLastState;
+ }
+
+ /** Occurs when the video subscribing state changes.
+ *
+ * @since v3.1.0
+ *
+ * This callback indicates the subscribing state change of a remote video stream.
+ *
+ * @param rtcChannel IChannel
+ * @param uid The ID of the remote user.
+ * @param oldState The previous subscribing state. For details, see #STREAM_SUBSCRIBE_STATE.
+ * @param newState The current subscribing state. For details, see #STREAM_SUBSCRIBE_STATE.
+ * @param elapseSinceLastState The time elapsed (ms) from the previous state to the current state.
+ */
+ virtual void onVideoSubscribeStateChanged(IChannel *rtcChannel, uid_t uid, STREAM_SUBSCRIBE_STATE oldState, STREAM_SUBSCRIBE_STATE newState, int elapseSinceLastState) {
+ (void)rtcChannel;
+ (void)uid;
+ (void)oldState;
+ (void)newState;
+ (void)elapseSinceLastState;
+ }
+ /// @cond
+ /** Reports whether the super-resolution algorithm is enabled.
+ *
+ * @since v3.2.0
+ *
+ * After calling \ref IRtcChannel::enableRemoteSuperResolution "enableRemoteSuperResolution", the SDK triggers this
+ * callback to report whether the super-resolution algorithm is successfully enabled. If not successfully enabled,
+ * you can use the `reason` parameter for troubleshooting.
+ *
+ * @param rtcChannel IChannel
+ * @param uid The ID of the remote user.
+ * @param enabled Whether the super-resolution algorithm is successfully enabled:
+ * - true: The super-resolution algorithm is successfully enabled.
+ * - false: The super-resolution algorithm is not successfully enabled.
+ * @param reason The reason why the super-resolution algorithm is not successfully enabled. See #SUPER_RESOLUTION_STATE_REASON.
+ */
+ virtual void onUserSuperResolutionEnabled(IChannel *rtcChannel, uid_t uid, bool enabled, SUPER_RESOLUTION_STATE_REASON reason) {
+ (void)rtcChannel;
+ (void)uid;
+ (void)enabled;
+ (void)reason;
+ }
+ /// @endcond
+
+ /** Occurs when the most active speaker is detected.
+
+ After a successful call of \ref IRtcEngine::enableAudioVolumeIndication(int, int, bool) "enableAudioVolumeIndication",
+ the SDK continuously detects which remote user has the loudest volume. The remote user who is detected as
+ the loudest for the greatest number of times during a certain period is the most active user.
+
+ When the channel has at least two users and an active speaker exists, the SDK triggers this callback and reports the `uid` of the most active speaker.
+ - If the most active speaker is always the same user, the SDK triggers this callback only once.
+ - If the most active speaker changes to another user, the SDK triggers this callback again and reports the `uid` of the new active speaker.
- @note
- - To receive this callback, you need to call the \ref IRtcEngine::enableAudioVolumeIndication(int, int, bool) "enableAudioVolumeIndication" method.
- - This callback returns the user ID of the user with the highest voice volume during a period of time, instead of at the moment.
-
@param rtcChannel IChannel
- @param uid User ID of the active speaker. A `uid` of 0 represents the local user.
+ @param uid The user ID of the most active speaker.
*/
virtual void onActiveSpeaker(IChannel *rtcChannel, uid_t uid) {
(void)rtcChannel;
@@ -292,7 +395,7 @@ class IChannelEventHandler
@param width New width (pixels) of the video.
@param height New height (pixels) of the video.
@param rotation New rotation of the video [0 to 360).
- */
+ */
virtual void onVideoSizeChanged(IChannel *rtcChannel, uid_t uid, int width, int height, int rotation) {
(void)rtcChannel;
(void)uid;
@@ -301,9 +404,9 @@ class IChannelEventHandler
(void)rotation;
}
/** Occurs when the remote video state changes.
-
- @note This callback does not work properly when the number of users (in the Communication profile) or broadcasters (in the Live-broadcast profile) in the channel exceeds 17.
-
+
+ @note This callback does not work properly when the number of users (in the `COMMUNICATION` profile) or hosts (in the `LIVE_BROADCASTING` profile) in the channel exceeds 17.
+
@param rtcChannel IChannel
@param uid ID of the remote user whose video state changes.
@param state State of the remote video. See #REMOTE_VIDEO_STATE.
@@ -323,7 +426,7 @@ class IChannelEventHandler
/** Occurs when the local user receives the data stream from the remote user within five seconds.
The SDK triggers this callback when the local user receives the stream message that the remote user sends by calling the \ref agora::rtc::IChannel::sendStreamMessage "sendStreamMessage" method.
-
+
@param rtcChannel IChannel
@param uid User ID of the remote user sending the message.
@param streamId Stream ID.
@@ -340,7 +443,7 @@ class IChannelEventHandler
/** Occurs when the local user does not receive the data stream from the remote user within five seconds.
The SDK triggers this callback when the local user fails to receive the stream message that the remote user sends by calling the \ref agora::rtc::IChannel::sendStreamMessage "sendStreamMessage" method.
-
+
@param rtcChannel IChannel
@param uid User ID of the remote user sending the message.
@param streamId Stream ID.
@@ -379,11 +482,11 @@ class IChannelEventHandler
}
/**
Occurs when the state of the RTMP streaming changes.
-
+
The SDK triggers this callback to report the result of the local user calling the \ref agora::rtc::IChannel::addPublishStreamUrl "addPublishStreamUrl" or \ref agora::rtc::IChannel::removePublishStreamUrl "removePublishStreamUrl" method.
This callback indicates the state of the RTMP streaming. When exceptions occur, you can troubleshoot issues by referring to the detailed error descriptions in the *errCode* parameter.
-
+
@param rtcChannel IChannel
@param url The RTMP URL address.
@param state The RTMP streaming state. See: #RTMP_STREAM_PUBLISH_STATE.
@@ -395,19 +498,34 @@ class IChannelEventHandler
(RTMP_STREAM_PUBLISH_STATE) state;
(RTMP_STREAM_PUBLISH_ERROR) errCode;
}
- /** Occurs when the publisher's transcoding is updated.
-
+
+ /** Reports events during the RTMP streaming.
+ *
+ * @since v3.1.0
+ *
+ * @param rtcChannel IChannel
+ * @param url The RTMP streaming URL.
+ * @param eventCode The event code. See #RTMP_STREAMING_EVENT
+ */
+ virtual void onRtmpStreamingEvent(IChannel *rtcChannel, const char* url, RTMP_STREAMING_EVENT eventCode) {
+ (void) rtcChannel;
+ (void) url;
+ (RTMP_STREAMING_EVENT) eventCode;
+ }
+
+ /** Occurs when the publisher's transcoding is updated.
+
When the `LiveTranscoding` class in the \ref agora::rtc::IChannel::setLiveTranscoding "setLiveTranscoding" method updates, the SDK triggers the `onTranscodingUpdated` callback to report the update information to the local host.
-
+
@note If you call the `setLiveTranscoding` method to set the LiveTranscoding class for the first time, the SDK does not trigger the `onTranscodingUpdated` callback.
-
+
@param rtcChannel IChannel
- */
+ */
virtual void onTranscodingUpdated(IChannel *rtcChannel) {
(void)rtcChannel;
}
- /** Occurs when a voice or video stream URL address is added to a live broadcast.
-
+ /** Occurs when a voice or video stream URL address is added to the live interactive streaming.
+
@param rtcChannel IChannel
@param url The URL address of the externally injected stream.
@param uid User ID.
@@ -462,7 +580,7 @@ class IChannelEventHandler
(void)isFallbackOrRecover;
}
/** Occurs when the connection state between the SDK and the server changes.
-
+
@param rtcChannel IChannel
@param state See #CONNECTION_STATE_TYPE.
@param reason See #CONNECTION_CHANGED_REASON_TYPE.
@@ -483,7 +601,7 @@ class IChannel
virtual ~IChannel() {}
/** Releases all IChannel resources.
- @return
+ @return
- 0: Success.
- < 0: Failure.
- `ERR_NOT_INITIALIZED (7)`: The SDK is not initialized before calling this method.
@@ -495,7 +613,7 @@ class IChannel
@param channelEh The event handler of the `IChannel` object. For details, see IChannelEventHandler.
- @return
+ @return
- 0: Success.
- < 0: Failure.
*/
@@ -511,25 +629,27 @@ class IChannel
| Users can join multiple channels simultaneously by creating multiple `IChannel` objects and calling the `joinChannel` method of each object. | Users can join only one channel. |
| By default, the SDK does not publish any stream after the user joins the channel. You need to call the publish method to do that. | By default, the SDK publishes streams once the user joins the channel. |
- @note
+ @note
- If you are already in a channel, you cannot rejoin it with the same `uid`.
- We recommend using different UIDs for different channels.
- If you want to join the same channel from different devices, ensure that the UIDs in all devices are different.
- - Ensure that the app ID you use to generate the token is the same with the app ID used when creating the `IChannel` object.
+ - Ensure that the app ID you use to generate the token is the same with the app ID used when creating the `IRtcEngine` object.
@param token The token for authentication:
- - In situations not requiring high security: You can use the temporary token generated at Console. For details, see [Get a temporary token](https://docs.agora.io/en/Agora%20Platform/token?platfor%20*%20m=All%20Platforms#get-a-temporary-token).
- - In situations requiring high security: Set it as the token generated at your server. For details, see [Generate a token](https://docs.agora.io/en/Agora%20Platform/token?platfor%20*%20m=All%20Platforms#get-a-token).
+ - In situations not requiring high security: You can use the temporary token generated at Console. For details, see [Get a temporary token](https://docs.agora.io/en/Agora%20Platform/token?platform=All%20Platforms#generate-a-token).
+ - In situations requiring high security: Set it as the token generated at your server. For details, see [Generate a token](https://docs.agora.io/en/Interactive%20Broadcast/token_server?platform=All%20Platforms).
@param info (Optional) Additional information about the channel. This parameter can be set as null. Other users in the channel do not receive this information.
@param uid The user ID. A 32-bit unsigned integer with a value ranging from 1 to (2^32-1). This parameter must be unique. If `uid` is not assigned (or set as `0`), the SDK assigns a `uid` and reports it in the \ref agora::rtc::IChannelEventHandler::onJoinChannelSuccess "onJoinChannelSuccess" callback. The app must maintain this user ID.
@param options The channel media options: \ref agora::rtc::ChannelMediaOptions::ChannelMediaOptions "ChannelMediaOptions"
- @return
- - 0: Success.
+ @return
+ - 0(ERR_OK): Success.
- < 0: Failure.
- - #ERR_INVALID_ARGUMENT (-2)
- - #ERR_NOT_READY (-3)
- - #ERR_REFUSED (-5)
+ - -2(ERR_INVALID_ARGUMENT): The parameter is invalid.
+ - -3(ERR_NOT_READY): The SDK fails to be initialized. You can try re-initializing the SDK.
+ - -5(ERR_REFUSED): The request is rejected. This may be caused by the following:
+ - You have created an IChannel object with the same channel name.
+ - You have joined and published a stream in a channel created by the IChannel object.
*/
virtual int joinChannel(const char* token,
const char* info,
@@ -540,7 +660,7 @@ class IChannel
After the user successfully joins the channel, the SDK triggers the following callbacks:
- The local client: \ref agora::rtc::IRtcEngineEventHandler::onLocalUserRegistered "onLocalUserRegistered" and \ref agora::rtc::IChannelEventHandler::onJoinChannelSuccess "onJoinChannelSuccess" .
- - The remote client: \ref agora::rtc::IChannelEventHandler::onUserJoined "onUserJoined" and \ref agora::rtc::IRtcEngineEventHandler::onUserInfoUpdated "onUserInfoUpdated" , if the user joining the channel is in the Communication profile, or is a BROADCASTER in the Live Broadcast profile.
+ - The remote client: \ref agora::rtc::IChannelEventHandler::onUserJoined "onUserJoined" and \ref agora::rtc::IRtcEngineEventHandler::onUserInfoUpdated "onUserInfoUpdated" , if the user joining the channel is in the `COMMUNICATION` profile, or is a host in the `LIVE_BROADCASTING` profile.
@note To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account.
If a user joins the channel with the Agora Web SDK, ensure that the uid of the user is set to the same parameter type.
@@ -576,23 +696,26 @@ class IChannel
A successful \ref agora::rtc::IChannel::leaveChannel "leaveChannel" method call triggers the following callbacks:
- The local client: \ref agora::rtc::IChannelEventHandler::onLeaveChannel "onLeaveChannel"
- - The remote client: \ref agora::rtc::IChannelEventHandler::onUserOffline "onUserOffline" , if the user leaving the channel is in the Communication channel, or is a BROADCASTER in the Live Broadcast profile.
+ - The remote client: \ref agora::rtc::IChannelEventHandler::onUserOffline "onUserOffline", if the user leaving the channel is in the `COMMUNICATION` profile, or is a host in the `LIVE_BROADCASTING` profile.
@note
- If you call the \ref IChannel::release "release" method immediately after the *leaveChannel* method, the *leaveChannel* process interrupts, and the \ref IChannelEventHandler::onLeaveChannel "onLeaveChannel" callback is not triggered.
- If you call the *leaveChannel* method during a CDN live streaming, the SDK triggers the \ref IChannel::removePublishStreamUrl "removePublishStreamUrl" method.
@return
- - 0: Success.
+ - 0(ERR_OK): Success.
- < 0: Failure.
+ - -1(ERR_FAILED): A general error occurs (no specified reason).
+ - -2(ERR_INVALID_ARGUMENT): The parameter is invalid.
+ - -7(ERR_NOT_INITIALIZED): The SDK is not initialized.
*/
virtual int leaveChannel() = 0;
-
+
/** Publishes the local stream to the channel.
You must keep the following restrictions in mind when calling this method. Otherwise, the SDK returns the #ERR_REFUSED (5):
- This method publishes one stream only to the channel corresponding to the current `IChannel` object.
- - In a Live Broadcast channel, only a broadcaster can call this method. To switch the client role, call \ref agora::rtc::IChannel::setClientRole "setClientRole" of the current `IChannel` object.
+ - In the live interactive streaming channel, only a host can call this method. To switch the client role, call \ref agora::rtc::IChannel::setClientRole "setClientRole" of the current `IChannel` object.
- You can publish a stream to only one channel at a time. For details on joining multiple channels, see the advanced guide *Join Multiple Channels*.
@return
@@ -601,7 +724,7 @@ class IChannel
- #ERR_REFUSED (5): The method call is refused.
*/
virtual int publish() = 0;
-
+
/** Stops publishing a stream to the channel.
If you call this method in a channel where you are not publishing streams, the SDK returns #ERR_REFUSED (5).
@@ -612,21 +735,23 @@ class IChannel
- #ERR_REFUSED (5): The method call is refused.
*/
virtual int unpublish() = 0;
-
+
/** Gets the channel ID of the current `IChannel` object.
-
- @return
+
+ @return
- The channel ID of the current `IChannel` object, if the method call succeeds.
- The empty string "", if the method call fails.
*/
virtual const char *channelId() = 0;
/** Retrieves the current call ID.
- When a user joins a channel on a client, a `callId` is generated to identify the call from the client.
+ When a user joins a channel on a client, a `callId` is generated to identify the call from the client.
Feedback methods, such as \ref IRtcEngine::rate "rate" and \ref IRtcEngine::complain "complain", must be called after the call ends to submit feedback to the SDK.
The `rate` and `complain` methods require the `callId` parameter retrieved from the `getCallId` method during a call. `callId` is passed as an argument into the `rate` and `complain` methods after the call ends.
+ @note Ensure that you call this method after joining a channel.
+
@param callId The current call ID.
@return
@@ -644,13 +769,19 @@ class IChannel
The application should call this method to get the new `token`. Failure to do so will result in the SDK disconnecting from the server.
@param token Pointer to the new token.
+
@return
- - 0: Success.
+ - 0(ERR_OK): Success.
- < 0: Failure.
+ - -1(ERR_FAILED): A general error occurs (no specified reason).
+ - -2(ERR_INVALID_ARGUMENT): The parameter is invalid.
+ - -7(ERR_NOT_INITIALIZED): The SDK is not initialized.
*/
virtual int renewToken(const char* token) = 0;
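A brief sketch of the renewal flow described in `onRequestToken` above; the handler class and the app-server fetch are hypothetical:

```cpp
// Sketch only: fetch a fresh token and pass it back to the SDK.
class MyChannelHandler : public agora::rtc::IChannelEventHandler {
public:
    void onRequestToken(agora::rtc::IChannel* rtcChannel) override {
        const char* newToken = fetchTokenFromAppServer();  // hypothetical helper
        rtcChannel->renewToken(newToken);
    }
};
```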
/** Enables built-in encryption with an encryption password before users join a channel.
+ @deprecated Deprecated as of v3.1.0. Use the \ref agora::rtc::IChannel::enableEncryption "enableEncryption" instead.
+
All users in a channel must use the same encryption password. The encryption password is automatically cleared once a user leaves the channel.
If an encryption password is not specified, the encryption functionality will be disabled.
@@ -668,6 +799,8 @@ class IChannel
virtual int setEncryptionSecret(const char* secret) = 0;
/** Sets the built-in encryption mode.
+ @deprecated Deprecated as of v3.1.0. Use the \ref agora::rtc::IChannel::enableEncryption "enableEncryption" instead.
+
The Agora SDK supports built-in encryption, which is set to the `aes-128-xts` mode by default. Call this method to use other encryption modes.
All users in the same channel must use the same encryption mode and password.
@@ -687,15 +820,40 @@ class IChannel
- < 0: Failure.
*/
virtual int setEncryptionMode(const char* encryptionMode) = 0;
+ /** Enables/Disables the built-in encryption.
+ *
+ * @since v3.1.0
+ *
+ * In scenarios requiring high security, Agora recommends calling this method to enable the built-in encryption before joining a channel.
+ *
+ * All users in the same channel must use the same encryption mode and encryption key. Once all users leave the channel, the encryption key of this channel is automatically cleared.
+ *
+ * @note
+ * - If you enable the built-in encryption, you cannot use the RTMP streaming function.
+ * - Agora supports four encryption modes. If you choose an encryption mode (except the `SM4_128_ECB` mode), you need to add an external encryption library when integrating the Android and iOS SDK. See the advanced guide *Channel Encryption*.
+ *
+ * @param enabled Whether to enable the built-in encryption:
+ * - true: Enable the built-in encryption.
+ * - false: Disable the built-in encryption.
+ * @param config Configurations of built-in encryption schemas. See EncryptionConfig.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
+ * - -2(ERR_INVALID_ARGUMENT): An invalid parameter is used. Set the parameter with a valid value.
+ * - -4(ERR_NOT_SUPPORTED): The encryption mode is incorrect or the SDK fails to load the external encryption library. Check the enumeration or reload the external encryption library.
+ * - -7(ERR_NOT_INITIALIZED): The SDK is not initialized. Initialize the `IRtcEngine` instance before calling this method.
+ */
+ virtual int enableEncryption(bool enabled, const EncryptionConfig& config) = 0;
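A minimal sketch of enabling built-in encryption before joining. It assumes `EncryptionConfig` exposes `encryptionMode` and `encryptionKey` as in the 3.1.0 headers, `channel` is an `IChannel*` created by the engine, and the key string is a placeholder:

```cpp
// Sketch only: enable AES-128-XTS encryption for this channel.
agora::rtc::EncryptionConfig config;
config.encryptionMode = agora::rtc::AES_128_XTS;
config.encryptionKey  = "replace-with-your-own-key";  // placeholder key
channel->enableEncryption(true, config);               // call before joinChannel
```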
/** Registers a packet observer.
The Agora SDK allows your application to register a packet observer to receive callbacks for voice or video packet transmission.
-
+
@note
- The size of the packet sent to the network after processing should not exceed 1200 bytes, otherwise, the packet may fail to be sent.
- Ensure that both receivers and senders call this method, otherwise, you may encounter undefined behaviors such as no voice and a black screen.
- When you use CDN live streaming, recording or storage functions, Agora doesn't recommend calling this method.
-
+ - Call this method before joining a channel.
@param observer The registered packet observer. See IPacketObserver.
@return
@@ -706,11 +864,11 @@ class IChannel
/** Registers the metadata observer.
Registers the metadata observer. You need to implement the IMetadataObserver class and specify the metadata type in this method. A successful call of this method triggers the \ref agora::rtc::IMetadataObserver::getMaxMetadataSize "getMaxMetadataSize" callback.
- This method enables you to add synchronized metadata in the video stream for more diversified live broadcast interactions, such as sending shopping links, digital coupons, and online quizzes.
+ This method enables you to add synchronized metadata in the video stream for more diversified interactive live streaming, such as sending shopping links, digital coupons, and online quizzes.
@note
- Call this method before the joinChannel method.
- - This method applies to the Live-broadcast channel profile.
+ - This method applies to the `LIVE_BROADCASTING` channel profile.
@param observer The IMetadataObserver class. See the definition of IMetadataObserver for details.
@param type See \ref IMetadataObserver::METADATA_TYPE "METADATA_TYPE". The SDK supports VIDEO_METADATA (0) only for now.
@@ -720,16 +878,16 @@ class IChannel
- < 0: Failure.
*/
virtual int registerMediaMetadataObserver(IMetadataObserver *observer, IMetadataObserver::METADATA_TYPE type) = 0;
- /** Sets the role of the user, such as a host or an audience (default), before joining a channel in a live broadcast.
+ /** Sets the role of the user, such as a host or an audience (default), before joining a channel in the interactive live streaming.
- This method can be used to switch the user role in a live broadcast after the user joins a channel.
+ This method can be used to switch the user role in the interactive live streaming after the user joins a channel.
- In the Live Broadcast profile, when a user switches user roles after joining a channel, a successful \ref agora::rtc::IChannel::setClientRole "setClientRole" method call triggers the following callbacks:
+ In the `LIVE_BROADCASTING` profile, when a user switches user roles after joining a channel, a successful \ref agora::rtc::IChannel::setClientRole "setClientRole" method call triggers the following callbacks:
- The local client: \ref agora::rtc::IChannelEventHandler::onClientRoleChanged "onClientRoleChanged"
- The remote client: \ref agora::rtc::IChannelEventHandler::onUserJoined "onUserJoined" or \ref agora::rtc::IChannelEventHandler::onUserOffline "onUserOffline" (BECOME_AUDIENCE)
@note
- This method applies only to the Live-broadcast profile.
+ This method applies only to the `LIVE_BROADCASTING` profile.
@param role Sets the role of the user. See #CLIENT_ROLE_TYPE.
@return
@@ -737,37 +895,81 @@ class IChannel
- < 0: Failure.
*/
virtual int setClientRole(CLIENT_ROLE_TYPE role) = 0;
+ /// @cond
+ /** Sets the role of a user in a live interactive streaming.
+ *
+ * @since v3.2.0
+ *
+ * You can call this method either before or after joining the channel to set the user role as audience or host. If
+ * you call this method to switch the user role after joining the channel, the SDK triggers the following callbacks:
+ * - The local client: \ref IRtcChannelEventHandler::onClientRoleChanged "onClientRoleChanged".
+ * - The remote client: \ref IRtcChannelEventHandler::onUserJoined "onUserJoined"
+ * or \ref IRtcChannelEventHandler::onUserOffline "onUserOffline".
+ *
+ * @note
+ * - This method applies to the `LIVE_BROADCASTING` profile only (when the `profile` parameter in
+ * \ref IRtcChannel::setChannelProfile "setChannelProfile" is set as `CHANNEL_PROFILE_LIVE_BROADCASTING`).
+ * - The difference between this method and \ref IRtcChannel::setClientRole(CLIENT_ROLE_TYPE) "setClientRole1" is that
+ * this method can set the user level in addition to the user role.
+ * - The user role determines the permissions that the SDK grants to a user, such as permission to send local
+ * streams, receive remote streams, and push streams to a CDN address.
+ * - The user level determines the level of services that a user can enjoy within the permissions of the user's
+ * role. For example, an audience can choose to receive remote streams with low latency or ultra low latency. Levels
+ * affect prices.
+ *
+ * **Example**
+ * ```cpp
+ * ClientRoleOptions options;
+ * // Choose one of the latency levels; only the uncommented assignment takes effect:
+ * options.audienceLatencyLevel = AUDIENCE_LATENCY_LEVEL_ULTRA_LOW_LATENCY;
+ * // options.audienceLatencyLevel = AUDIENCE_LATENCY_LEVEL_LOW_LATENCY;
+ * agoraChannel->setClientRole(role, options);
+ * ```
+ *
+ * @param role The role of a user in a live interactive streaming. See #CLIENT_ROLE_TYPE.
+ * @param options The detailed options of a user, including user level. See ClientRoleOptions.
+ *
+ * @return
+ * - 0(ERR_OK): Success.
+ * - < 0: Failure.
+ * - -1(ERR_FAILED): A general error occurs (no specified reason).
+ * - -2(ERR_INVALID_ARGUMENT): The parameter is invalid.
+ * - -7(ERR_NOT_INITIALIZED): The SDK is not initialized.
+ */
+ virtual int setClientRole(CLIENT_ROLE_TYPE role, const ClientRoleOptions& options) = 0;
+ /// @endcond
/** Prioritizes a remote user's stream.
-
- Use this method with the \ref IRtcEngine::setRemoteSubscribeFallbackOption "setRemoteSubscribeFallbackOption" method.
- If the fallback function is enabled for a subscribed stream, the SDK ensures the high-priority user gets the best possible stream quality.
-
- @note The Agora SDK supports setting `serPriority` as high for one user only.
-
- @param uid The ID of the remote user.
- @param userPriority Sets the priority of the remote user. See #PRIORITY_TYPE.
-
- @return
- - 0: Success.
- - < 0: Failure.
+ *
+ * The SDK ensures the high-priority user gets the best possible stream quality.
+ *
+ * @note
+ * - The Agora SDK supports setting `userPriority` as high for one user only.
+ * - Ensure that you call this method before joining a channel.
+ *
+ * @param uid The ID of the remote user.
+ * @param userPriority Sets the priority of the remote user. See #PRIORITY_TYPE.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
*/
virtual int setRemoteUserPriority(uid_t uid, PRIORITY_TYPE userPriority) = 0;
/** Sets the sound position and gain of a remote user.
- When the local user calls this method to set the sound position of a remote user, the sound difference between the left and right channels allows the
- local user to track the real-time position of the remote user, creating a real sense of space. This method applies to massively multiplayer online games,
+ When the local user calls this method to set the sound position of a remote user, the sound difference between the left and right channels allows the
+ local user to track the real-time position of the remote user, creating a real sense of space. This method applies to massively multiplayer online games,
such as Battle Royale games.
@note
- For this method to work, enable stereo panning for remote users by calling the \ref agora::rtc::IRtcEngine::enableSoundPositionIndication "enableSoundPositionIndication" method before joining a channel.
- This method requires hardware support. For the best sound positioning, we recommend using a stereo speaker.
+ - Ensure that you call this method after joining a channel.
@param uid The ID of the remote user.
@param pan The sound position of the remote user. The value ranges from -1.0 to 1.0:
- 0.0: the remote sound comes from the front.
- -1.0: the remote sound comes from the left.
- 1.0: the remote sound comes from the right.
- @param gain Gain of the remote user. The value ranges from 0.0 to 100.0. The default value is 100.0 (the original gain of the remote user).
+ @param gain Gain of the remote user. The value ranges from 0.0 to 100.0. The default value is 100.0 (the original gain of the remote user).
The smaller the value, the less the gain.
@return
@@ -777,7 +979,7 @@ class IChannel
virtual int setRemoteVoicePosition(uid_t uid, double pan, double gain) = 0;
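A one-line usage sketch; it assumes `enableSoundPositionIndication(true)` was called on the `IRtcEngine` before joining, `channel` is an `IChannel*`, and the uid is illustrative:

```cpp
// Sketch only: place uid 12345 fully to the left at the original gain.
channel->setRemoteVoicePosition(12345, -1.0, 100.0);
```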
/** Updates the display mode of the video view of a remote user.
- After initializing the video view of a remote user, you can call this method to update its rendering and mirror modes.
+ After initializing the video view of a remote user, you can call this method to update its rendering and mirror modes.
This method affects only the video view that the local user sees.
@note
@@ -786,7 +988,7 @@ class IChannel
@param userId The ID of the remote user.
@param renderMode The rendering mode of the remote video view. See #RENDER_MODE_TYPE.
- @param mirrorMode
+ @param mirrorMode
- The mirror mode of the remote video view. See #VIDEO_MIRROR_MODE_TYPE.
- **Note**: The SDK disables the mirror mode by default.
@@ -795,13 +997,13 @@ class IChannel
- < 0: Failure.
*/
virtual int setRemoteRenderMode(uid_t userId, RENDER_MODE_TYPE renderMode, VIDEO_MIRROR_MODE_TYPE mirrorMode) = 0;
- /** Sets whether to receive all remote audio streams by default.
-
+ /** Sets whether to receive all remote audio streams by default.
+
You can call this method either before or after joining a channel. If you call `setDefaultMuteAllRemoteAudioStreams (true)` after joining a channel, the remote audio streams of all subsequent users are not received.
- @note If you want to resume receiving the audio stream, call \ref agora::rtc::IChannel::muteRemoteAudioStream "muteRemoteAudioStream (false)",
- and specify the ID of the remote user whose audio stream you want to receive.
- To receive the audio streams of multiple remote users, call `muteRemoteAudioStream (false)` as many times.
+ @note If you want to resume receiving the audio stream, call \ref agora::rtc::IChannel::muteRemoteAudioStream "muteRemoteAudioStream (false)",
+ and specify the ID of the remote user whose audio stream you want to receive.
+ To receive the audio streams of multiple remote users, call `muteRemoteAudioStream (false)` as many times.
Calling `setDefaultMuteAllRemoteAudioStreams (false)` resumes receiving the audio streams of subsequent users only.
@param mute Sets whether to receive/stop receiving all remote users' audio streams by default:
@@ -813,17 +1015,17 @@ class IChannel
- < 0: Failure.
*/
virtual int setDefaultMuteAllRemoteAudioStreams(bool mute) = 0;
- /** Sets whether to receive all remote video streams by default.
-
- You can call this method either before or after joining a channel. If you
- call `setDefaultMuteAllRemoteVideoStreams (true)` after joining a channel,
+ /** Sets whether to receive all remote video streams by default.
+
+ You can call this method either before or after joining a channel. If you
+ call `setDefaultMuteAllRemoteVideoStreams (true)` after joining a channel,
the remote video streams of all subsequent users are not received.
- @note If you want to resume receiving the video stream, call
- \ref agora::rtc::IChannel::muteRemoteVideoStream "muteRemoteVideoStream (false)",
- and specify the ID of the remote user whose video stream you want to receive.
- To receive the video streams of multiple remote users, call `muteRemoteVideoStream (false)`
- as many times. Calling `setDefaultMuteAllRemoteVideoStreams (false)` resumes
+ @note If you want to resume receiving the video stream, call
+ \ref agora::rtc::IChannel::muteRemoteVideoStream "muteRemoteVideoStream (false)",
+ and specify the ID of the remote user whose video stream you want to receive.
+ To receive the video streams of multiple remote users, call `muteRemoteVideoStream (false)`
+ as many times. Calling `setDefaultMuteAllRemoteVideoStreams (false)` resumes
receiving the video streams of subsequent users only.
@param mute Sets whether to receive/stop receiving all remote users' video streams by default:
@@ -848,13 +1050,13 @@ class IChannel
virtual int muteAllRemoteAudioStreams(bool mute) = 0;
/** Adjust the playback volume of the specified remote user.
- After joining a channel, call \ref agora::rtc::IRtcEngine::adjustPlaybackSignalVolume "adjustPlaybackSignalVolume" to adjust the playback volume of different remote users,
+ After joining a channel, call \ref agora::rtc::IRtcEngine::adjustPlaybackSignalVolume "adjustPlaybackSignalVolume" to adjust the playback volume of different remote users,
or adjust multiple times for one remote user.
-
+
@note
- Call this method after joining a channel.
- This method adjusts the playback volume, which is the mixed volume for the specified remote user.
- - This method can only adjust the playback volume of one specified remote user at a time. If you want to adjust the playback volume of several remote users,
+ - This method can only adjust the playback volume of one specified remote user at a time. If you want to adjust the playback volume of several remote users,
call the method multiple times, once for each remote user.
@param userId The user ID, which should be the same as the `uid` of \ref agora::rtc::IChannel::joinChannel "joinChannel"
@@ -864,13 +1066,16 @@ class IChannel
@return
- 0: Success.
- - < 0: Failure.
+ - < 0: Failure.
*/
virtual int adjustUserPlaybackSignalVolume(uid_t userId, int volume) = 0;
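A short usage sketch; the uid is illustrative and 100 keeps the original volume:

```cpp
// Sketch only: halve the playback volume of one remote user after joining.
channel->adjustUserPlaybackSignalVolume(12345, 50);
```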
/** Stops/Resumes receiving a specified remote user's audio stream.
- @note If you called the \ref agora::rtc::IChannel::muteAllRemoteAudioStreams "muteAllRemoteAudioStreams" method and set `mute` as `true` to stop
- receiving all remote users' audio streams, call the `muteAllRemoteAudioStreams` method and set `mute` as `false` before calling this method.
+ @note
+ - You can call this method either before or after joining a channel. If you call it before joining a channel,
+ you need to maintain the `uid` of the remote user on your app level.
+ - If you called the \ref agora::rtc::IChannel::muteAllRemoteAudioStreams "muteAllRemoteAudioStreams" method and set `mute` as `true` to stop
+ receiving all remote users' audio streams, call the `muteAllRemoteAudioStreams` method and set `mute` as `false` before calling this method.
The `muteAllRemoteAudioStreams` method sets all remote audio streams, while the `muteRemoteAudioStream` method sets a specified remote audio stream.
@param userId The user ID of the specified remote user sending the audio.
@@ -886,7 +1091,9 @@ class IChannel
virtual int muteRemoteAudioStream(uid_t userId, bool mute) = 0;
/** Stops/Resumes receiving all video stream from a specified remote user.
- @param mute Sets whether to receive/stop receiving all remote users' video streams:
+ @note You can call this method either before or after joining a channel.
+
+ @param mute Sets whether to receive/stop receiving all remote users' video streams:
- true: Stop receiving all remote users' video streams.
- false: (Default) Receive all remote users' video streams.
@@ -897,8 +1104,11 @@ class IChannel
virtual int muteAllRemoteVideoStreams(bool mute) = 0;
/** Stops/Resumes receiving the video stream from a specified remote user.
- @note If you called the \ref agora::rtc::IChannel::muteAllRemoteVideoStreams "muteAllRemoteVideoStreams" method and
- set `mute` as `true` to stop receiving all remote video streams, call the `muteAllRemoteVideoStreams` method and
+ @note
+ - You can call this method either before or after joining a channel. If you call it before joining a channel, you
+ need to maintain the `uid` of the remote user on your app level.
+ - If you called the \ref agora::rtc::IChannel::muteAllRemoteVideoStreams "muteAllRemoteVideoStreams" method and
+ set `mute` as `true` to stop receiving all remote video streams, call the `muteAllRemoteVideoStreams` method and
set `mute` as `false` before calling this method.
@param userId The user ID of the specified remote user.
@@ -913,16 +1123,16 @@ class IChannel
virtual int muteRemoteVideoStream(uid_t userId, bool mute) = 0;
/** Sets the stream type of the remote video.
- Under limited network conditions, if the publisher has not disabled the dual-stream mode using
- \ref agora::rtc::IRtcEngine::enableDualStreamMode "enableDualStreamMode" (false),
- the receiver can choose to receive either the high-quality video stream (the high resolution, and high bitrate video stream) or
+ Under limited network conditions, if the publisher has not disabled the dual-stream mode using
+ \ref agora::rtc::IRtcEngine::enableDualStreamMode "enableDualStreamMode" (false),
+ the receiver can choose to receive either the high-quality video stream (the high resolution, and high bitrate video stream) or
the low-video stream (the low resolution, and low bitrate video stream).
- By default, users receive the high-quality video stream. Call this method if you want to switch to the low-video stream.
- This method allows the app to adjust the corresponding video stream type based on the size of the video window to
+ By default, users receive the high-quality video stream. Call this method if you want to switch to the low-video stream.
+ This method allows the app to adjust the corresponding video stream type based on the size of the video window to
reduce the bandwidth and resources.
- The aspect ratio of the low-video stream is the same as the high-quality video stream. Once the resolution of the high-quality video
+ The aspect ratio of the low-video stream is the same as the high-quality video stream. Once the resolution of the high-quality video
stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-video stream.
The method result returns in the \ref agora::rtc::IRtcEngineEventHandler::onApiCallExecuted "onApiCallExecuted" callback.
@@ -936,15 +1146,15 @@ class IChannel
virtual int setRemoteVideoStreamType(uid_t userId, REMOTE_VIDEO_STREAM_TYPE streamType) = 0;
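A short sketch of switching one remote user to the low-quality stream; the enum value is assumed from the 3.x headers and the uid is illustrative:

```cpp
// Sketch only: request the low-resolution, low-bitrate stream for a small view.
channel->setRemoteVideoStreamType(12345, agora::rtc::REMOTE_VIDEO_STREAM_LOW);
```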
/** Sets the default stream type of remote videos.
- Under limited network conditions, if the publisher has not disabled the dual-stream mode using
- \ref agora::rtc::IRtcEngine::enableDualStreamMode "enableDualStreamMode" (false),
- the receiver can choose to receive either the high-quality video stream (the high resolution, and high bitrate video stream) or
+ Under limited network conditions, if the publisher has not disabled the dual-stream mode using
+ \ref agora::rtc::IRtcEngine::enableDualStreamMode "enableDualStreamMode" (false),
+ the receiver can choose to receive either the high-quality video stream (the high resolution, and high bitrate video stream) or
the low-video stream (the low resolution, and low bitrate video stream).
- By default, users receive the high-quality video stream. Call this method if you want to switch to the low-video stream.
- This method allows the app to adjust the corresponding video stream type based on the size of the video window to
+ By default, users receive the high-quality video stream. Call this method if you want to switch to the low-video stream.
+ This method allows the app to adjust the corresponding video stream type based on the size of the video window to
reduce the bandwidth and resources. The aspect ratio of the low-video stream is the same as the high-quality video stream.
- Once the resolution of the high-quality video
+ Once the resolution of the high-quality video
stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-video stream.
The method result returns in the \ref agora::rtc::IRtcEngineEventHandler::onApiCallExecuted "onApiCallExecuted" callback.
@@ -960,13 +1170,15 @@ class IChannel
Each user can create up to five data streams during the lifecycle of the IChannel.
- @note Set both the `reliable` and `ordered` parameters to `true` or `false`. Do not set one as `true` and the other as `false`.
+ @note
+ - Set both the `reliable` and `ordered` parameters to `true` or `false`. Do not set one as `true` and the other as `false`.
+ - Ensure that you call this method after joining a channel.
- @param streamId The ID of the created data stream.
+ @param[out] streamId The ID of the created data stream.
@param reliable Sets whether or not the recipients are guaranteed to receive the data stream from the sender within five seconds:
- - true: The recipients receive the data stream from the sender within five seconds. If the recipient does not receive the data stream within five seconds,
+ - true: The recipients receive the data stream from the sender within five seconds. If the recipient does not receive the data stream within five seconds,
an error is reported to the application.
- - false: There is no guarantee that the recipients receive the data stream within five seconds and no error message is reported for
+ - false: There is no guarantee that the recipients receive the data stream within five seconds and no error message is reported for
any delay or missing data stream.
@param ordered Sets whether or not the recipients receive the data stream in the sent order:
- true: The recipients receive the data stream in the sent order.
@@ -984,14 +1196,14 @@ class IChannel
- Each client can send up to 6 kB of data per second.
- Each user can have up to five data streams simultaneously.
- A successful \ref agora::rtc::IChannel::sendStreamMessage "sendStreamMessage" method call triggers
+ A successful \ref agora::rtc::IChannel::sendStreamMessage "sendStreamMessage" method call triggers
the \ref agora::rtc::IChannelEventHandler::onStreamMessage "onStreamMessage" callback on the remote client, from which the remote user gets the stream message.
- A failed \ref agora::rtc::IChannel::sendStreamMessage "sendStreamMessage" method call triggers
+ A failed \ref agora::rtc::IChannel::sendStreamMessage "sendStreamMessage" method call triggers
the \ref agora::rtc::IChannelEventHandler::onStreamMessageError "onStreamMessageError" callback on the remote client.
- @note
- - This method applies only to the Communication profile or to the hosts in the Live-broadcast profile. If an audience in the Live-broadcast profile calls this method, the audience may be switched to a host.
+ @note
+ - This method applies only to the `COMMUNICATION` profile or to the hosts in the `LIVE_BROADCASTING` profile. If an audience in the `LIVE_BROADCASTING` profile calls this method, the audience may be switched to a host.
- Ensure that you have created the data stream using \ref agora::rtc::IChannel::createDataStream "createDataStream" before calling this method.
@param streamId The ID of the sent data stream, returned in the \ref IChannel::createDataStream "createDataStream" method.
@@ -1004,11 +1216,11 @@ class IChannel
*/
virtual int sendStreamMessage(int streamId, const char* data, size_t length) = 0;
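/* Illustrative usage (not part of the SDK header): a minimal sketch of the data
 * stream flow described above; `streamId` is returned through the out parameter
 * and `reliable`/`ordered` are both set to true. `channel` is an assumption.
 *
 *   int streamId = 0;
 *   if (channel->createDataStream(&streamId, true, true) == 0) {
 *     const char msg[] = "sync:42";
 *     channel->sendStreamMessage(streamId, msg, sizeof(msg) - 1);
 *   }
 */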
/** Publishes the local stream to a specified CDN live RTMP address. (CDN live only.)
-
+
The SDK returns the result of this method call in the \ref IRtcEngineEventHandler::onStreamPublished "onStreamPublished" callback.
-
- The \ref agora::rtc::IChannel::addPublishStreamUrl "addPublishStreamUrl" method call triggers
- the \ref agora::rtc::IChannelEventHandler::onRtmpStreamingStateChanged "onRtmpStreamingStateChanged" callback on the local client
+
+ The \ref agora::rtc::IChannel::addPublishStreamUrl "addPublishStreamUrl" method call triggers
+ the \ref agora::rtc::IChannelEventHandler::onRtmpStreamingStateChanged "onRtmpStreamingStateChanged" callback on the local client
to report the state of adding a local stream to the CDN.
@note
@@ -1028,15 +1240,14 @@ class IChannel
- #ERR_NOT_INITIALIZED (7): You have not initialized `IChannel` when publishing the stream.
*/
virtual int addPublishStreamUrl(const char *url, bool transcodingEnabled) = 0;
- /** Removes an RTMP stream from the CDN.
+ /** Removes an RTMP stream from the CDN.
This method removes the RTMP URL address (added by the \ref IChannel::addPublishStreamUrl "addPublishStreamUrl" method) from a CDN live stream.
-
The SDK returns the result of this method call in the \ref IRtcEngineEventHandler::onStreamUnpublished "onStreamUnpublished" callback.
- The \ref agora::rtc::IChannel::removePublishStreamUrl "removePublishStreamUrl" method call triggers
+ The \ref agora::rtc::IChannel::removePublishStreamUrl "removePublishStreamUrl" method call triggers
the \ref agora::rtc::IChannelEventHandler::onRtmpStreamingStateChanged "onRtmpStreamingStateChanged" callback on the local client to report the state of removing an RTMP stream from the CDN.
-
+
@note
- This method removes only one RTMP URL address each time it is called.
- The RTMP URL address must not contain special characters, such as Chinese language characters.
@@ -1049,14 +1260,15 @@ class IChannel
*/
virtual int removePublishStreamUrl(const char *url) = 0;
/** Sets the video layout and audio settings for CDN live. (CDN live only.)
-
- The SDK triggers the \ref agora::rtc::IChannelEventHandler::onTranscodingUpdated "onTranscodingUpdated" callback when you
+
+ The SDK triggers the \ref agora::rtc::IChannelEventHandler::onTranscodingUpdated "onTranscodingUpdated" callback when you
call the `setLiveTranscoding` method to update the transcoding setting.
-
+
@note
- Ensure that you enable the RTMP Converter service before using this function. See Prerequisites in the advanced guide *Push Streams to CDN*..
- If you call the `setLiveTranscoding` method to set the transcoding setting for the first time, the SDK does not trigger the `onTranscodingUpdated` callback.
-
+ - Ensure that you call this method after joining a channel.
+
@param transcoding Sets the CDN live audio/video transcoding settings. See LiveTranscoding.
@return
@@ -1064,9 +1276,9 @@ class IChannel
- < 0: Failure.
*/
virtual int setLiveTranscoding(const LiveTranscoding &transcoding) = 0;
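/* Illustrative usage (not part of the SDK header): a minimal CDN-publish sketch
 * following the notes above; set the transcoding first, then add the RTMP URL
 * with transcoding enabled. `channel`, `rtmpUrl`, and the resolution values are
 * assumptions, not recommendations.
 *
 *   LiveTranscoding transcoding;
 *   transcoding.width = 640;
 *   transcoding.height = 360;
 *   channel->setLiveTranscoding(transcoding);
 *   channel->addPublishStreamUrl(rtmpUrl, true); // true: publish with transcoding
 */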
- /** Adds a voice or video stream URL address to a live broadcast.
+ /** Adds a voice or video stream URL address to the interactive live streaming.
- The \ref IRtcEngineEventHandler::onStreamPublished "onStreamPublished" callback returns the inject status.
+ The \ref agora::rtc::IChannelEventHandler::onStreamInjectedStatus "onStreamInjectedStatus" callback returns the inject status.
If this method call is successful, the server pulls the voice or video stream and injects it into a live channel.
This is applicable to scenarios where all audience members in the channel can watch a live show and interact with each other.
@@ -1080,10 +1292,11 @@ class IChannel
@note
- Ensure that you enable the RTMP Converter service before using this function. See Prerequisites in the advanced guide *Push Streams to CDN*.
- This method applies to the Native SDK v2.4.1 and later.
- - This method applies to the Live-Broadcast profile only.
+ - This method applies to the `LIVE_BROADCASTING` profile only.
- You can inject only one media stream into the channel at the same time.
+ - Ensure that you call this method after joining a channel.
- @param url The URL address to be added to the ongoing live broadcast. Valid protocols are RTMP, HLS, and HTTP-FLV.
+ @param url The URL address to be added to the ongoing live streaming. Valid protocols are RTMP, HLS, and HTTP-FLV.
- Supported audio codec type: AAC.
- Supported video codec type: H264 (AVC).
@param config The InjectStreamConfig object that contains the configuration of the added voice or video stream.
@@ -1093,13 +1306,13 @@ class IChannel
- < 0: Failure.
- #ERR_INVALID_ARGUMENT (2): The injected URL does not exist. Call this method again to inject the stream and ensure that the URL is valid.
- #ERR_NOT_READY (3): The user is not in the channel.
- - #ERR_NOT_SUPPORTED (4): The channel profile is not live broadcast. Call the \ref IRtcEngine::setChannelProfile "setChannelProfile" method and set the channel profile to live broadcast before calling this method.
+ - #ERR_NOT_SUPPORTED (4): The channel profile is not `LIVE_BROADCASTING`. Call the \ref IRtcEngine::setChannelProfile "setChannelProfile" method and set the channel profile to `LIVE_BROADCASTING` before calling this method.
- #ERR_NOT_INITIALIZED (7): The SDK is not initialized. Ensure that the IChannel object is initialized before calling this method.
*/
virtual int addInjectStreamUrl(const char* url, const InjectStreamConfig& config) = 0;
- /** Removes the voice or video stream URL address from a live broadcast.
+ /** Removes the voice or video stream URL address from a live streaming.
- This method removes the URL address (added by the \ref IChannel::addInjectStreamUrl "addInjectStreamUrl" method) from the live broadcast.
+ This method removes the URL address (added by the \ref IChannel::addInjectStreamUrl "addInjectStreamUrl" method) from the live streaming.
@note If this method is called successfully, the SDK triggers the \ref IChannelEventHandler::onUserOffline "onUserOffline" callback and returns a stream uid of 666.
@@ -1124,7 +1337,7 @@ class IChannel
* #RELAY_STATE_RUNNING (2) and #RELAY_OK (0), and the
* \ref agora::rtc::IChannelEventHandler::onChannelMediaRelayEvent
* "onChannelMediaRelayEvent" callback returns
- * #RELAY_EVENT_PACKET_SENT_TO_DEST_CHANNEL (4), the broadcaster starts
+ * #RELAY_EVENT_PACKET_SENT_TO_DEST_CHANNEL (4), the host starts
* sending data to the destination channel.
* - If the
* \ref agora::rtc::IChannelEventHandler::onChannelMediaRelayStateChanged
@@ -1134,8 +1347,8 @@ class IChannel
*
* @note
* - Call this method after the \ref joinChannel() "joinChannel" method.
- * - This method takes effect only when you are a broadcaster in a
- * Live-broadcast channel.
+ * - This method takes effect only when you are a host in a
+ * `LIVE_BROADCASTING` channel.
* - After a successful method call, if you want to call this method
* again, ensure that you call the
* \ref stopChannelMediaRelay() "stopChannelMediaRelay" method to quit the
@@ -1151,8 +1364,8 @@ class IChannel
* - < 0: Failure.
*/
virtual int startChannelMediaRelay(const ChannelMediaRelayConfiguration &configuration) = 0;
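/* Illustrative usage (not part of the SDK header): relaying the stream of the
 * current channel to one destination channel. The ChannelMediaInfo and
 * ChannelMediaRelayConfiguration member names follow the structs defined earlier
 * in the SDK headers; the channel names, tokens, and `channel` are assumptions.
 *
 *   ChannelMediaInfo src  = { "source-channel", srcToken, 0 };
 *   ChannelMediaInfo dest = { "dest-channel", destToken, 0 };
 *   ChannelMediaRelayConfiguration config;
 *   config.srcInfo = &src;
 *   config.destInfos = &dest;
 *   config.destCount = 1;
 *   channel->startChannelMediaRelay(config);
 */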
- /** Updates the channels for media stream relay.
- *
+ /** Updates the channels for media stream relay.
+ *
* After a successful
* \ref startChannelMediaRelay() "startChannelMediaRelay" method call, if
* you want to relay the media stream to more channels, or leave the
@@ -1179,13 +1392,13 @@ class IChannel
virtual int updateChannelMediaRelay(const ChannelMediaRelayConfiguration &configuration) = 0;
/** Stops the media stream relay.
*
- * Once the relay stops, the broadcaster quits all the destination
+ * Once the relay stops, the host quits all the destination
* channels.
*
* After a successful method call, the SDK triggers the
* \ref agora::rtc::IChannelEventHandler::onChannelMediaRelayStateChanged
* "onChannelMediaRelayStateChanged" callback. If the callback returns
- * #RELAY_STATE_IDLE (0) and #RELAY_OK (0), the broadcaster successfully
+ * #RELAY_STATE_IDLE (0) and #RELAY_OK (0), the host successfully
* stops the relay.
*
* @note
@@ -1204,41 +1417,100 @@ class IChannel
virtual int stopChannelMediaRelay() = 0;
/** Gets the current connection state of the SDK.
+ @note You can call this method either before or after joining a channel.
+
@return #CONNECTION_STATE_TYPE.
*/
virtual CONNECTION_STATE_TYPE getConnectionState() = 0;
+ /// @cond
+ /** Enables/Disables the super-resolution algorithm for a remote user's video stream.
+ *
+ * @since v3.2.0
+ *
+ * The algorithm effectively improves the resolution of the specified remote user's video stream. When the original
+ * resolution of the remote video stream is a × b pixels, you can receive and render the stream at a higher
+ * resolution (2a × 2b pixels) by enabling the algorithm.
+ *
+ * After calling this method, the SDK triggers the
+ * \ref IRtcChannelEventHandler::onUserSuperResolutionEnabled "onUserSuperResolutionEnabled" callback to report
+ * whether you have successfully enabled the super-resolution algorithm.
+ *
+ * @warning The super-resolution algorithm requires extra system resources.
+ * To balance the visual experience and system usage, the SDK poses the following restrictions:
+ * - The algorithm can only be used for a single user at a time.
+ * - On the Android platform, the original resolution of the remote video must not exceed 640 × 360 pixels.
+ * - On the iOS platform, the original resolution of the remote video must not exceed 640 × 480 pixels.
+ * If you exceed these limitations, the SDK triggers the \ref IRtcChannelEventHandler::onWarning "onWarning"
+ * callback with the corresponding warning codes:
+ * - #WARN_SUPER_RESOLUTION_STREAM_OVER_LIMITATION (1610): The original resolution of the remote video is beyond the range where the super-resolution algorithm can be applied.
+ * - #WARN_SUPER_RESOLUTION_USER_COUNT_OVER_LIMITATION (1611): Another user is already using the super-resolution algorithm.
+ * - #WARN_SUPER_RESOLUTION_DEVICE_NOT_SUPPORTED (1612): The device does not support the super-resolution algorithm.
+ *
+ * @note
+ * - This method applies to Android and iOS only.
+ * - Requirements for the user's device:
+ * - Android: The following devices are known to support the method:
+ * - VIVO: V1821A, NEX S, 1914A, 1916A, and 1824BA
+ * - OPPO: PCCM00
+ * - OnePlus: A6000
+ * - Xiaomi: Mi 8, Mi 9, MIX3, and Redmi K20 Pro
+ * - SAMSUNG: SM-G9600, SM-G9650, SM-N9600, SM-G9708, SM-G960U, and SM-G9750
+ * - HUAWEI: SEA-AL00, ELE-AL00, VOG-AL00, YAL-AL10, HMA-AL00, and EVR-AN00
+ * - iOS: This method is supported on devices running iOS 12.0 or later. The following
+ * device models are known to support the method:
+ * - iPhone XR
+ * - iPhone XS
+ * - iPhone XS Max
+ * - iPhone 11
+ * - iPhone 11 Pro
+ * - iPhone 11 Pro Max
+ * - iPad Pro 11-inch (3rd Generation)
+ * - iPad Pro 12.9-inch (3rd Generation)
+ * - iPad Air 3 (3rd Generation)
+ *
+ * @param userId The ID of the remote user.
+ * @param enable Whether to enable the super-resolution algorithm:
+ * - true: Enable the super-resolution algorithm.
+ * - false: Disable the super-resolution algorithm.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
+ */
+ virtual int enableRemoteSuperResolution(uid_t userId, bool enable) = 0;
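/* Illustrative usage (not part of the SDK header): enable super resolution for a
 * single remote user and disable it before enabling it for someone else, since
 * only one user can use the algorithm at a time. `channel` and `remoteUid` are
 * assumptions.
 *
 *   channel->enableRemoteSuperResolution(remoteUid, true);
 *   // ...later, before switching to another user:
 *   channel->enableRemoteSuperResolution(remoteUid, false);
 */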
+ /// @endcond
};
-/** @since v3.0.0
-
+/** @since v3.0.0
+
The IRtcEngine2 class. */
class IRtcEngine2 : public IRtcEngine
{
public:
-
+
/** Creates and gets an `IChannel` object.
- To join more than one channel, call this method multiple times to create as many `IChannel` objects as needed, and
+ To join more than one channel, call this method multiple times to create as many `IChannel` objects as needed, and
call the \ref agora::rtc::IChannel::joinChannel "joinChannel" method of each created `IChannel` object.
-
+
After joining multiple channels, you can simultaneously subscribe to streams of all the channels, but publish a stream in only one channel at one time.
@param channelId The unique channel name for an Agora RTC session. It must be in the string format and not exceed 64 bytes in length. Supported character scopes are:
- - All lowercase English letters: a to z.
- - All uppercase English letters: A to Z.
- - All numeric characters: 0 to 9.
- - The space character.
+ - All lowercase English letters: a to z.
+ - All uppercase English letters: A to Z.
+ - All numeric characters: 0 to 9.
+ - The space character.
- Punctuation characters and other symbols, including: "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", " {", "}", "|", "~", ",".
@note
- This parameter does not have a default value. You must set it.
- Do not set it as the empty string "". Otherwise, the SDK returns #ERR_REFUSED (5).
- @return
+ @return
- The `IChannel` object, if the method call succeeds.
- An empty pointer NULL, if the method call fails.
- `ERR_REFUSED(5)`, if you set channelId as the empty string "".
*/
virtual IChannel* createChannel(const char *channelId) = 0;
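/* Illustrative usage (not part of the SDK header): joining two channels with two
 * IChannel objects, as described above. `engine2`, the tokens, and the event
 * handlers (omitted here) are assumptions from the calling app.
 *
 *   IChannel* ch1 = engine2->createChannel("channel-1");
 *   IChannel* ch2 = engine2->createChannel("channel-2");
 *   ch1->joinChannel(token1, "", 0, ChannelMediaOptions());
 *   ch2->joinChannel(token2, "", 0, ChannelMediaOptions());
 *   // Subscribe in both channels, but publish in at most one channel at a time.
 */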
-
+
};
diff --git a/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraRtcEngine.h b/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraRtcEngine.h
index 2118ce62b..ab6b753f4 100644
--- a/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraRtcEngine.h
+++ b/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraRtcEngine.h
@@ -13,6 +13,10 @@
#include "AgoraBase.h"
#include "IAgoraService.h"
+#if defined(_WIN32)
+#include "IAgoraMediaEngine.h"
+#endif
+
namespace agora {
namespace rtc {
typedef unsigned int uid_t;
@@ -145,16 +149,17 @@ enum MEDIA_ENGINE_EVENT_CODE_TYPE
/** The states of the local user's audio mixing file.
*/
enum AUDIO_MIXING_STATE_TYPE{
- /** 710: The audio mixing file is playing.
- */
+ /** 710: The audio mixing file is playing after the method call of
+ * \ref IRtcEngine::startAudioMixing "startAudioMixing" or \ref IRtcEngine::resumeAudioMixing "resumeAudioMixing" succeeds.
+ */
AUDIO_MIXING_STATE_PLAYING = 710,
- /** 711: The audio mixing file pauses playing.
+ /** 711: The audio mixing file pauses playing after the method call of \ref IRtcEngine::pauseAudioMixing "pauseAudioMixing" succeeds.
*/
AUDIO_MIXING_STATE_PAUSED = 711,
- /** 713: The audio mixing file stops playing.
+ /** 713: The audio mixing file stops playing after the method call of \ref IRtcEngine::stopAudioMixing "stopAudioMixing" succeeds.
*/
AUDIO_MIXING_STATE_STOPPED = 713,
- /** 714: An exception occurs when playing the audio mixing file. See #AUDIO_MIXING_ERROR_TYPE.
+ /** 714: An exception occurs during the playback of the audio mixing file. See the `errorCode` for details.
*/
AUDIO_MIXING_STATE_FAILED = 714,
};
@@ -222,31 +227,50 @@ enum MEDIA_DEVICE_TYPE
*/
enum LOCAL_VIDEO_STREAM_STATE
{
- /** Initial state */
+ /** 0: Initial state */
LOCAL_VIDEO_STREAM_STATE_STOPPED = 0,
- /** The capturer starts successfully. */
+ /** 1: The local video capturing device starts successfully.
+ *
+ * The SDK also reports this state when you share a maximized window by calling \ref IRtcEngine::startScreenCaptureByWindowId "startScreenCaptureByWindowId".
+ */
LOCAL_VIDEO_STREAM_STATE_CAPTURING = 1,
- /** The first video frame is successfully encoded. */
+ /** 2: The first video frame is successfully encoded. */
LOCAL_VIDEO_STREAM_STATE_ENCODING = 2,
- /** The local video fails to start. */
+ /** 3: The local video fails to start. */
LOCAL_VIDEO_STREAM_STATE_FAILED = 3
};
/** Local video state error codes
*/
enum LOCAL_VIDEO_STREAM_ERROR {
- /** The local video is normal. */
+ /** 0: The local video is normal. */
LOCAL_VIDEO_STREAM_ERROR_OK = 0,
- /** No specified reason for the local video failure. */
+ /** 1: No specified reason for the local video failure. */
LOCAL_VIDEO_STREAM_ERROR_FAILURE = 1,
- /** No permission to use the local video capturing device. */
+ /** 2: No permission to use the local video capturing device. */
LOCAL_VIDEO_STREAM_ERROR_DEVICE_NO_PERMISSION = 2,
- /** The local video capturing device is in use. */
+ /** 3: The local video capturing device is in use. */
LOCAL_VIDEO_STREAM_ERROR_DEVICE_BUSY = 3,
- /** The local video capture fails. Check whether the capturing device is working properly. */
+ /** 4: The local video capture fails. Check whether the capturing device is working properly. */
LOCAL_VIDEO_STREAM_ERROR_CAPTURE_FAILURE = 4,
- /** The local video encoding fails. */
- LOCAL_VIDEO_STREAM_ERROR_ENCODE_FAILURE = 5
+ /** 5: The local video encoding fails. */
+ LOCAL_VIDEO_STREAM_ERROR_ENCODE_FAILURE = 5,
+ /** 11: The shared window is minimized when you call \ref IRtcEngine::startScreenCaptureByWindowId "startScreenCaptureByWindowId" to share a window.
+ */
+ LOCAL_VIDEO_STREAM_ERROR_SCREEN_CAPTURE_WINDOW_MINIMIZED = 11,
+ /** 12: The error code indicates that a window shared by the window ID has been closed, or a full-screen window
+ * shared by the window ID has exited full-screen mode.
+ * After exiting full-screen mode, remote users cannot see the shared window. To prevent remote users from seeing a
+ * black screen, Agora recommends that you immediately stop screen sharing.
+ *
+ * Common scenarios for reporting this error code:
+ * - When the local user closes the shared window, the SDK reports this error code.
+ * - The local user shows some slides in full-screen mode first, and then shares the windows of the slides. After
+ * the user exits full-screen mode, the SDK reports this error code.
+ * - The local user watches a web video or reads a web document in full-screen mode first, and then shares the window of
+ * the web video or document. After the user exits full-screen mode, the SDK reports this error code.
+ */
+ LOCAL_VIDEO_STREAM_ERROR_SCREEN_CAPTURE_WINDOW_CLOSED = 12,
};
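/* Illustrative handling (not part of the SDK header): the window-capture error
 * codes above are reported through the local video state callback. The handler
 * below is a sketch of one possible reaction; `rtcEngine` is an assumption.
 *
 *   void onLocalVideoStateChanged(LOCAL_VIDEO_STREAM_STATE state,
 *                                 LOCAL_VIDEO_STREAM_ERROR error) override {
 *     if (error == LOCAL_VIDEO_STREAM_ERROR_SCREEN_CAPTURE_WINDOW_CLOSED)
 *       rtcEngine->stopScreenCapture(); // stop sharing so remote users do not see a black screen
 *   }
 */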
/** Local audio state types.
@@ -356,7 +380,7 @@ enum RENDER_MODE_TYPE
/** Video mirror modes. */
enum VIDEO_MIRROR_MODE_TYPE
{
- /** 0: (Default) The SDK enables the mirror mode.
+ /** 0: (Default) The SDK enables the mirror mode.
*/
VIDEO_MIRROR_MODE_AUTO = 0,//determined by SDK
/** 1: Enable mirror mode. */
@@ -397,15 +421,15 @@ enum VIDEO_PROFILE_TYPE
/** 37: 480 * 360, frame rate 30 fps, bitrate 490 Kbps. */
VIDEO_PROFILE_LANDSCAPE_360P_8 = 37,
/** 38: 640 * 360, frame rate 15 fps, bitrate 800 Kbps.
- @note Live broadcast profile only.
+ @note `LIVE_BROADCASTING` profile only.
*/
VIDEO_PROFILE_LANDSCAPE_360P_9 = 38,
/** 39: 640 * 360, frame rate 24 fps, bitrate 800 Kbps.
- @note Live broadcast profile only.
+ @note `LIVE_BROADCASTING` profile only.
*/
VIDEO_PROFILE_LANDSCAPE_360P_10 = 39,
/** 100: 640 * 360, frame rate 24 fps, bitrate 1000 Kbps.
- @note Live broadcast profile only.
+ @note `LIVE_BROADCASTING` profile only.
*/
VIDEO_PROFILE_LANDSCAPE_360P_11 = 100,
/** 40: 640 * 480, frame rate 15 fps, bitrate 500 Kbps. */
@@ -473,15 +497,15 @@ enum VIDEO_PROFILE_TYPE
/** 1037: 360 * 480, frame rate 30 fps, bitrate 490 Kbps. */
VIDEO_PROFILE_PORTRAIT_360P_8 = 1037,
/** 1038: 360 * 640, frame rate 15 fps, bitrate 800 Kbps.
- @note Live broadcast profile only.
+ @note `LIVE_BROADCASTING` profile only.
*/
VIDEO_PROFILE_PORTRAIT_360P_9 = 1038,
/** 1039: 360 * 640, frame rate 24 fps, bitrate 800 Kbps.
- @note Live broadcast profile only.
+ @note `LIVE_BROADCASTING` profile only.
*/
VIDEO_PROFILE_PORTRAIT_360P_10 = 1039,
/** 1100: 360 * 640, frame rate 24 fps, bitrate 1000 Kbps.
- @note Live broadcast profile only.
+ @note `LIVE_BROADCASTING` profile only.
*/
VIDEO_PROFILE_PORTRAIT_360P_11 = 1100,
/** 1040: 480 * 640, frame rate 15 fps, bitrate 500 Kbps. */
@@ -529,10 +553,12 @@ enum VIDEO_PROFILE_TYPE
Sets the sample rate, bitrate, encoding mode, and the number of channels:*/
enum AUDIO_PROFILE_TYPE // sample rate, bit rate, mono/stereo, speech/music codec
{
- /**
- 0: Default audio profile:
- - For the live-broadcast profile: A sample rate of 48 KHz, music encoding, mono, and a bitrate of up to 52 Kbps.
- - For the communication profile: A sample rate of 16 KHz, music encoding, mono, and a bitrate of up to 16 Kbps.
+ /**
+ 0: Default audio profile:
+ - For the interactive streaming profile: A sample rate of 48 KHz, music encoding, mono, and a bitrate of up to 64 Kbps.
+ - For the `COMMUNICATION` profile:
+ - Windows: A sample rate of 16 KHz, music encoding, mono, and a bitrate of up to 16 Kbps.
+ - Android/macOS/iOS: A sample rate of 32 KHz, music encoding, mono, and a bitrate of up to 18 Kbps.
*/
AUDIO_PROFILE_DEFAULT = 0, // use default settings
/**
@@ -540,19 +566,19 @@ enum AUDIO_PROFILE_TYPE // sample rate, bit rate, mono/stereo, speech/music code
*/
AUDIO_PROFILE_SPEECH_STANDARD = 1, // 32Khz, 18Kbps, mono, speech
/**
- 2: A sample rate of 48 KHz, music encoding, mono, and a bitrate of up to 48 Kbps.
+ 2: A sample rate of 48 KHz, music encoding, mono, and a bitrate of up to 64 Kbps.
*/
AUDIO_PROFILE_MUSIC_STANDARD = 2, // 48Khz, 48Kbps, mono, music
/**
- 3: A sample rate of 48 KHz, music encoding, stereo, and a bitrate of up to 56 Kbps.
+ 3: A sample rate of 48 KHz, music encoding, stereo, and a bitrate of up to 80 Kbps.
*/
AUDIO_PROFILE_MUSIC_STANDARD_STEREO = 3, // 48Khz, 56Kbps, stereo, music
/**
- 4: A sample rate of 48 KHz, music encoding, mono, and a bitrate of up to 128 Kbps.
+ 4: A sample rate of 48 KHz, music encoding, mono, and a bitrate of up to 96 Kbps.
*/
AUDIO_PROFILE_MUSIC_HIGH_QUALITY = 4, // 48Khz, 128Kbps, mono, music
/**
- 5: A sample rate of 48 KHz, music encoding, stereo, and a bitrate of up to 192 Kbps.
+ 5: A sample rate of 48 KHz, music encoding, stereo, and a bitrate of up to 128 Kbps.
*/
AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO = 5, // 48Khz, 192Kbps, stereo, music
/**
@@ -566,48 +592,91 @@ enum AUDIO_PROFILE_TYPE // sample rate, bit rate, mono/stereo, speech/music code
*/
enum AUDIO_SCENARIO_TYPE // set a suitable scenario for your app type
{
- /** 0: Default. */
+ /** 0: Default audio scenario. */
AUDIO_SCENARIO_DEFAULT = 0,
- /** 1: Entertainment scenario, supporting voice during gameplay. */
+ /** 1: Entertainment scenario where users need to frequently switch the user role. */
AUDIO_SCENARIO_CHATROOM_ENTERTAINMENT = 1,
- /** 2: Education scenario, prioritizing smoothness and stability. */
+ /** 2: Education scenario where users want smoothness and stability. */
AUDIO_SCENARIO_EDUCATION = 2,
- /** 3: Live gaming scenario, enabling the gaming audio effects in the speaker mode in a live broadcast scenario. Choose this scenario for high-fidelity music playback. */
+ /** 3: High-quality audio chatroom scenario where hosts mainly play music. */
AUDIO_SCENARIO_GAME_STREAMING = 3,
- /** 4: Showroom scenario, optimizing the audio quality with external professional equipment. */
+ /** 4: Showroom scenario where a single host wants high-quality audio. */
AUDIO_SCENARIO_SHOWROOM = 4,
- /** 5: Gaming scenario. */
+ /** 5: Gaming scenario for group chat that only contains the human voice. */
AUDIO_SCENARIO_CHATROOM_GAMING = 5,
- /** 6: Applicable to the IoT scenario. */
+ /** 6: IoT (Internet of Things) scenario where users use IoT devices with low power consumption. */
AUDIO_SCENARIO_IOT = 6,
- AUDIO_SCENARIO_NUM = 7,
+ /** 8: Meeting scenario that mainly contains the human voice.
+ *
+ * @since v3.2.0
+ */
+ AUDIO_SCENARIO_MEETING = 8,
+ /** The number of elements in the enumeration.
+ */
+ AUDIO_SCENARIO_NUM = 9,
};
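/* Illustrative usage (not part of the SDK header): the profile and scenario are
 * passed together to setAudioProfile. The combination below is only an example
 * for a music-heavy room; `rtcEngine` is an assumption.
 *
 *   rtcEngine->setAudioProfile(AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO,
 *                              AUDIO_SCENARIO_GAME_STREAMING);
 */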
- /** The channel profile of the IRtcEngine.
+ /** The channel profile.
*/
enum CHANNEL_PROFILE_TYPE
{
- /** (Default) The Communication profile. Use this profile in one-on-one calls or group calls, where all users can talk freely.
+ /** (Default) Communication. This profile applies to scenarios such as an audio call or video call,
+ * where all users can publish and subscribe to streams.
*/
- CHANNEL_PROFILE_COMMUNICATION = 0,
- /** The Live-Broadcast profile. Users in a live-broadcast channel have a role as either broadcaster or audience.
- A broadcaster can both send and receive streams; an audience can only receive streams.
+ CHANNEL_PROFILE_COMMUNICATION = 0,
+ /** Live streaming. In this profile, users have roles, namely, host and audience (default).
+ * A host both publishes and subscribes to streams, while an audience subscribes to streams only.
+ * This profile applies to scenarios such as a chat room or interactive video streaming.
*/
- CHANNEL_PROFILE_LIVE_BROADCASTING = 1,
- /** 2: The Gaming profile. This profile uses a codec with a lower bitrate and consumes less power. Applies to the gaming scenario, where all game players can talk freely.
+ CHANNEL_PROFILE_LIVE_BROADCASTING = 1,
+ /** 2: Gaming. This profile uses a codec with a lower bitrate and consumes less power. Applies to the gaming scenario, where all game players can talk freely.
+ *
+ * @note Agora does not recommend using this setting.
*/
CHANNEL_PROFILE_GAME = 2,
};
-
-/** Client roles in a live broadcast. */
+/// @cond
+/** The role of a user in a live interactive streaming. */
enum CLIENT_ROLE_TYPE
{
- /** 1: Broadcaster. A broadcaster can both send and receive streams. */
+ /** 1: Host. A host can both send and receive streams. */
CLIENT_ROLE_BROADCASTER = 1,
- /** 2: Audience, the default role. An audience can only receive streams. */
+ /** 2: (Default) Audience. An `audience` member can only receive streams. */
CLIENT_ROLE_AUDIENCE = 2,
};
+/** The latency level of an audience member in a live interactive streaming.
+ *
+ * @note Takes effect only when the user role is `CLIENT_ROLE_AUDIENCE`.
+ */
+enum AUDIENCE_LATENCY_LEVEL_TYPE
+{
+ /** 1: Low latency. */
+ AUDIENCE_LATENCY_LEVEL_LOW_LATENCY = 1,
+ /** 2: (Default) Ultra low latency. */
+ AUDIENCE_LATENCY_LEVEL_ULTRA_LOW_LATENCY = 2,
+};
+/// @cond
+/** The reason why the super-resolution algorithm is not successfully enabled.
+ */
+enum SUPER_RESOLUTION_STATE_REASON
+{
+ /** 0: The super-resolution algorithm is successfully enabled.
+ */
+ SR_STATE_REASON_SUCCESS = 0,
+ /** 1: The original resolution of the remote video is beyond the range where
+ * the super-resolution algorithm can be applied.
+ */
+ SR_STATE_REASON_STREAM_OVER_LIMITATION = 1,
+ /** 2: Another user is already using the super-resolution algorithm.
+ */
+ SR_STATE_REASON_USER_COUNT_OVER_LIMITATION = 2,
+ /** 3: The device does not support the super-resolution algorithm.
+ */
+ SR_STATE_REASON_DEVICE_NOT_SUPPORTED = 3,
+};
+/// @endcond
+
/** Reasons for a user being offline. */
enum USER_OFFLINE_REASON_TYPE
{
@@ -615,7 +684,7 @@ enum USER_OFFLINE_REASON_TYPE
USER_OFFLINE_QUIT = 0,
/** 1: The SDK times out and the user drops offline because no data packet is received within a certain period of time. If the user quits the call and the message is not passed to the SDK (due to an unreliable channel), the SDK assumes the user dropped offline. */
USER_OFFLINE_DROPPED = 1,
- /** 2: (Live broadcast only.) The client role switched from the host to the audience. */
+ /** 2: (`LIVE_BROADCASTING` only.) The client role switched from the host to the audience. */
USER_OFFLINE_BECOME_AUDIENCE = 2,
};
/**
@@ -672,7 +741,15 @@ enum RTMP_STREAM_PUBLISH_ERROR
RTMP_STREAM_PUBLISH_ERROR_FORMAT_NOT_SUPPORTED = 10,
};
-/** States of importing an external video stream in a live broadcast. */
+/** Events during the RTMP streaming. */
+enum RTMP_STREAMING_EVENT
+{
+ /** An error occurs when you add a background image or a watermark image to the RTMP stream.
+ */
+ RTMP_STREAMING_EVENT_FAILED_LOAD_IMAGE = 1,
+};
+
+/** States of importing an external video stream in the live interactive streaming. */
enum INJECT_STREAM_STATUS
{
/** 0: The external video stream imported successfully. */
@@ -736,7 +813,7 @@ enum VIDEO_CODEC_PROFILE_TYPE
VIDEO_CODEC_PROFILE_BASELINE = 66,
/** 77: Main video codec profile. Generally used in mainstream electronics such as MP4 players, portable video players, PSP, and iPads. */
VIDEO_CODEC_PROFILE_MAIN = 77,
- /** 100: (Default) High video codec profile. Generally used in high-resolution broadcasts or television. */
+ /** 100: (Default) High video codec profile. Generally used in high-resolution live streaming or television. */
VIDEO_CODEC_PROFILE_HIGH = 100,
};
@@ -752,6 +829,13 @@ enum VIDEO_CODEC_TYPE {
VIDEO_CODEC_E264 = 4,
};
+/** Video Codec types for publishing streams. */
+enum VIDEO_CODEC_TYPE_FOR_STREAM
+{
+ /** 1: H.264. */
+ VIDEO_CODEC_H264_FOR_STREAM = 1,
+ /** 2: H.265. */
+ VIDEO_CODEC_H265_FOR_STREAM = 2,
+};
+
/** Audio equalization band frequencies. */
enum AUDIO_EQUALIZATION_BAND_FREQUENCY
{
@@ -793,6 +877,8 @@ enum AUDIO_REVERB_TYPE
};
/**
+ * @deprecated Deprecated from v3.2.0.
+ *
* Local voice changer options.
*/
enum VOICE_CHANGER_PRESET {
@@ -875,7 +961,10 @@ enum VOICE_CHANGER_PRESET {
};
-/** Local voice reverberation presets. */
+/** @deprecated Deprecated from v3.2.0.
+ *
+ * Local voice reverberation presets.
+ */
enum AUDIO_REVERB_PRESET {
/**
* Turn off local voice reverberation, that is, to use the original voice.
@@ -947,51 +1036,250 @@ enum AUDIO_REVERB_PRESET {
* To achieve better virtual stereo reverberation, Agora recommends setting `profile` in `setAudioProfile`
* as `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)`.
*/
- AUDIO_VIRTUAL_STEREO = 0x00200001
+ AUDIO_VIRTUAL_STEREO = 0x00200001,
+ /** Electronic Voice. */
+ AUDIO_ELECTRONIC_VOICE = 0x00300001,
+ /** 3D Voice. */
+ AUDIO_THREEDIM_VOICE = 0x00400001
+};
+/** The options for SDK preset voice beautifier effects.
+ */
+enum VOICE_BEAUTIFIER_PRESET
+{
+ /** Turn off voice beautifier effects and use the original voice.
+ */
+ VOICE_BEAUTIFIER_OFF = 0x00000000,
+ /** A more magnetic voice.
+ *
+ * @note Agora recommends using this enumerator to process a male-sounding voice; otherwise, you may experience vocal distortion.
+ */
+ CHAT_BEAUTIFIER_MAGNETIC = 0x01010100,
+ /** A fresher voice.
+ *
+ * @note Agora recommends using this enumerator to process a female-sounding voice; otherwise, you may experience vocal distortion.
+ */
+ CHAT_BEAUTIFIER_FRESH = 0x01010200,
+ /** A more vital voice.
+ *
+ * @note Agora recommends using this enumerator to process a female-sounding voice; otherwise, you may experience vocal distortion.
+ */
+ CHAT_BEAUTIFIER_VITALITY = 0x01010300,
+ /** A more vigorous voice.
+ */
+ TIMBRE_TRANSFORMATION_VIGOROUS = 0x01030100,
+ /** A deeper voice.
+ */
+ TIMBRE_TRANSFORMATION_DEEP = 0x01030200,
+ /** A mellower voice.
+ */
+ TIMBRE_TRANSFORMATION_MELLOW = 0x01030300,
+ /** A falsetto voice.
+ */
+ TIMBRE_TRANSFORMATION_FALSETTO = 0x01030400,
+ /** A fuller voice.
+ */
+ TIMBRE_TRANSFORMATION_FULL = 0x01030500,
+ /** A clearer voice.
+ */
+ TIMBRE_TRANSFORMATION_CLEAR = 0x01030600,
+ /** A more resounding voice.
+ */
+ TIMBRE_TRANSFORMATION_RESOUNDING = 0x01030700,
+ /** A more ringing voice.
+ */
+ TIMBRE_TRANSFORMATION_RINGING = 0x01030800
+};
+/** The options for SDK preset audio effects.
+ */
+enum AUDIO_EFFECT_PRESET
+{
+ /** Turn off audio effects and use the original voice.
+ */
+ AUDIO_EFFECT_OFF = 0x00000000,
+ /** An audio effect typical of a KTV venue.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile"
+ * and setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)`
+ * before setting this enumerator.
+ */
+ ROOM_ACOUSTICS_KTV = 0x02010100,
+ /** An audio effect typical of a concert hall.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile"
+ * and setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)`
+ * before setting this enumerator.
+ */
+ ROOM_ACOUSTICS_VOCAL_CONCERT = 0x02010200,
+ /** An audio effect typical of a recording studio.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile"
+ * and setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)`
+ * before setting this enumerator.
+ */
+ ROOM_ACOUSTICS_STUDIO = 0x02010300,
+ /** An audio effect typical of a vintage phonograph.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile"
+ * and setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)`
+ * before setting this enumerator.
+ */
+ ROOM_ACOUSTICS_PHONOGRAPH = 0x02010400,
+ /** A virtual stereo effect that renders monophonic audio as stereo audio.
+ *
+ * @note Call \ref IRtcEngine::setAudioProfile "setAudioProfile" and set the `profile` parameter to
+ * `AUDIO_PROFILE_MUSIC_STANDARD_STEREO(3)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before setting this
+ * enumerator; otherwise, the enumerator setting does not take effect.
+ */
+ ROOM_ACOUSTICS_VIRTUAL_STEREO = 0x02010500,
+ /** A more spatial audio effect.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile"
+ * and setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)`
+ * before setting this enumerator.
+ */
+ ROOM_ACOUSTICS_SPACIAL = 0x02010600,
+ /** A more ethereal audio effect.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile"
+ * and setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)`
+ * before setting this enumerator.
+ */
+ ROOM_ACOUSTICS_ETHEREAL = 0x02010700,
+ /** A 3D voice effect that makes the voice appear to be moving around the user. The default cycle period of the 3D
+ * voice effect is 10 seconds. To change the cycle period, call \ref IRtcEngine::setAudioEffectParameters "setAudioEffectParameters"
+ * after this method.
+ *
+ * @note
+ * - Call \ref IRtcEngine::setAudioProfile "setAudioProfile" and set the `profile` parameter to `AUDIO_PROFILE_MUSIC_STANDARD_STEREO(3)`
+ * or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before setting this enumerator; otherwise, the enumerator setting does not take effect.
+ * - If the 3D voice effect is enabled, users need to use stereo audio playback devices to hear the anticipated voice effect.
+ */
+ ROOM_ACOUSTICS_3D_VOICE = 0x02010800,
+ /** The voice of an uncle.
+ *
+ * @note
+ * - Agora recommends using this enumerator to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect.
+ * - To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and
+ * setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before
+ * setting this enumerator.
+ */
+ VOICE_CHANGER_EFFECT_UNCLE = 0x02020100,
+ /** The voice of an old man.
+ *
+ * @note
+ * - Agora recommends using this enumerator to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect.
+ * - To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and setting
+ * the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before setting
+ * this enumerator.
+ */
+ VOICE_CHANGER_EFFECT_OLDMAN = 0x02020200,
+ /** The voice of a boy.
+ *
+ * @note
+ * - Agora recommends using this enumerator to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect.
+ * - To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and setting
+ * the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before
+ * setting this enumerator.
+ */
+ VOICE_CHANGER_EFFECT_BOY = 0x02020300,
+ /** The voice of a young woman.
+ *
+ * @note
+ * - Agora recommends using this enumerator to process a female-sounding voice; otherwise, you may not hear the anticipated voice effect.
+ * - To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and setting
+ * the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before
+ * setting this enumerator.
+ */
+ VOICE_CHANGER_EFFECT_SISTER = 0x02020400,
+ /** The voice of a girl.
+ *
+ * @note
+ * - Agora recommends using this enumerator to process a female-sounding voice; otherwise, you may not hear the anticipated voice effect.
+ * - To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and setting
+ * the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before
+ * setting this enumerator.
+ */
+ VOICE_CHANGER_EFFECT_GIRL = 0x02020500,
+ /** The voice of Pig King, a character in Journey to the West who has a voice like a growling bear.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and
+ * setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before
+ * setting this enumerator.
+ */
+ VOICE_CHANGER_EFFECT_PIGKING = 0x02020600,
+ /** The voice of Hulk.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and
+ * setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before
+ * setting this enumerator.
+ */
+ VOICE_CHANGER_EFFECT_HULK = 0x02020700,
+ /** An audio effect typical of R&B music.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and
+ * setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before
+ * setting this enumerator.
+ */
+ STYLE_TRANSFORMATION_RNB = 0x02030100,
+ /** An audio effect typical of popular music.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and
+ * setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before
+ * setting this enumerator.
+ */
+ STYLE_TRANSFORMATION_POPULAR = 0x02030200,
+ /** A pitch correction effect that corrects the user's pitch based on the pitch of the natural C major scale.
+ * To change the basic mode and tonic pitch, call \ref IRtcEngine::setAudioEffectParameters "setAudioEffectParameters" after this method.
+ *
+ * @note To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and
+ * setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before
+ * setting this enumerator.
+ */
+ PITCH_CORRECTION = 0x02040100
};
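/* Illustrative usage (not part of the SDK header): several presets above expect
 * a stereo or high-quality audio profile to be set first, and ROOM_ACOUSTICS_3D_VOICE
 * can be tuned afterwards with setAudioEffectParameters (here a 30-second cycle;
 * the values and `rtcEngine` are assumptions).
 *
 *   rtcEngine->setAudioProfile(AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO,
 *                              AUDIO_SCENARIO_GAME_STREAMING);
 *   rtcEngine->setAudioEffectPreset(ROOM_ACOUSTICS_3D_VOICE);
 *   rtcEngine->setAudioEffectParameters(ROOM_ACOUSTICS_3D_VOICE, 30, 0);
 */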
/** Audio codec profile types. The default value is LC_ACC. */
enum AUDIO_CODEC_PROFILE_TYPE
{
/** 0: LC-AAC, which is the low-complexity audio codec type. */
- AUDIO_CODEC_PROFILE_LC_AAC = 0,
+ AUDIO_CODEC_PROFILE_LC_AAC = 0,
/** 1: HE-AAC, which is the high-efficiency audio codec type. */
- AUDIO_CODEC_PROFILE_HE_AAC = 1,
+ AUDIO_CODEC_PROFILE_HE_AAC = 1,
};
/** Remote audio states.
*/
enum REMOTE_AUDIO_STATE
{
- /** 0: The remote audio is in the default state, probably due to
- * #REMOTE_AUDIO_REASON_LOCAL_MUTED (3),
- * #REMOTE_AUDIO_REASON_REMOTE_MUTED (5), or
- * #REMOTE_AUDIO_REASON_REMOTE_OFFLINE (7).
- */
- REMOTE_AUDIO_STATE_STOPPED = 0, // Default state, audio is started or remote user disabled/muted audio stream
- /** 1: The first remote audio packet is received.
- */
- REMOTE_AUDIO_STATE_STARTING = 1, // The first audio frame packet has been received
- /** 2: The remote audio stream is decoded and plays normally, probably
- * due to #REMOTE_AUDIO_REASON_NETWORK_RECOVERY (2),
- * #REMOTE_AUDIO_REASON_LOCAL_UNMUTED (4), or
- * #REMOTE_AUDIO_REASON_REMOTE_UNMUTED (6).
- */
- REMOTE_AUDIO_STATE_DECODING = 2, // The first remote audio frame has been decoded or fronzen state ends
- /** 3: The remote audio is frozen, probably due to
- * #REMOTE_AUDIO_REASON_NETWORK_CONGESTION (1).
- */
- REMOTE_AUDIO_STATE_FROZEN = 3, // Remote audio is frozen, probably due to network issue
- /** 4: The remote audio fails to start, probably due to
- * #REMOTE_AUDIO_REASON_INTERNAL (0).
- */
- REMOTE_AUDIO_STATE_FAILED = 4, // Remote audio play failed
+ /** 0: The remote audio is in the default state, probably due to
+ * #REMOTE_AUDIO_REASON_LOCAL_MUTED (3),
+ * #REMOTE_AUDIO_REASON_REMOTE_MUTED (5), or
+ * #REMOTE_AUDIO_REASON_REMOTE_OFFLINE (7).
+ */
+ REMOTE_AUDIO_STATE_STOPPED = 0, // Default state, audio is started or remote user disabled/muted audio stream
+ /** 1: The first remote audio packet is received.
+ */
+ REMOTE_AUDIO_STATE_STARTING = 1, // The first audio frame packet has been received
+ /** 2: The remote audio stream is decoded and plays normally, probably
+ * due to #REMOTE_AUDIO_REASON_NETWORK_RECOVERY (2),
+ * #REMOTE_AUDIO_REASON_LOCAL_UNMUTED (4), or
+ * #REMOTE_AUDIO_REASON_REMOTE_UNMUTED (6).
+ */
+ REMOTE_AUDIO_STATE_DECODING = 2, // The first remote audio frame has been decoded or frozen state ends
+ /** 3: The remote audio is frozen, probably due to
+ * #REMOTE_AUDIO_REASON_NETWORK_CONGESTION (1).
+ */
+ REMOTE_AUDIO_STATE_FROZEN = 3, // Remote audio is frozen, probably due to network issue
+ /** 4: The remote audio fails to start, probably due to
+ * #REMOTE_AUDIO_REASON_INTERNAL (0).
+ */
+ REMOTE_AUDIO_STATE_FAILED = 4, // Remote audio play failed
};
/** Remote audio state reasons.
*/
enum REMOTE_AUDIO_STATE_REASON
{
- /** 0: Internal reasons.
+ /** 0: The SDK reports this reason when the audio state changes.
*/
REMOTE_AUDIO_REASON_INTERNAL = 0,
/** 1: Network congestion.
@@ -1054,10 +1342,83 @@ enum REMOTE_VIDEO_STATE {
*/
REMOTE_VIDEO_STATE_FAILED = 4
};
+/** The publishing state.
+ */
+enum STREAM_PUBLISH_STATE {
+ /** 0: The initial publishing state after joining the channel.
+ */
+ PUB_STATE_IDLE = 0,
+ /** 1: Fails to publish the local stream. Possible reasons:
+ * - The local user calls \ref IRtcEngine::muteLocalAudioStream "muteLocalAudioStream(true)" or \ref IRtcEngine::muteLocalVideoStream "muteLocalVideoStream(true)" to stop sending local streams.
+ * - The local user calls \ref IRtcEngine::disableAudio "disableAudio" or \ref IRtcEngine::disableVideo "disableVideo" to disable the entire audio or video module.
+ * - The local user calls \ref IRtcEngine::enableLocalAudio "enableLocalAudio(false)" or \ref IRtcEngine::enableLocalVideo "enableLocalVideo(false)" to disable the local audio sampling or video capturing.
+ * - The role of the local user is `AUDIENCE`.
+ */
+ PUB_STATE_NO_PUBLISHED = 1,
+ /** 2: Publishing.
+ */
+ PUB_STATE_PUBLISHING = 2,
+ /** 3: Publishes successfully.
+ */
+ PUB_STATE_PUBLISHED = 3
+};
+/** The subscribing state.
+ */
+enum STREAM_SUBSCRIBE_STATE {
+ /** 0: The initial subscribing state after joining the channel.
+ */
+ SUB_STATE_IDLE = 0,
+ /** 1: Fails to subscribe to the remote stream. Possible reasons:
+ * - The remote user:
+ * - Calls \ref IRtcEngine::muteLocalAudioStream "muteLocalAudioStream(true)" or \ref IRtcEngine::muteLocalVideoStream "muteLocalVideoStream(true)" to stop sending local streams.
+ * - Calls \ref IRtcEngine::disableAudio "disableAudio" or \ref IRtcEngine::disableVideo "disableVideo" to disable the entire audio or video modules.
+ * - Calls \ref IRtcEngine::enableLocalAudio "enableLocalAudio(false)" or \ref IRtcEngine::enableLocalVideo "enableLocalVideo(false)" to disable the local audio sampling or video capturing.
+ * - The role of the remote user is `AUDIENCE`.
+ * - The local user calls the following methods to stop receiving remote streams:
+ * - Calls \ref IRtcEngine::muteRemoteAudioStream "muteRemoteAudioStream(true)", \ref IRtcEngine::muteAllRemoteAudioStreams "muteAllRemoteAudioStreams(true)", or \ref IRtcEngine::setDefaultMuteAllRemoteAudioStreams "setDefaultMuteAllRemoteAudioStreams(true)" to stop receiving remote audio streams.
+ * - Calls \ref IRtcEngine::muteRemoteVideoStream "muteRemoteVideoStream(true)", \ref IRtcEngine::muteAllRemoteVideoStreams "muteAllRemoteVideoStreams(true)", or \ref IRtcEngine::setDefaultMuteAllRemoteVideoStreams "setDefaultMuteAllRemoteVideoStreams(true)" to stop receiving remote video streams.
+ */
+ SUB_STATE_NO_SUBSCRIBED = 1,
+ /** 2: Subscribing.
+ */
+ SUB_STATE_SUBSCRIBING = 2,
+ /** 3: Subscribes to and receives the remote stream successfully.
+ */
+ SUB_STATE_SUBSCRIBED = 3
+};
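/* Illustrative handling (not part of the SDK header): the publishing and
 * subscribing states above are delivered through the publish/subscribe state
 * callbacks of the event handler. The callback name and signature below are an
 * assumption based on the v3.1+ event handler; verify against IRtcEngineEventHandler.
 *
 *   void onAudioPublishStateChanged(const char* channel,
 *                                   STREAM_PUBLISH_STATE oldState,
 *                                   STREAM_PUBLISH_STATE newState,
 *                                   int elapseSinceLastState) override {
 *     if (newState == PUB_STATE_PUBLISHED) {
 *       // the local audio stream is now published in `channel`
 *     }
 *   }
 */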
+
+/** The remote video frozen type. */
+enum XLA_REMOTE_VIDEO_FROZEN_TYPE {
+ /** 0: 500ms video frozen type.
+ */
+ XLA_REMOTE_VIDEO_FROZEN_500MS = 0,
+ /** 1: 200ms video frozen type.
+ */
+ XLA_REMOTE_VIDEO_FROZEN_200MS = 1,
+ /** 2: 600ms video frozen type.
+ */
+ XLA_REMOTE_VIDEO_FROZEN_600MS = 2,
+ /** 3: max video frozen type.
+ */
+ XLA_REMOTE_VIDEO_FROZEN_TYPE_MAX = 3,
+};
-/** The reason of the remote video state change. */
+/** The remote audio frozen type. */
+enum XLA_REMOTE_AUDIO_FROZEN_TYPE {
+ /** 0: 80ms audio frozen.
+ */
+ XLA_REMOTE_AUDIO_FROZEN_80MS = 0,
+ /** 1: 200ms audio frozen.
+ */
+ XLA_REMOTE_AUDIO_FROZEN_200MS = 1,
+ /** 2: max audio frozen type.
+ */
+ XLA_REMOTE_AUDIO_FROZEN_TYPE_MAX = 2,
+};
+
+/** The reason for the remote video state change. */
enum REMOTE_VIDEO_STATE_REASON {
- /** 0: Internal reasons.
+ /** 0: The SDK reports this reason when the video state changes.
*/
REMOTE_VIDEO_STATE_REASON_INTERNAL = 0,
@@ -1089,11 +1450,11 @@ enum REMOTE_VIDEO_STATE_REASON {
*/
REMOTE_VIDEO_STATE_REASON_REMOTE_OFFLINE = 7,
- /** 8: The remote media stream falls back to the audio-only stream due to poor network conditions.
+ /** 8: The remote audio-and-video stream falls back to the audio-only stream due to poor network conditions.
*/
REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK = 8,
- /** 9: The remote media stream switches back to the video stream after the network conditions improve.
+ /** 9: The remote audio-only stream switches back to the audio-and-video stream after the network conditions improve.
*/
REMOTE_VIDEO_STATE_REASON_AUDIO_FALLBACK_RECOVERY = 9
@@ -1256,7 +1617,13 @@ enum CONNECTION_CHANGED_REASON_TYPE
CONNECTION_CHANGED_INVALID_TOKEN = 8,
/** 9: The connection failed since token is expired. */
CONNECTION_CHANGED_TOKEN_EXPIRED = 9,
- /** 10: The connection is rejected by server. */
+ /** 10: The connection is rejected by server. This error usually occurs in the following situations:
+ * - When the user is already in the channel, and still calls the method to join the channel, for example,
+ * \ref IRtcEngine::joinChannel "joinChannel".
+ * - When the user tries to join a channel during \ref IRtcEngine::startEchoTest "startEchoTest". Once you
+ * call \ref IRtcEngine::startEchoTest "startEchoTest", you need to call \ref IRtcEngine::stopEchoTest "stopEchoTest" before joining a channel.
+ *
+ */
CONNECTION_CHANGED_REJECTED_BY_SERVER = 10,
/** 11: The connection changed to reconnecting since SDK has set a proxy server. */
CONNECTION_CHANGED_SETTING_PROXY_SERVER = 11,
@@ -1319,16 +1686,16 @@ enum AUDIO_ROUTE_TYPE {
/** Bluetooth headset.
*/
AUDIO_ROUTE_BLUETOOTH = 5,
- /** USB peripheral.
+ /** USB peripheral (macOS only).
*/
AUDIO_ROUTE_USB = 6,
- /** HDMI peripheral.
+ /** HDMI peripheral (macOS only).
*/
AUDIO_ROUTE_HDMI = 7,
- /** DisplayPort peripheral.
+ /** DisplayPort peripheral (macOS only).
*/
AUDIO_ROUTE_DISPLAYPORT = 8,
- /** Apple AirPlay.
+ /** Apple AirPlay (macOS only).
*/
AUDIO_ROUTE_AIRPLAY = 9,
};
@@ -1382,7 +1749,7 @@ struct LastmileProbeResult{
/** Configurations of the last-mile network probe test. */
struct LastmileProbeConfig {
- /** Sets whether or not to test the uplink network. Some users, for example, the audience in a Live-broadcast channel, do not need such a test:
+ /** Sets whether or not to test the uplink network. Some users, for example, the audience in a `LIVE_BROADCASTING` channel, do not need such a test:
- true: test.
- false: do not test. */
bool probeUplink;
@@ -1412,7 +1779,7 @@ struct AudioVolumeInfo
/** Voice activity status of the local user.
* - 0: The local user is not speaking.
* - 1: The local user is speaking.
- *
+ *
* @note
* - The `vad` parameter cannot report the voice activity status of the remote users. In the remote users' callback, `vad` = 0.
* - Ensure that you set `report_vad`(true) in the \ref agora::rtc::IRtcEngine::enableAudioVolumeIndication(int, int, bool) "enableAudioVolumeIndication" method to enable the voice activity detection of the local user.
@@ -1422,24 +1789,35 @@ struct AudioVolumeInfo
*/
const char * channelId;
};
-
+/// @cond
+/** The detailed options of a user.
+ */
+struct ClientRoleOptions
+{
+ /** The latency level of an audience member in a live interactive streaming. See #AUDIENCE_LATENCY_LEVEL_TYPE.
+ */
+ AUDIENCE_LATENCY_LEVEL_TYPE audienceLatencyLevel;
+ ClientRoleOptions()
+ : audienceLatencyLevel(AUDIENCE_LATENCY_LEVEL_ULTRA_LOW_LATENCY) {}
+};
+/// @endcond
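/* Illustrative usage (not part of the SDK header): ClientRoleOptions is passed to
 * the two-parameter setClientRole overload so an audience member can choose a
 * latency level; the overload and `rtcEngine` are assumptions based on the v3.2.0 API.
 *
 *   ClientRoleOptions options;
 *   options.audienceLatencyLevel = AUDIENCE_LATENCY_LEVEL_LOW_LATENCY;
 *   rtcEngine->setClientRole(CLIENT_ROLE_AUDIENCE, options);
 */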
/** Statistics of the channel.
*/
struct RtcStats
{
- /**
- Call duration (s), represented by an aggregate value.
- */
+ /**
+ * Call duration of the local user in seconds, represented by an aggregate value.
+ */
unsigned int duration;
/**
- Total number of bytes transmitted, represented by an aggregate value.
+ * Total number of bytes transmitted, represented by an aggregate value.
*/
unsigned int txBytes;
/**
- Total number of bytes received, represented by an aggregate value.
+ * Total number of bytes received, represented by an aggregate value.
*/
unsigned int rxBytes;
- /** Total number of audio bytes sent (bytes), represented
+ /** Total number of audio bytes sent (bytes), represented
* by an aggregate value.
*/
unsigned int txAudioBytes;
@@ -1457,27 +1835,27 @@ struct RtcStats
unsigned int rxVideoBytes;
/**
- Transmission bitrate (Kbps), represented by an instantaneous value.
+ * Transmission bitrate (Kbps), represented by an instantaneous value.
*/
unsigned short txKBitRate;
/**
- Receive bitrate (Kbps), represented by an instantaneous value.
+ * Receive bitrate (Kbps), represented by an instantaneous value.
*/
unsigned short rxKBitRate;
/**
- Audio receive bitrate (Kbps), represented by an instantaneous value.
+ * Audio receive bitrate (Kbps), represented by an instantaneous value.
*/
unsigned short rxAudioKBitRate;
/**
- Audio transmission bitrate (Kbps), represented by an instantaneous value.
+ * Audio transmission bitrate (Kbps), represented by an instantaneous value.
*/
unsigned short txAudioKBitRate;
/**
- Video receive bitrate (Kbps), represented by an instantaneous value.
+ * Video receive bitrate (Kbps), represented by an instantaneous value.
*/
unsigned short rxVideoKBitRate;
/**
- Video transmission bitrate (Kbps), represented by an instantaneous value.
+ * Video transmission bitrate (Kbps), represented by an instantaneous value.
*/
unsigned short txVideoKBitRate;
/** Client-server latency (ms)
@@ -1492,22 +1870,21 @@ struct RtcStats
*/
unsigned short rxPacketLossRate;
/** Number of users in the channel.
-
- - Communication profile: The number of users in the channel.
- - Live broadcast profile:
-
- - If the local user is an audience: The number of users in the channel = The number of hosts in the channel + 1.
- - If the user is a host: The number of users in the channel = The number of hosts in the channel.
+ *
+ * - `COMMUNICATION` profile: The number of users in the channel.
+ * - `LIVE_BROADCASTING` profile:
+ * - If the local user is an audience: The number of users in the channel = The number of hosts in the channel + 1.
+ * - If the user is a host: The number of users in the channel = The number of hosts in the channel.
*/
unsigned int userCount;
/**
- Application CPU usage (%).
+ * Application CPU usage (%).
*/
double cpuAppUsage;
/**
System CPU usage (%).
- In the multi-kernel environment, this member represents the average CPU usage.
+ In the multi-kernel environment, this member represents the average CPU usage.
The value **=** 100 **-** System Idle Progress in Task Manager (%).
*/
double cpuTotalUsage;
@@ -1515,18 +1892,21 @@ struct RtcStats
*/
int gatewayRtt;
/**
- The memory usage ratio of the app (%).
- @note This value is for reference only. Due to system limitations, you may not get the value of this member.
+ * The memory usage ratio of the app (%).
+ *
+ * @note This value is for reference only. Due to system limitations, you may not get the value of this member.
*/
double memoryAppUsageRatio;
/**
- The memory usage ratio of the system (%).
- @note This value is for reference only. Due to system limitations, you may not get the value of this member.
+ * The memory usage ratio of the system (%).
+ *
+ * @note This value is for reference only. Due to system limitations, you may not get the value of this member.
*/
double memoryTotalUsageRatio;
/**
- The memory usage of the app (KB).
- @note This value is for reference only. Due to system limitations, you may not get the value of this member.
+ * The memory usage of the app (KB).
+ *
+ * @note This value is for reference only. Due to system limitations, you may not get the value of this member.
*/
int memoryAppUsageInKbytes;
RtcStats()
@@ -1574,9 +1954,15 @@ enum CHANNEL_MEDIA_RELAY_ERROR {
/** 1: An error occurs in the server response.
*/
RELAY_ERROR_SERVER_ERROR_RESPONSE = 1,
- /** 2: No server response. You can call the
+ /** 2: No server response.
+ *
+ * You can call the
* \ref agora::rtc::IRtcEngine::leaveChannel "leaveChannel" method to
* leave the channel.
+ *
+ * This error can also occur if your project has not enabled co-host token
+ * authentication. Contact support@agora.io to enable the co-host token
+ * authentication service before starting a channel media relay.
*/
RELAY_ERROR_SERVER_NO_RESPONSE = 2,
/** 3: The SDK fails to access the service, probably due to limited
@@ -1655,7 +2041,9 @@ enum CHANNEL_MEDIA_RELAY_EVENT {
/** The state code in CHANNEL_MEDIA_RELAY_STATE. */
enum CHANNEL_MEDIA_RELAY_STATE {
- /** 0: The SDK is initializing.
+ /** 0: The initial state. After you successfully stop the channel media
+ * relay by calling \ref IRtcEngine::stopChannelMediaRelay "stopChannelMediaRelay",
+ * the \ref IRtcEngineEventHandler::onChannelMediaRelayStateChanged "onChannelMediaRelayStateChanged" callback returns this state.
*/
RELAY_STATE_IDLE = 0,
/** 1: The SDK tries to relay the media stream to the destination channel.
@@ -1716,64 +2104,73 @@ struct LocalVideoStats
* - VIDEO_CODEC_H264 = 2: (Default) H.264.
*/
VIDEO_CODEC_TYPE codecType;
+ /** The video packet loss rate (%) from the local client to the Agora edge server before applying the anti-packet loss strategies.
+ */
+ unsigned short txPacketLossRate;
+ /** The capture frame rate (fps) of the local video.
+ */
+ int captureFrameRate;
};
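The two new LocalVideoStats members can be read from the onLocalVideoStats callback referenced later in this header. A minimal sketch, assuming the SDK header is available:

#include <cstdio>
#include "IAgoraRtcEngine.h"

class LocalVideoStatsLogger : public agora::rtc::IRtcEngineEventHandler {
public:
    void onLocalVideoStats(const agora::rtc::LocalVideoStats& stats) override {
        // txPacketLossRate and captureFrameRate are the fields added in this change.
        std::printf("uplink loss before FEC: %d%%, capture rate: %d fps\n",
                    stats.txPacketLossRate, stats.captureFrameRate);
    }
};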
/** Statistics of the remote video stream.
*/
struct RemoteVideoStats
{
- /**
+/**
User ID of the remote user sending the video streams.
*/
- uid_t uid;
- /** **DEPRECATED** Time delay (ms).
- *
- * In scenarios where audio and video is synchronized, you can use the value of
- * `networkTransportDelay` and `jitterBufferDelay` in `RemoteAudioStats` to know the delay statistics of the remote video.
- */
- int delay;
-/**
- Width (pixels) of the video stream.
+uid_t uid;
+/** **DEPRECATED** Time delay (ms).
+ *
+ * In scenarios where audio and video are synchronized, you can use the value of
+ * `networkTransportDelay` and `jitterBufferDelay` in `RemoteAudioStats` to know the delay statistics of the remote video.
*/
- int width;
- /**
+int delay;
+/** Width (pixels) of the video stream.
+ */
+int width;
+/**
Height (pixels) of the video stream.
*/
- int height;
- /**
+int height;
+/**
Bitrate (Kbps) received since the last count.
*/
- int receivedBitrate;
- /** The decoder output frame rate (fps) of the remote video.
- */
- int decoderOutputFrameRate;
- /** The render output frame rate (fps) of the remote video.
- */
- int rendererOutputFrameRate;
- /** Packet loss rate (%) of the remote video stream after using the anti-packet-loss method.
- */
- int packetLossRate;
- /** The type of the remote video stream: #REMOTE_VIDEO_STREAM_TYPE
- */
- REMOTE_VIDEO_STREAM_TYPE rxStreamType;
- /**
- The total freeze time (ms) of the remote video stream after the remote user joins the channel.
- In a video session where the frame rate is set to no less than 5 fps, video freeze occurs when
- the time interval between two adjacent renderable video frames is more than 500 ms.
- */
- int totalFrozenTime;
- /**
- The total video freeze time as a percentage (%) of the total time when the video is available.
- */
- int frozenRate;
- /**
- The total time (ms) when the remote user in the Communication profile or the remote
- broadcaster in the Live-broadcast profile neither stops sending the video stream nor
- disables the video module after joining the channel.
+int receivedBitrate;
+/** The decoder output frame rate (fps) of the remote video.
+ */
+int decoderOutputFrameRate;
+/** The render output frame rate (fps) of the remote video.
+ */
+int rendererOutputFrameRate;
+/** Packet loss rate (%) of the remote video stream after using the anti-packet-loss method.
+ */
+int packetLossRate;
+/** The type of the remote video stream: #REMOTE_VIDEO_STREAM_TYPE
+ */
+REMOTE_VIDEO_STREAM_TYPE rxStreamType;
+/**
+ The total freeze time (ms) of the remote video stream after the remote user joins the channel.
+ In a video session where the frame rate is set to no less than 5 fps, video freeze occurs when
+ the time interval between two adjacent renderable video frames is more than 500 ms.
+ */
+int totalFrozenTime;
+/**
+ The total video freeze time as a percentage (%) of the total time when the video is available.
+ */
+int frozenRate;
+/**
+ The total time (ms) when the remote user in the `COMMUNICATION` profile or the remote
+ host in the `LIVE_BROADCASTING` profile neither stops sending the video stream nor
+ disables the video module after joining the channel.
- @since v3.0.1
- */
- int totalActiveTime;
+ @since v3.0.1
+*/
+int totalActiveTime;
+/**
+ * The total publish duration (ms) of the remote video stream.
+ */
+int publishDuration;
};
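A hedged sketch of reading the new publishDuration field together with the freeze metrics, assuming the onRemoteVideoStats callback defined elsewhere in this header:

#include <cstdio>
#include "IAgoraRtcEngine.h"

class RemoteVideoStatsLogger : public agora::rtc::IRtcEngineEventHandler {
public:
    void onRemoteVideoStats(const agora::rtc::RemoteVideoStats& stats) override {
        // publishDuration is new; with totalFrozenTime it gives a rough picture of
        // how long the remote stream has been up and how often it stalled.
        std::printf("uid %u: published %d ms, frozen %d ms (%d%%)\n",
                    stats.uid, stats.publishDuration, stats.totalFrozenTime, stats.frozenRate);
    }
};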
/** Audio statistics of the local user */
@@ -1788,6 +2185,9 @@ struct LocalAudioStats
/** The average sending bitrate (Kbps).
*/
int sentBitrate;
+ /** The audio packet loss rate (%) from the local client to the Agora edge server before applying the anti-packet loss strategies.
+ */
+ unsigned short txPacketLossRate;
};
/** Audio statistics of a remote user */
@@ -1824,10 +2224,14 @@ struct RemoteAudioStats
int totalFrozenTime;
/** The total audio freeze time as a percentage (%) of the total time when the audio is available. */
int frozenRate;
- /** The total time (ms) when the remote user in the Communication profile or the remote broadcaster in
- the Live-broadcast profile neither stops sending the audio stream nor disables the audio module after joining the channel.
+ /** The total time (ms) when the remote user in the `COMMUNICATION` profile or the remote host in
+ the `LIVE_BROADCASTING` profile neither stops sending the audio stream nor disables the audio module after joining the channel.
*/
int totalActiveTime;
+ /**
+ * The total publish duration (ms) of the remote audio stream.
+ */
+ int publishDuration;
};
/**
@@ -1848,17 +2252,17 @@ struct VideoDimensions {
/** (Recommended) The standard bitrate set in the \ref IRtcEngine::setVideoEncoderConfiguration "setVideoEncoderConfiguration" method.
- In this mode, the bitrates differ between the live broadcast and communication profiles:
+ In this mode, the bitrates differ between the live interactive streaming and communication profiles:
- - Communication profile: The video bitrate is the same as the base bitrate.
- - Live broadcast profile: The video bitrate is twice the base bitrate.
+ - `COMMUNICATION` profile: The video bitrate is the same as the base bitrate.
+ - `LIVE_BROADCASTING` profile: The video bitrate is twice the base bitrate.
*/
const int STANDARD_BITRATE = 0;
/** The compatible bitrate set in the \ref IRtcEngine::setVideoEncoderConfiguration "setVideoEncoderConfiguration" method.
- The bitrate remains the same regardless of the channel profile. If you choose this mode in the Live-broadcast profile, the video frame rate may be lower than the set value.
+ The bitrate remains the same regardless of the channel profile. If you choose this mode in the `LIVE_BROADCASTING` profile, the video frame rate may be lower than the set value.
*/
const int COMPATIBLE_BITRATE = -1;
@@ -1885,16 +2289,16 @@ struct VideoEncoderConfiguration {
Choose one of the following options:
- #STANDARD_BITRATE: (Recommended) The standard bitrate.
- - The Communication profile: the encoding bitrate equals the base bitrate.
- - The Live-broadcast profile: the encoding bitrate is twice the base bitrate.
+ - the `COMMUNICATION` profile: the encoding bitrate equals the base bitrate.
+ - the `LIVE_BROADCASTING` profile: the encoding bitrate is twice the base bitrate.
- #COMPATIBLE_BITRATE: The compatible bitrate: the bitrate stays the same regardless of the profile.
- The Communication profile prioritizes smoothness, while the Live-broadcast profile prioritizes video quality (requiring a higher bitrate). We recommend setting the bitrate mode as #STANDARD_BITRATE to address this difference.
+     The `COMMUNICATION` profile prioritizes smoothness, while the `LIVE_BROADCASTING` profile prioritizes video quality (requiring a higher bitrate). We recommend setting the bitrate mode as #STANDARD_BITRATE to address this difference.
- The following table lists the recommended video encoder configurations, where the base bitrate applies to the Communication profile. Set your bitrate based on this table. If you set a bitrate beyond the proper range, the SDK automatically sets it to within the range.
+ The following table lists the recommended video encoder configurations, where the base bitrate applies to the `COMMUNICATION` profile. Set your bitrate based on this table. If you set a bitrate beyond the proper range, the SDK automatically sets it to within the range.
@note
- In the following table, **Base Bitrate** applies to the Communication profile, and **Live Bitrate** applies to the Live-broadcast profile.
+ In the following table, **Base Bitrate** applies to the `COMMUNICATION` profile, and **Live Bitrate** applies to the `LIVE_BROADCASTING` profile.
| Resolution | Frame Rate (fps) | Base Bitrate (Kbps) | Live Bitrate (Kbps) |
|------------------------|------------------|----------------------------------------|----------------------------------------|
@@ -1937,7 +2341,7 @@ struct VideoEncoderConfiguration {
The SDK automatically adjusts the encoding bitrate to adapt to the network conditions. Using a value greater than the default value forces the video encoder to output high-quality images but may cause more packet loss and hence sacrifice the smoothness of the video transmission. That said, unless you have special requirements for image quality, Agora does not recommend changing this value.
- @note This parameter applies only to the Live-broadcast profile.
+ @note This parameter applies only to the `LIVE_BROADCASTING` profile.
*/
int minBitrate;
/** The video orientation mode of the video: #ORIENTATION_MODE.
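A short sketch of picking the recommended STANDARD_BITRATE mode described above, assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`; FRAME_RATE_FPS_15 is assumed from the FRAME_RATE enum defined elsewhere in this header.

agora::rtc::VideoEncoderConfiguration config;
config.dimensions = agora::rtc::VideoDimensions(640, 360);
config.frameRate = agora::rtc::FRAME_RATE_FPS_15;
// STANDARD_BITRATE: the base bitrate in COMMUNICATION, twice the base bitrate in LIVE_BROADCASTING.
config.bitrate = agora::rtc::STANDARD_BITRATE;
rtcEngine->setVideoEncoderConfiguration(config);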
@@ -2017,12 +2421,12 @@ typedef struct TranscodingUser {
double alpha;
/** The audio channel of the sound. The default value is 0:
- - 0: (Default) Supports dual channels at most, depending on the upstream of the broadcaster.
- - 1: The audio stream of the broadcaster uses the FL audio channel. If the upstream of the broadcaster uses multiple audio channels, these channels are mixed into mono first.
- - 2: The audio stream of the broadcaster uses the FC audio channel. If the upstream of the broadcaster uses multiple audio channels, these channels are mixed into mono first.
- - 3: The audio stream of the broadcaster uses the FR audio channel. If the upstream of the broadcaster uses multiple audio channels, these channels are mixed into mono first.
- - 4: The audio stream of the broadcaster uses the BL audio channel. If the upstream of the broadcaster uses multiple audio channels, these channels are mixed into mono first.
- - 5: The audio stream of the broadcaster uses the BR audio channel. If the upstream of the broadcaster uses multiple audio channels, these channels are mixed into mono first.
+ - 0: (Default) Supports dual channels at most, depending on the upstream of the host.
+ - 1: The audio stream of the host uses the FL audio channel. If the upstream of the host uses multiple audio channels, these channels are mixed into mono first.
+ - 2: The audio stream of the host uses the FC audio channel. If the upstream of the host uses multiple audio channels, these channels are mixed into mono first.
+ - 3: The audio stream of the host uses the FR audio channel. If the upstream of the host uses multiple audio channels, these channels are mixed into mono first.
+ - 4: The audio stream of the host uses the BL audio channel. If the upstream of the host uses multiple audio channels, these channels are mixed into mono first.
+ - 5: The audio stream of the host uses the BR audio channel. If the upstream of the host uses multiple audio channels, these channels are mixed into mono first.
@note If your setting is not 0, you may need a specialized player.
*/
@@ -2052,18 +2456,40 @@ typedef struct RtcImage {
width(0),
height(0)
{}
- /** HTTP/HTTPS URL address of the image on the broadcasting video. The maximum length of this parameter is 1024 bytes. */
+ /** HTTP/HTTPS URL address of the image on the live video. The maximum length of this parameter is 1024 bytes. */
const char* url;
- /** Horizontal position of the image from the upper left of the broadcasting video. */
+ /** Horizontal position of the image from the upper left of the live video. */
int x;
- /** Vertical position of the image from the upper left of the broadcasting video. */
+ /** Vertical position of the image from the upper left of the live video. */
int y;
- /** Width of the image on the broadcasting video. */
+ /** Width of the image on the live video. */
int width;
- /** Height of the image on the broadcasting video. */
+ /** Height of the image on the live video. */
int height;
} RtcImage;
+/// @cond
+/** The configuration for advanced features of the RTMP streaming with transcoding.
+ */
+typedef struct LiveStreamAdvancedFeature {
+ LiveStreamAdvancedFeature() : featureName(NULL) , opened(false) {
+ }
+
+ /** The advanced feature for high-quality video with a lower bitrate. */
+ const char* LBHQ = "lbhq";
+ /** The advanced feature for the optimized video encoder. */
+ const char* VEO = "veo";
+
+ /** The name of the advanced feature. It contains LBHQ and VEO.
+ */
+ const char* featureName;
+ /** Whether to enable the advanced feature:
+ * - true: Enable the advanced feature.
+ * - false: (Default) Disable the advanced feature.
+ */
+ bool opened;
+} LiveStreamAdvancedFeature;
+/// @endcond
/** A struct for managing CDN live audio/video transcoding settings.
*/
typedef struct LiveTranscoding {
@@ -2079,12 +2505,12 @@ typedef struct LiveTranscoding {
int height;
/** Bitrate of the CDN live output video stream. The default value is 400 Kbps.
- Set this parameter according to the Video Bitrate Table. If you set a bitrate beyond the proper range, the SDK automatically adapts it to a value within the range.
+ Set this parameter according to the Video Bitrate Table. If you set a bitrate beyond the proper range, the SDK automatically adapts it to a value within the range.
*/
int videoBitrate;
- /** Frame rate of the output video stream set for the CDN live broadcast. The default value is 15 fps, and the value range is (0,30].
+ /** Frame rate of the output video stream set for the CDN live streaming. The default value is 15 fps, and the value range is (0,30].
- @note The Agora server adjusts any value over 30 to 30.
+ @note The Agora server adjusts any value over 30 to 30.
*/
int videoFramerate;
@@ -2100,13 +2526,17 @@ typedef struct LiveTranscoding {
int videoGop;
/** Self-defined video codec profile: #VIDEO_CODEC_PROFILE_TYPE.
- @note If you set this parameter to other values, Agora adjusts it to the default value of 100.
+ @note If you set this parameter to other values, Agora adjusts it to the default value of 100.
*/
VIDEO_CODEC_PROFILE_TYPE videoCodecProfile;
     /** The background color in RGB hex value. Value only. Do not include a preceding #. For example, 0xFFB6C1 (light pink). The default value is 0x000000 (black).
*/
unsigned int backgroundColor;
- /** The number of users in the live broadcast.
+
+    /** The video codec type of the output video stream. See #VIDEO_CODEC_TYPE_FOR_STREAM. */
+ VIDEO_CODEC_TYPE_FOR_STREAM videoCodecType;
+
+ /** The number of users in the live interactive streaming.
*/
unsigned int userCount;
/** TranscodingUser
@@ -2123,7 +2553,7 @@ typedef struct LiveTranscoding {
const char *metadata;
/** The watermark image added to the CDN live publishing stream.
- Ensure that the format of the image is PNG. Once a watermark image is added, the audience of the CDN live publishing stream can see the watermark image. See RtcImage.
+ Ensure that the format of the image is PNG. Once a watermark image is added, the audience of the CDN live publishing stream can see the watermark image. See RtcImage.
*/
RtcImage* watermark;
/** The background image added to the CDN live publishing stream.
@@ -2150,8 +2580,16 @@ typedef struct LiveTranscoding {
*/
AUDIO_CODEC_PROFILE_TYPE audioCodecProfile;
+ /// @cond
+ /** Advanced features of the RTMP streaming with transcoding. See LiveStreamAdvancedFeature.
+ *
+ * @since v3.1.0
+ */
+ LiveStreamAdvancedFeature* advancedFeatures;
-
+ /** The number of enabled advanced features. The default value is 0. */
+ unsigned int advancedFeatureCount;
+ /// @endcond
LiveTranscoding()
: width(360)
, height(640)
@@ -2161,6 +2599,7 @@ typedef struct LiveTranscoding {
, videoGop(30)
, videoCodecProfile(VIDEO_CODEC_PROFILE_HIGH)
, backgroundColor(0x000000)
+ , videoCodecType(VIDEO_CODEC_H264_FOR_STREAM)
, userCount(0)
, transcodingUsers(NULL)
, transcodingExtraInfo(NULL)
@@ -2171,6 +2610,8 @@ typedef struct LiveTranscoding {
, audioBitrate(48)
, audioChannels(1)
, audioCodecProfile(AUDIO_CODEC_PROFILE_LC_AAC)
+ , advancedFeatures(NULL)
+ , advancedFeatureCount(0)
{}
} LiveTranscoding;
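A hedged sketch of wiring the new advancedFeatures fields into a transcoding configuration. `rtcEngine` and the setLiveTranscoding call are assumed from the rest of the SDK and are not part of this hunk, and the host uid is a placeholder.

agora::rtc::LiveTranscoding transcoding;
transcoding.width = 640;
transcoding.height = 360;
transcoding.videoBitrate = 800;     // keep within the recommended bitrate table
transcoding.videoFramerate = 24;    // the server clamps values above 30 to 30

agora::rtc::uid_t localUid = 0;     // placeholder: uid of the host placed on the canvas
agora::rtc::TranscodingUser host;
host.uid = localUid;
host.width = 640;
host.height = 360;
transcoding.userCount = 1;
transcoding.transcodingUsers = &host;

// The advanced features are still behind @cond; enabling one is shown for illustration only.
agora::rtc::LiveStreamAdvancedFeature feature;
feature.featureName = feature.LBHQ;
feature.opened = true;
transcoding.advancedFeatures = &feature;
transcoding.advancedFeatureCount = 1;

rtcEngine->setLiveTranscoding(transcoding);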
@@ -2186,37 +2627,38 @@ typedef struct LiveTranscoding {
#endif
};
-/** Configuration of the imported live broadcast voice or video stream.
+/** Configuration of the injected media stream.
*/
struct InjectStreamConfig {
- /** Width of the added stream in the live broadcast. The default value is 0 (same width as the original stream).
+ /** Width of the injected stream in the live interactive streaming. The default value is 0 (same width as the original stream).
*/
int width;
- /** Height of the added stream in the live broadcast. The default value is 0 (same height as the original stream).
+ /** Height of the injected stream in the live interactive streaming. The default value is 0 (same height as the original stream).
*/
int height;
- /** Video GOP of the added stream in the live broadcast in frames. The default value is 30 fps.
+ /** Video GOP (in frames) of the injected stream in the live interactive streaming. The default value is 30 fps.
*/
int videoGop;
- /** Video frame rate of the added stream in the live broadcast. The default value is 15 fps.
+ /** Video frame rate of the injected stream in the live interactive streaming. The default value is 15 fps.
*/
int videoFramerate;
- /** Video bitrate of the added stream in the live broadcast. The default value is 400 Kbps.
+ /** Video bitrate of the injected stream in the live interactive streaming. The default value is 400 Kbps.
@note The setting of the video bitrate is closely linked to the resolution. If the video bitrate you set is beyond a reasonable range, the SDK sets it within a reasonable range.
*/
int videoBitrate;
- /** Audio-sample rate of the added stream in the live broadcast: #AUDIO_SAMPLE_RATE_TYPE. The default value is 48000 Hz.
+ /** Audio-sample rate of the injected stream in the live interactive streaming: #AUDIO_SAMPLE_RATE_TYPE. The default value is 48000 Hz.
@note We recommend setting the default value.
*/
AUDIO_SAMPLE_RATE_TYPE audioSampleRate;
- /** Audio bitrate of the added stream in the live broadcast. The default value is 48.
+ /** Audio bitrate of the injected stream in the live interactive streaming. The default value is 48.
@note We recommend setting the default value.
*/
int audioBitrate;
- /** Audio channels in the live broadcast.
+ /** Audio channels in the live interactive streaming.
+
- 1: (Default) Mono
- 2: Two-channel stereo
@@ -2240,15 +2682,15 @@ struct InjectStreamConfig {
/** The definition of ChannelMediaInfo.
*/
struct ChannelMediaInfo {
- /** The channel name.
+ /** The channel name.
*/
- const char* channelName;
+ const char* channelName;
/** The token that enables the user to join the channel.
*/
- const char* token;
+ const char* token;
/** The user ID.
*/
- uid_t uid;
+ uid_t uid;
};
/** The definition of ChannelMediaRelayConfiguration.
@@ -2256,26 +2698,29 @@ struct ChannelMediaInfo {
struct ChannelMediaRelayConfiguration {
/** Pointer to the information of the source channel: ChannelMediaInfo. It contains the following members:
* - `channelName`: The name of the source channel. The default value is `NULL`, which means the SDK applies the name of the current channel.
- * - `uid`: ID of the broadcaster whose media stream you want to relay. The default value is 0, which means the SDK generates a random UID. You must set it as 0.
+ * - `uid`: The unique ID to identify the relay stream in the source channel. The default value is 0, which means the SDK generates a random UID. You must set it as 0.
* - `token`: The token for joining the source channel. It is generated with the `channelName` and `uid` you set in `srcInfo`.
* - If you have not enabled the App Certificate, set this parameter as the default value `NULL`, which means the SDK applies the App ID.
* - If you have enabled the App Certificate, you must use the `token` generated with the `channelName` and `uid`, and the `uid` must be set as 0.
*/
- ChannelMediaInfo *srcInfo;
+ ChannelMediaInfo *srcInfo;
/** Pointer to the information of the destination channel: ChannelMediaInfo. It contains the following members:
* - `channelName`: The name of the destination channel.
- * - `uid`: ID of the broadcaster in the destination channel. The value ranges from 0 to (232-1). To avoid UID conflicts, this `uid` must be different from any other UIDs in the destination channel. The default value is 0, which means the SDK generates a random UID.
+ * - `uid`: The unique ID to identify the relay stream in the destination channel. The value ranges from 0 to (2^32-1).
+ * To avoid UID conflicts, this `uid` must be different from any other UIDs in the destination channel. The default
+ * value is 0, which means the SDK generates a random UID. Do not set this parameter as the `uid` of the host in
+ * the destination channel, and ensure that this `uid` is different from any other `uid` in the channel.
* - `token`: The token for joining the destination channel. It is generated with the `channelName` and `uid` you set in `destInfos`.
* - If you have not enabled the App Certificate, set this parameter as the default value `NULL`, which means the SDK applies the App ID.
* - If you have enabled the App Certificate, you must use the `token` generated with the `channelName` and `uid`.
*/
- ChannelMediaInfo *destInfos;
+ ChannelMediaInfo *destInfos;
/** The number of destination channels. The default value is 0, and the
* value range is [0,4). Ensure that the value of this parameter
* corresponds to the number of ChannelMediaInfo structs you define in
* `destInfos`.
*/
- int destCount;
+ int destCount;
ChannelMediaRelayConfiguration()
: srcInfo(nullptr)
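A minimal sketch of filling in the relay configuration described in the hunk above, assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`; the destination channel name is a placeholder.

agora::rtc::ChannelMediaInfo srcInfo;
srcInfo.channelName = NULL;            // NULL: use the current channel name
srcInfo.uid = 0;                       // must be 0, per the srcInfo description
srcInfo.token = NULL;                  // NULL if the App Certificate is not enabled

agora::rtc::ChannelMediaInfo destInfo;
destInfo.channelName = "destChannel";  // placeholder destination channel
destInfo.uid = 0;                      // 0: let the SDK generate a relay uid
destInfo.token = NULL;

agora::rtc::ChannelMediaRelayConfiguration config;
config.srcInfo = &srcInfo;
config.destInfos = &destInfo;
config.destCount = 1;

rtcEngine->startChannelMediaRelay(config);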
@@ -2290,10 +2735,10 @@ enum RTMP_STREAM_LIFE_CYCLE_TYPE
{
/** Bind to the channel lifecycle. If all hosts leave the channel, the CDN live streaming stops after 30 seconds.
*/
- RTMP_STREAM_LIFE_CYCLE_BIND2CHANNEL = 1,
+ RTMP_STREAM_LIFE_CYCLE_BIND2CHANNEL = 1,
/** Bind to the owner of the RTMP stream. If the owner leaves the channel, the CDN live streaming stops immediately.
*/
- RTMP_STREAM_LIFE_CYCLE_BIND2OWNER = 2,
+ RTMP_STREAM_LIFE_CYCLE_BIND2OWNER = 2,
};
/** Content hints for screen sharing.
@@ -2382,34 +2827,48 @@ struct ScreenCaptureParameters
{
/** The maximum encoding dimensions of the shared region in terms of width * height.
- The default value is 1920 * 1080 pixels, that is, 2073600 pixels. Agora uses the value of this parameter to calculate the charges.
+ The default value is 1920 * 1080 pixels, that is, 2073600 pixels. Agora uses the value of this parameter to calculate the charges.
- If the aspect ratio is different between the encoding dimensions and screen dimensions, Agora applies the following algorithms for encoding. Suppose the encoding dimensions are 1920 x 1080:
+ If the aspect ratio is different between the encoding dimensions and screen dimensions, Agora applies the following algorithms for encoding. Suppose the encoding dimensions are 1920 x 1080:
- - If the value of the screen dimensions is lower than that of the encoding dimensions, for example, 1000 * 1000, the SDK uses 1000 * 1000 for encoding.
- - If the value of the screen dimensions is higher than that of the encoding dimensions, for example, 2000 * 1500, the SDK uses the maximum value under 1920 * 1080 with the aspect ratio of the screen dimension (4:3) for encoding, that is, 1440 * 1080.
+ - If the value of the screen dimensions is lower than that of the encoding dimensions, for example, 1000 * 1000, the SDK uses 1000 * 1000 for encoding.
+ - If the value of the screen dimensions is higher than that of the encoding dimensions, for example, 2000 * 1500, the SDK uses the maximum value under 1920 * 1080 with the aspect ratio of the screen dimension (4:3) for encoding, that is, 1440 * 1080.
*/
VideoDimensions dimensions;
/** The frame rate (fps) of the shared region.
- The default value is 5. We do not recommend setting this to a value greater than 15.
+ The default value is 5. We do not recommend setting this to a value greater than 15.
*/
int frameRate;
/** The bitrate (Kbps) of the shared region.
- The default value is 0 (the SDK works out a bitrate according to the dimensions of the current screen).
+ The default value is 0 (the SDK works out a bitrate according to the dimensions of the current screen).
*/
int bitrate;
/** Sets whether or not to capture the mouse for screen sharing:
- - true: (Default) Capture the mouse.
- - false: Do not capture the mouse.
+ - true: (Default) Capture the mouse.
+ - false: Do not capture the mouse.
*/
bool captureMouseCursor;
+ /** Whether to bring the window to the front when calling \ref IRtcEngine::startScreenCaptureByWindowId "startScreenCaptureByWindowId" to share the window:
+ * - true: Bring the window to the front.
+ * - false: (Default) Do not bring the window to the front.
+ */
+ bool windowFocus;
+ /** A list of IDs of windows to be blocked.
+ *
+ * When calling \ref IRtcEngine::startScreenCaptureByScreenRect "startScreenCaptureByScreenRect" to start screen sharing, you can use this parameter to block the specified windows.
+ * When calling \ref IRtcEngine::updateScreenCaptureParameters "updateScreenCaptureParameters" to update the configuration for screen sharing, you can use this parameter to dynamically block the specified windows during screen sharing.
+ */
+ view_t* excludeWindowList;
+ /** The number of windows to be blocked.
+ */
+ int excludeWindowCount;
- ScreenCaptureParameters() : dimensions(1920, 1080), frameRate(5), bitrate(STANDARD_BITRATE), captureMouseCursor(true) {}
- ScreenCaptureParameters(const VideoDimensions& d, int f, int b, bool c) : dimensions(d), frameRate(f), bitrate(b), captureMouseCursor(c) {}
- ScreenCaptureParameters(int width, int height, int f, int b, bool c) : dimensions(width, height), frameRate(f), bitrate(b), captureMouseCursor(c) {}
+ ScreenCaptureParameters() : dimensions(1920, 1080), frameRate(5), bitrate(STANDARD_BITRATE), captureMouseCursor(true), windowFocus(false), excludeWindowList(NULL), excludeWindowCount(0) {}
+ ScreenCaptureParameters(const VideoDimensions& d, int f, int b, bool c, bool focus, view_t *ex = NULL, int cnt = 0) : dimensions(d), frameRate(f), bitrate(b), captureMouseCursor(c), windowFocus(focus), excludeWindowList(ex), excludeWindowCount(cnt) {}
+ ScreenCaptureParameters(int width, int height, int f, int b, bool c, bool focus, view_t *ex = NULL, int cnt = 0) : dimensions(width, height), frameRate(f), bitrate(b), captureMouseCursor(c), windowFocus(focus), excludeWindowList(ex), excludeWindowCount(cnt) {}
};
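A sketch of the new window-exclusion fields, using the two calls referenced in the excludeWindowList description above; `rtcEngine` is an initialized `agora::rtc::IRtcEngine*` and the blocked window handle is a placeholder obtained from the platform.

agora::rtc::view_t blockedWindow = nullptr;           // placeholder: native handle of the window to hide
agora::rtc::Rectangle screenRect(0, 0, 1920, 1080);   // assumption: primary display bounds
agora::rtc::Rectangle regionRect;                     // default: share the whole screen

agora::rtc::ScreenCaptureParameters params;
params.dimensions = agora::rtc::VideoDimensions(1920, 1080);
params.frameRate = 15;
params.captureMouseCursor = true;

agora::rtc::view_t excluded[] = { blockedWindow };    // windows to hide from the capture
params.excludeWindowList = excluded;
params.excludeWindowCount = 1;

rtcEngine->startScreenCaptureByScreenRect(screenRect, regionRect, params);
// The exclusion list can also be changed while sharing:
rtcEngine->updateScreenCaptureParameters(params);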
/** Video display settings of the VideoCanvas class.
@@ -2419,17 +2878,17 @@ struct VideoCanvas
/** Video display window (view).
*/
view_t view;
- /** The rendering mode of the video view. See RENDER_MODE_TYPE
+ /** The rendering mode of the video view. See #RENDER_MODE_TYPE
*/
int renderMode;
/** The unique channel name for the AgoraRTC session in the string format. The string length must be less than 64 bytes. Supported character scopes are:
- - All lowercase English letters: a to z.
- - All uppercase English letters: A to Z.
- - All numeric characters: 0 to 9.
- - The space character.
+ - All lowercase English letters: a to z.
+ - All uppercase English letters: A to Z.
+ - All numeric characters: 0 to 9.
+ - The space character.
- Punctuation characters and other symbols, including: "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", " {", "}", "|", "~", ",".
- @note
+ @note
- The default value is the empty string "". Use the default value if the user joins the channel using the \ref IRtcEngine::joinChannel "joinChannel" method in the IRtcEngine class. The `VideoCanvas` struct defines the video canvas of the user in the channel.
- If the user joins the channel using the \ref IRtcEngine::joinChannel "joinChannel" method in the IChannel class, set this parameter as the `channelId` of the `IChannel` object. The `VideoCanvas` struct defines the video canvas of the user in the channel with the specified channel ID.
*/
@@ -2555,41 +3014,49 @@ struct UserInfo {
};
/**
- * IP areas.
+ * Regions for connection.
*/
enum AREA_CODE {
/**
* Mainland China.
*/
- AREA_CODE_CN = (1 << 0),
+ AREA_CODE_CN = 0x00000001,
/**
* North America.
*/
- AREA_CODE_NA = (1 << 1),
+ AREA_CODE_NA = 0x00000002,
/**
* Europe.
*/
- AREA_CODE_EUR = (1 << 2),
+ AREA_CODE_EU = 0x00000004,
+ /**
+ * Asia, excluding Mainland China.
+ */
+ AREA_CODE_AS = 0x00000008,
+ /**
+ * Japan.
+ */
+ AREA_CODE_JP = 0x00000010,
/**
- * Asia, excluding mainland China.
+ * India.
*/
- AREA_CODE_AS = (1 << 3),
+ AREA_CODE_IN = 0x00000020,
/**
* (Default) Global.
*/
- AREA_CODE_GLOBAL = (0xFFFFFFFF)
+ AREA_CODE_GLOB = 0xFFFFFFFF
};
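The area codes are bit flags that restrict which regions the SDK connects to. A hedged sketch, assuming RtcEngineContext exposes an `areaCode` field in this SDK version; values can be combined as bit masks per the enum above.

agora::rtc::IRtcEngineEventHandler handler;        // default (empty) implementations
agora::rtc::RtcEngineContext context;
context.appId = "<#Your App ID#>";
context.eventHandler = &handler;
// Assumption: the context carries an areaCode field in this version.
context.areaCode = agora::rtc::AREA_CODE_EU;
agora::rtc::IRtcEngine* engine = createAgoraRtcEngine();
engine->initialize(context);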
enum ENCRYPTION_CONFIG {
- /**
- * - 1: Force set master key and mode;
- * - 0: Not force set, checking whether encryption plugin exists
- */
+ /**
+ * - 1: Force set master key and mode;
+ * - 0: Not force set, checking whether encryption plugin exists
+ */
ENCRYPTION_FORCE_SETTING = (1 << 0),
/**
- * - 1: Force not encrypting packet;
- * - 0: Not force encrypting;
- */
+ * - 1: Force not encrypting packet;
+ * - 0: Not force encrypting;
+ */
ENCRYPTION_FORCE_DISABLE_PACKET = (1 << 1)
};
/** Definition of IPacketObserver.
@@ -2599,72 +3066,185 @@ class IPacketObserver
public:
/** Definition of Packet.
*/
- struct Packet
- {
+ struct Packet
+ {
/** Buffer address of the sent or received data.
 * @note Agora recommends that the value of buffer is more than 2048 bytes, otherwise, you may meet undefined behaviors such as a crash.
*/
- const unsigned char* buffer;
+ const unsigned char* buffer;
/** Buffer size of the sent or received data.
*/
- unsigned int size;
- };
- /** Occurs when the local user sends an audio packet.
+ unsigned int size;
+ };
+ /** Occurs when the local user sends an audio packet.
@param packet The sent audio packet. See Packet.
@return
- true: The audio packet is sent successfully.
- false: The audio packet is discarded.
*/
- virtual bool onSendAudioPacket(Packet& packet) = 0;
- /** Occurs when the local user sends a video packet.
+ virtual bool onSendAudioPacket(Packet& packet) = 0;
+ /** Occurs when the local user sends a video packet.
@param packet The sent video packet. See Packet.
@return
- true: The video packet is sent successfully.
- false: The video packet is discarded.
*/
- virtual bool onSendVideoPacket(Packet& packet) = 0;
- /** Occurs when the local user receives an audio packet.
+ virtual bool onSendVideoPacket(Packet& packet) = 0;
+ /** Occurs when the local user receives an audio packet.
@param packet The received audio packet. See Packet.
@return
- true: The audio packet is received successfully.
- false: The audio packet is discarded.
- */
- virtual bool onReceiveAudioPacket(Packet& packet) = 0;
- /** Occurs when the local user receives a video packet.
+ */
+ virtual bool onReceiveAudioPacket(Packet& packet) = 0;
+ /** Occurs when the local user receives a video packet.
@param packet The received video packet. See Packet.
@return
- true: The video packet is received successfully.
- false: The video packet is discarded.
- */
- virtual bool onReceiveVideoPacket(Packet& packet) = 0;
+ */
+ virtual bool onReceiveVideoPacket(Packet& packet) = 0;
};
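A pass-through IPacketObserver sketch; the registration call (registerPacketObserver) is part of IRtcEngine, is not shown in this hunk, and is typically made before joining a channel.

#include "IAgoraRtcEngine.h"

// Forward every packet unchanged. Returning false discards the packet, which is
// how a custom encryption hook would reject data it cannot process.
class PassThroughPacketObserver : public agora::rtc::IPacketObserver {
public:
    bool onSendAudioPacket(Packet& packet) override { return true; }
    bool onSendVideoPacket(Packet& packet) override { return true; }
    bool onReceiveAudioPacket(Packet& packet) override { return true; }
    bool onReceiveVideoPacket(Packet& packet) override { return true; }
};

// Usage (assuming an initialized engine):
//   static PassThroughPacketObserver observer;
//   rtcEngine->registerPacketObserver(&observer);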
-/** The SDK uses the IRtcEngineEventHandler interface class to send callbacks to the application. The application inherits the methods of this interface class to retrieve these callbacks.
- All methods in this interface class have default (empty) implementations. Therefore, the application can only inherit some required events. In the callbacks, avoid time-consuming tasks or calling blocking APIs, such as the SendMessage method. Otherwise, the SDK may not work properly.
+#if defined(_WIN32)
+/** The capture type of the custom video source.
*/
-class IRtcEngineEventHandler
-{
+enum VIDEO_CAPTURE_TYPE {
+ /** Unknown type.
+ */
+ VIDEO_CAPTURE_UNKNOWN,
+ /** (Default) Video captured by the camera.
+ */
+ VIDEO_CAPTURE_CAMERA,
+ /** Video for screen sharing.
+ */
+ VIDEO_CAPTURE_SCREEN,
+};
+
+/** The IVideoFrameConsumer class. The SDK uses it to receive the video frame that you capture.
+ */
+class IVideoFrameConsumer {
public:
- virtual ~IRtcEngineEventHandler() {}
-
- /** Reports a warning during SDK runtime.
+ /** Receives the raw video frame.
+ *
+ * @note Ensure that the video frame type that you specify in this method is the same as that in the \ref agora::rtc::IVideoSource::getBufferType "getBufferType" callback.
+ *
+ * @param buffer The video buffer.
+ * @param frameType The video frame type. See \ref agora::media::ExternalVideoFrame::VIDEO_PIXEL_FORMAT "VIDEO_PIXEL_FORMAT".
+ * @param width The width (px) of the video frame.
+ * @param height The height (px) of the video frame.
+ * @param rotation The angle (degree) at which the video frame rotates clockwise. If you set the rotation angle, the
+ * SDK rotates the video frame after receiving it. You can set the rotation angle as `0`, `90`, `180`, and `270`.
+ * @param timestamp The Unix timestamp (ms) of the video frame. You must set a timestamp for each video frame.
+ */
+ virtual void consumeRawVideoFrame(const unsigned char *buffer, agora::media::ExternalVideoFrame::VIDEO_PIXEL_FORMAT frameType, int width, int height, int rotation, long timestamp) = 0;
+};
- In most cases, the application can ignore the warning reported by the SDK because the SDK can usually fix the issue and resume running. For example, when losing connection with the server, the SDK may report #WARN_LOOKUP_CHANNEL_TIMEOUT and automatically try to reconnect.
+/** The IVideoSource class. You can use it to customize the video source.
+ */
+class IVideoSource {
+public:
+ /** Notification for initializing the custom video source.
+ *
+ * The SDK triggers this callback to remind you to initialize the custom video source. After receiving this callback,
+ * you can do some preparation, such as enabling the camera, and then use the return value to tell the SDK whether the
+ * custom video source is prepared.
+ *
+ * @param consumer An IVideoFrameConsumer object that the SDK passes to you. You need to reserve this object and use it
+ * to send the video frame to the SDK once the custom video source is started. See IVideoFrameConsumer.
+ *
+ * @return
+ * - true: The custom video source is initialized.
+ * - false: The custom video source is not ready or fails to initialize. The SDK stops and reports the error.
+ */
+ virtual bool onInitialize(IVideoFrameConsumer *consumer) = 0;
- @param warn Warning code: #WARN_CODE_TYPE.
- @param msg Pointer to the warning message.
+ /** Notification for disabling the custom video source.
+ *
+ * The SDK triggers this callback to remind you to disable the custom video source device. This callback tells you
+ * that the SDK is about to release the IVideoFrameConsumer object. Ensure that you no longer use IVideoFrameConsumer
+ * after receiving this callback.
*/
- virtual void onWarning(int warn, const char* msg) {
- (void)warn;
- (void)msg;
- }
+ virtual void onDispose() = 0;
- /** Reports an error during SDK runtime.
+ /** Notification for starting the custom video source.
+ *
+ * The SDK triggers this callback to remind you to start the custom video source for capturing video. The SDK uses
+ * IVideoFrameConsumer to receive the video frame that you capture after the video source is started. You must use
+ * the return value to tell the SDK whether the custom video source is started.
+ *
+ * @return
+ * - true: The custom video source is started.
+ * - false: The custom video source fails to start. The SDK stops and reports the error.
+ */
+ virtual bool onStart() = 0;
+
+ /** Notification for stopping capturing video.
+ *
+ * The SDK triggers this callback to remind you to stop capturing video. This callback tells you that the SDK is about
+ * to stop using IVideoFrameConsumer to receive the video frame that you capture.
+ */
+ virtual void onStop() = 0;
+
+ /** Gets the video frame type.
+ *
+ * Before you initialize the custom video source, the SDK triggers this callback to query the video frame type. You
+ * must specify the video frame type in the return value and then pass it to the SDK.
+ *
+ * @note Ensure that the video frame type that you specify in this callback is the same as that in the \ref agora::rtc::IVideoFrameConsumer::consumeRawVideoFrame "consumeRawVideoFrame" method.
+ *
+ * @return \ref agora::media::ExternalVideoFrame::VIDEO_PIXEL_FORMAT "VIDEO_PIXEL_FORMAT"
+ */
+ virtual agora::media::ExternalVideoFrame::VIDEO_PIXEL_FORMAT getBufferType() = 0;
+ /** Gets the capture type of the custom video source.
+ *
+ * Before you initialize the custom video source, the SDK triggers this callback to query the capture type of the video source.
+ * You must specify the capture type in the return value and then pass it to the SDK. The SDK enables the corresponding video
+ * processing algorithm according to the capture type after receiving the video frame.
+ *
+ * @return #VIDEO_CAPTURE_TYPE
+ */
+ virtual VIDEO_CAPTURE_TYPE getVideoCaptureType() = 0;
+ /** Gets the content hint of the custom video source.
+ *
+ * If you specify the custom video source as a screen-sharing video, the SDK triggers this callback to query the
+ * content hint of the video source before you initialize the video source. You must specify the content hint in the
+ * return value and then pass it to the SDK. The SDK enables the corresponding video processing algorithm according
+ * to the content hint after receiving the video frame.
+ *
+ * @return \ref agora::rtc::VideoContentHint "VideoContentHint"
+ */
+ virtual VideoContentHint getVideoContentHint() = 0;
+};
+#endif
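A skeletal Windows custom video source built from the two interfaces above. The CONTENT_HINT_NONE value and the setVideoSource registration call are assumptions taken from elsewhere in the SDK and are not shown in this hunk.

#if defined(_WIN32)
#include "IAgoraRtcEngine.h"

class MyVideoSource : public agora::rtc::IVideoSource {
public:
    bool onInitialize(agora::rtc::IVideoFrameConsumer* consumer) override {
        consumer_ = consumer;   // keep the consumer; it is only valid until onDispose
        return true;
    }
    void onDispose() override { consumer_ = nullptr; }
    bool onStart() override { started_ = true; return true; }
    void onStop() override { started_ = false; }
    agora::media::ExternalVideoFrame::VIDEO_PIXEL_FORMAT getBufferType() override {
        return agora::media::ExternalVideoFrame::VIDEO_PIXEL_I420;   // must match consumeRawVideoFrame
    }
    agora::rtc::VIDEO_CAPTURE_TYPE getVideoCaptureType() override {
        return agora::rtc::VIDEO_CAPTURE_CAMERA;
    }
    agora::rtc::VideoContentHint getVideoContentHint() override {
        return agora::rtc::CONTENT_HINT_NONE;   // assumption: value name from the VideoContentHint enum
    }

    // Called from the app's own capture thread with a raw I420 buffer.
    void pushFrame(const unsigned char* i420, int width, int height, long timestampMs) {
        if (started_ && consumer_)
            consumer_->consumeRawVideoFrame(i420, agora::media::ExternalVideoFrame::VIDEO_PIXEL_I420,
                                            width, height, 0, timestampMs);
    }

private:
    agora::rtc::IVideoFrameConsumer* consumer_ = nullptr;
    bool started_ = false;
};
#endif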
+
+/** The SDK uses the IRtcEngineEventHandler interface class to send callbacks to the application. The application inherits the methods of this interface class to retrieve these callbacks.
+
+ All methods in this interface class have default (empty) implementations. Therefore, the application can only inherit some required events. In the callbacks, avoid time-consuming tasks or calling blocking APIs, such as the SendMessage method. Otherwise, the SDK may not work properly.
+ */
+class IRtcEngineEventHandler
+{
+public:
+ virtual ~IRtcEngineEventHandler() {}
+
+ /** Reports a warning during SDK runtime.
+
+ In most cases, the application can ignore the warning reported by the SDK because the SDK can usually fix the issue and resume running. For example, when losing connection with the server, the SDK may report #WARN_LOOKUP_CHANNEL_TIMEOUT and automatically try to reconnect.
+
+ @param warn Warning code: #WARN_CODE_TYPE.
+ @param msg Pointer to the warning message.
+ */
+ virtual void onWarning(int warn, const char* msg) {
+ (void)warn;
+ (void)msg;
+ }
+
+ /** Reports an error during SDK runtime.
In most cases, the SDK cannot fix the issue and resume running. The SDK requires the application to take action or informs the user about the issue.
@@ -2722,7 +3302,7 @@ class IRtcEngineEventHandler
(void)stats;
}
- /** Occurs when the user role switches in a live broadcast. For example, from a host to an audience or vice versa.
+ /** Occurs when the user role switches in the live interactive streaming. For example, from a host to an audience or vice versa.
This callback notifies the application of a user role switch when the application calls the \ref IRtcEngine::setClientRole "setClientRole" method.
@@ -2733,10 +3313,10 @@ class IRtcEngineEventHandler
virtual void onClientRoleChanged(CLIENT_ROLE_TYPE oldRole, CLIENT_ROLE_TYPE newRole) {
}
- /** Occurs when a remote user (Communication)/ host (Live Broadcast) joins the channel.
+ /** Occurs when a remote user (`COMMUNICATION`)/ host (`LIVE_BROADCASTING`) joins the channel.
- - Communication profile: This callback notifies the application that another user joins the channel. If other users are already in the channel, the SDK also reports to the application on the existing users.
- - Live-broadcast profile: This callback notifies the application that the host joins the channel. If other hosts are already in the channel, the SDK also reports to the application on the existing hosts. We recommend limiting the number of hosts to 17.
+ - `COMMUNICATION` profile: This callback notifies the application that another user joins the channel. If other users are already in the channel, the SDK also reports to the application on the existing users.
+ - `LIVE_BROADCASTING` profile: This callback notifies the application that the host joins the channel. If other hosts are already in the channel, the SDK also reports to the application on the existing hosts. We recommend limiting the number of hosts to 17.
The SDK triggers this callback under one of the following circumstances:
- A remote user/host joins the channel by calling the \ref agora::rtc::IRtcEngine::joinChannel "joinChannel" method.
@@ -2744,7 +3324,7 @@ class IRtcEngineEventHandler
- A remote user/host rejoins the channel after a network interruption.
- The host injects an online media stream into the channel by calling the \ref agora::rtc::IRtcEngine::addInjectStreamUrl "addInjectStreamUrl" method.
- @note In the Live-broadcast profile:
+ @note In the `LIVE_BROADCASTING` profile:
- The host receives this callback when another host joins the channel.
- The audience in the channel receives this callback when a new host joins the channel.
- When a web application joins the channel, the SDK triggers this callback as long as the web application publishes streams.
@@ -2757,7 +3337,7 @@ class IRtcEngineEventHandler
(void)elapsed;
}
- /** Occurs when a remote user (Communication)/host (Live Broadcast) leaves the channel.
+ /** Occurs when a remote user (`COMMUNICATION`)/ host (`LIVE_BROADCASTING`) leaves the channel.
Reasons why the user is offline:
@@ -2843,9 +3423,12 @@ class IRtcEngineEventHandler
/** Occurs when the token expires.
- After a token is specified by calling the \ref IRtcEngine::joinChannel "joinChannel" method, if the SDK losses connection with the Agora server due to network issues, the token may expire after a certain period of time and a new token may be required to reconnect to the server.
+ After a token is specified by calling the \ref IRtcEngine::joinChannel "joinChannel" method, if the SDK losses
+ connection with the Agora server due to network issues, the token may expire after a certain period of time and a
+ new token may be required to reconnect to the server.
- This callback notifies the app to generate a new token and call joinChannel to rejoin the channel with the new token.
+ Once you receive this callback, generate a new token on your app server, and call
+ \ref agora::rtc::IRtcEngine::renewToken "renewToken" to pass the new token to the SDK.
*/
virtual void onRequestToken() {
}
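A minimal sketch of the token-renewal flow described above; fetchTokenFromAppServer is a placeholder for the app's own token service, and the member fields are set by the app when the handler is created.

#include <string>
#include "IAgoraRtcEngine.h"

class TokenRefreshHandler : public agora::rtc::IRtcEngineEventHandler {
public:
    void onRequestToken() override {
        // Placeholder: ask the app server for a new token for this channel and uid.
        std::string token = fetchTokenFromAppServer(channelName_, uid_);
        engine_->renewToken(token.c_str());
    }

private:
    // Placeholder declaration; the app supplies the actual implementation.
    std::string fetchTokenFromAppServer(const std::string& channel, agora::rtc::uid_t uid);
    agora::rtc::IRtcEngine* engine_ = nullptr;
    std::string channelName_;
    agora::rtc::uid_t uid_ = 0;
};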
@@ -2878,10 +3461,10 @@ class IRtcEngineEventHandler
(void)lost;
}
- /** Reports the statistics of the current call.
-
+ /** Reports the statistics of the current call.
+
The SDK triggers this callback once every two seconds after the user joins the channel.
-
+
@param stats Statistics of the IRtcEngine: RtcStats.
*/
virtual void onRtcStats(const RtcStats& stats) {
@@ -2893,7 +3476,7 @@ class IRtcEngineEventHandler
Last mile refers to the connection between the local device and Agora's edge server. This callback reports once every two seconds the last mile network conditions of each user in the channel. If a channel includes multiple users, the SDK triggers this callback as many times.
@param uid User ID. The network quality of the user with this @p uid is reported. If @p uid is 0, the local network quality is reported.
- @param txQuality Uplink transmission quality rating of the user in terms of the transmission bitrate, packet loss rate, average RTT (Round-Trip Time), and jitter of the uplink network. @p txQuality is a quality rating helping you understand how well the current uplink network conditions can support the selected VideoEncoderConfiguration. For example, a 1000 Kbps uplink network may be adequate for video frames with a resolution of 640 * 480 and a frame rate of 15 fps in the Live-broadcast profile, but may be inadequate for resolutions higher than 1280 * 720. See #QUALITY_TYPE.
+ @param txQuality Uplink transmission quality rating of the user in terms of the transmission bitrate, packet loss rate, average RTT (Round-Trip Time), and jitter of the uplink network. @p txQuality is a quality rating helping you understand how well the current uplink network conditions can support the selected VideoEncoderConfiguration. For example, a 1000 Kbps uplink network may be adequate for video frames with a resolution of 640 * 480 and a frame rate of 15 fps in the `LIVE_BROADCASTING` profile, but may be inadequate for resolutions higher than 1280 * 720. See #QUALITY_TYPE.
@param rxQuality Downlink network quality rating of the user in terms of the packet loss rate, average RTT, and jitter of the downlink network. See #QUALITY_TYPE.
*/
virtual void onNetworkQuality(uid_t uid, int txQuality, int rxQuality) {
@@ -2909,9 +3492,9 @@ class IRtcEngineEventHandler
* triggers this callback as many times.
*
* @note
- * If you have called the
- * \ref agora::rtc::IRtcEngine::enableDualStreamMode "enableDualStreamMode"
- * method, the \ref onLocalVideoStats() "onLocalVideoStats" callback
+ * If you have called the
+ * \ref agora::rtc::IRtcEngine::enableDualStreamMode "enableDualStreamMode"
+ * method, the \ref onLocalVideoStats() "onLocalVideoStats" callback
* reports the statistics of the high-video
* stream (high bitrate, and high-resolution video stream).
*
@@ -2958,7 +3541,6 @@ class IRtcEngineEventHandler
}
/** Occurs when the local audio state changes.
- *
* This callback indicates the state change of the local audio stream,
* including the state of the audio recording and encoding, and allows
* you to troubleshoot issues when exceptions occur.
@@ -2977,10 +3559,10 @@ class IRtcEngineEventHandler
}
/** Occurs when the remote audio state changes.
-
+
This callback indicates the state change of the remote audio stream.
- @note This callback does not work properly when the number of users (in the Communication profile) or broadcasters (in the Live-broadcast profile) in the channel exceeds 17.
-
+ @note This callback does not work properly when the number of users (in the `COMMUNICATION` profile) or hosts (in the `LIVE_BROADCASTING` profile) in the channel exceeds 17.
+
@param uid ID of the remote user whose audio state changes.
@param state State of the remote audio. See #REMOTE_AUDIO_STATE.
@param reason The reason of the remote audio state change.
@@ -2996,6 +3578,82 @@ class IRtcEngineEventHandler
(void)elapsed;
}
+ /** Occurs when the audio publishing state changes.
+ *
+ * @since v3.1.0
+ *
+ * This callback indicates the publishing state change of the local audio stream.
+ *
+ * @param channel The channel name.
+ * @param oldState The previous publishing state. For details, see #STREAM_PUBLISH_STATE.
+ * @param newState The current publishing state. For details, see #STREAM_PUBLISH_STATE.
+ * @param elapseSinceLastState The time elapsed (ms) from the previous state to the current state.
+ */
+ virtual void onAudioPublishStateChanged(const char* channel, STREAM_PUBLISH_STATE oldState, STREAM_PUBLISH_STATE newState, int elapseSinceLastState) {
+ (void)channel;
+ (void)oldState;
+ (void)newState;
+ (void)elapseSinceLastState;
+ }
+
+ /** Occurs when the video publishing state changes.
+ *
+ * @since v3.1.0
+ *
+ * This callback indicates the publishing state change of the local video stream.
+ *
+ * @param channel The channel name.
+ * @param oldState The previous publishing state. For details, see #STREAM_PUBLISH_STATE.
+ * @param newState The current publishing state. For details, see #STREAM_PUBLISH_STATE.
+ * @param elapseSinceLastState The time elapsed (ms) from the previous state to the current state.
+ */
+ virtual void onVideoPublishStateChanged(const char* channel, STREAM_PUBLISH_STATE oldState, STREAM_PUBLISH_STATE newState, int elapseSinceLastState) {
+ (void)channel;
+ (void)oldState;
+ (void)newState;
+ (void)elapseSinceLastState;
+ }
+
+ /** Occurs when the audio subscribing state changes.
+ *
+ * @since v3.1.0
+ *
+ * This callback indicates the subscribing state change of a remote audio stream.
+ *
+ * @param channel The channel name.
+ * @param uid The ID of the remote user.
+ * @param oldState The previous subscribing state. For details, see #STREAM_SUBSCRIBE_STATE.
+ * @param newState The current subscribing state. For details, see #STREAM_SUBSCRIBE_STATE.
+ * @param elapseSinceLastState The time elapsed (ms) from the previous state to the current state.
+ */
+ virtual void onAudioSubscribeStateChanged(const char* channel, uid_t uid, STREAM_SUBSCRIBE_STATE oldState, STREAM_SUBSCRIBE_STATE newState, int elapseSinceLastState) {
+ (void)channel;
+ (void)uid;
+ (void)oldState;
+ (void)newState;
+ (void)elapseSinceLastState;
+ }
+
+    /** Occurs when the video subscribing state changes.
+ *
+ * @since v3.1.0
+ *
+ * This callback indicates the subscribing state change of a remote video stream.
+ *
+ * @param channel The channel name.
+ * @param uid The ID of the remote user.
+ * @param oldState The previous subscribing state. For details, see #STREAM_SUBSCRIBE_STATE.
+ * @param newState The current subscribing state. For details, see #STREAM_SUBSCRIBE_STATE.
+ * @param elapseSinceLastState The time elapsed (ms) from the previous state to the current state.
+ */
+ virtual void onVideoSubscribeStateChanged(const char* channel, uid_t uid, STREAM_SUBSCRIBE_STATE oldState, STREAM_SUBSCRIBE_STATE newState, int elapseSinceLastState) {
+ (void)channel;
+ (void)uid;
+ (void)oldState;
+ (void)newState;
+ (void)elapseSinceLastState;
+ }
+
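A small logger over two of the four new v3.1.0 state callbacks; the other two follow the same shape.

#include <cstdio>
#include "IAgoraRtcEngine.h"

class StreamStateLogger : public agora::rtc::IRtcEngineEventHandler {
public:
    void onAudioPublishStateChanged(const char* channel, agora::rtc::STREAM_PUBLISH_STATE oldState,
                                    agora::rtc::STREAM_PUBLISH_STATE newState, int elapseSinceLastState) override {
        std::printf("[%s] audio publish %d -> %d after %d ms\n",
                    channel, (int)oldState, (int)newState, elapseSinceLastState);
    }
    void onVideoSubscribeStateChanged(const char* channel, agora::rtc::uid_t uid,
                                      agora::rtc::STREAM_SUBSCRIBE_STATE oldState,
                                      agora::rtc::STREAM_SUBSCRIBE_STATE newState, int elapseSinceLastState) override {
        std::printf("[%s] video of %u subscribe %d -> %d after %d ms\n",
                    channel, uid, (int)oldState, (int)newState, elapseSinceLastState);
    }
};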
/** Reports which users are speaking, the speakers' volume and whether the local user is speaking.
This callback reports the IDs and volumes of the loudest speakers (at most 3 users) at the moment in the channel, and whether the local user is speaking.
@@ -3003,7 +3661,7 @@ class IRtcEngineEventHandler
By default, this callback is disabled. You can enable it by calling the \ref IRtcEngine::enableAudioVolumeIndication(int, int, bool) "enableAudioVolumeIndication" method.
Once enabled, this callback is triggered at the set interval, regardless of whether a user speaks or not.
- The SDK triggers two independent `onAudioVolumeIndication` callbacks at one time, which separately report the volume information of the local user and all the remote speakers.
+ The SDK triggers two independent `onAudioVolumeIndication` callbacks at one time, which separately report the volume information of the local user and all the remote speakers.
For more information, see the detailed parameter descriptions.
@note
@@ -3015,14 +3673,14 @@ class IRtcEngineEventHandler
@param speakers A pointer to AudioVolumeInfo:
- In the local user's callback, this struct contains the following members:
- - `uid` = 0,
+ - `uid` = 0,
- `volume` = `totalVolume`, which reports the sum of the voice volume and audio-mixing volume of the local user, and
- `vad`, which reports the voice activity status of the local user.
- In the remote speakers' callback, this array contains the following members:
- `uid` of the remote speaker,
- `volume`, which reports the sum of the voice volume and audio-mixing volume of each remote speaker, and
- `vad` = 0.
-
+
An empty speakers array in the callback indicates that no remote user is speaking at the moment.
@param speakerNumber Total number of speakers. The value range is [0, 3].
     - In the local user's callback, `speakerNumber` = 1, regardless of whether the local user speaks or not.
@@ -3037,15 +3695,17 @@ class IRtcEngineEventHandler
(void)totalVolume;
}
- /** Reports which user is the loudest speaker.
+ /** Occurs when the most active speaker is detected.
- If the user enables the audio volume indication by calling the \ref IRtcEngine::enableAudioVolumeIndication(int, int, bool) "enableAudioVolumeIndication" method, this callback returns the @p uid of the active speaker detected by the audio volume detection module of the SDK.
+ After a successful call of \ref IRtcEngine::enableAudioVolumeIndication(int, int, bool) "enableAudioVolumeIndication",
+ the SDK continuously detects which remote user has the loudest volume. During the current period, the remote user,
+ who is detected as the loudest for the most times, is the most active user.
- @note
- - To receive this callback, you need to call the \ref IRtcEngine::enableAudioVolumeIndication(int, int, bool) "enableAudioVolumeIndication" method.
- - This callback returns the user ID of the user with the highest voice volume during a period of time, instead of at the moment.
+     When the number of users is no less than two and an active speaker exists, the SDK triggers this callback and reports the `uid` of the most active speaker.
+ - If the most active speaker is always the same user, the SDK triggers this callback only once.
+ - If the most active speaker changes to another user, the SDK triggers this callback again and reports the `uid` of the new active speaker.
- @param uid User ID of the active speaker. A @p uid of 0 represents the local user.
+ @param uid The user ID of the most active speaker.
*/
virtual void onActiveSpeaker(uid_t uid) {
(void)uid;
@@ -3072,6 +3732,21 @@ class IRtcEngineEventHandler
(void)elapsed;
}
+ /** Occurs when the first video frame is published.
+ *
+ * @since v3.1.0
+ *
+ * The SDK triggers this callback under one of the following circumstances:
+ * - The local client enables the video module and calls \ref IRtcEngine::joinChannel "joinChannel" successfully.
+ * - The local client calls \ref IRtcEngine::muteLocalVideoStream "muteLocalVideoStream(true)" and \ref IRtcEngine::muteLocalVideoStream "muteLocalVideoStream(false)" in sequence.
+ * - The local client calls \ref IRtcEngine::disableVideo "disableVideo" and \ref IRtcEngine::enableVideo "enableVideo" in sequence.
+ *
+ * @param elapsed The time elapsed (ms) from the local client calling \ref IRtcEngine::joinChannel "joinChannel" until the SDK triggers this callback.
+ */
+ virtual void onFirstLocalVideoFramePublished(int elapsed) {
+ (void)elapsed;
+ }
+
/** Occurs when the first remote video frame is received and decoded.
*
* @deprecated v2.9.0
@@ -3113,7 +3788,6 @@ class IRtcEngineEventHandler
}
/** Occurs when the first remote video frame is rendered.
-
The SDK triggers this callback when the first frame of the remote video is displayed in the user's video window. The application can retrieve the time elapsed from a user joining the channel until the first video frame is displayed.
@param uid User ID of the remote user sending the video stream.
@@ -3129,12 +3803,12 @@ class IRtcEngineEventHandler
}
     /** @deprecated This method is deprecated from v3.0.0, use the \ref agora::rtc::IRtcEngineEventHandler::onRemoteAudioStateChanged "onRemoteAudioStateChanged" callback instead.
-
+
Occurs when a remote user's audio stream playback pauses/resumes.
The SDK triggers this callback when the remote user stops or resumes sending the audio stream by calling the \ref agora::rtc::IRtcEngine::muteLocalAudioStream "muteLocalAudioStream" method.
-
- @note This callback does not work properly when the number of users (in the Communication profile) or broadcasters (in the Live-broadcast profile) in the channel exceeds 17.
+
+ @note This callback does not work properly when the number of users (in the `COMMUNICATION` profile) or hosts (in the `LIVE_BROADCASTING` profile) in the channel exceeds 17.
@param uid User ID of the remote user.
@param muted Whether the remote user's audio stream is muted/unmuted:
@@ -3161,7 +3835,7 @@ class IRtcEngineEventHandler
* \ref agora::rtc::IRtcEngine::muteLocalVideoStream
* "muteLocalVideoStream" method.
*
- * @note This callback does not work properly when the number of users (in the Communication profile) or broadcasters (in the Live-broadcast profile) in the channel exceeds 17.
+ * @note This callback does not work properly when the number of users (in the `COMMUNICATION` profile) or hosts (in the `LIVE_BROADCASTING` profile) in the channel exceeds 17.
*
* @param uid User ID of the remote user.
* @param muted Whether the remote user's video stream playback is
@@ -3250,7 +3924,7 @@ class IRtcEngineEventHandler
/** Occurs when the camera focus area changes.
The SDK triggers this callback when the local user changes the camera focus position by calling the setCameraFocusPositionInPreview method.
-
+
@note This callback is for Android and iOS only.
@param x x coordinate of the changed camera focus area.
@@ -3302,9 +3976,9 @@ class IRtcEngineEventHandler
/** Occurs when the camera exposure area changes.
The SDK triggers this callback when the local user changes the camera exposure position by calling the setCameraExposurePosition method.
-
+
@note This callback is for Android and iOS only.
-
+
@param x x coordinate of the changed camera exposure area.
@param y y coordinate of the changed camera exposure area.
@param width Width of the changed camera exposure area.
@@ -3330,7 +4004,7 @@ class IRtcEngineEventHandler
}
/** Occurs when the state of the local user's audio mixing file changes.
-
+
When you call the \ref IRtcEngine::startAudioMixing "startAudioMixing" method and the state of audio mixing file changes, the SDK triggers this callback.
- When the audio mixing file plays, pauses playing, or stops playing, this callback returns 710, 711, or 713 in @p state, and 0 in @p errorCode.
- When exceptions occur during playback, this callback returns 714 in @p state and an error in @p errorCode.
@@ -3428,8 +4102,8 @@ class IRtcEngineEventHandler
(void)rotation;
}
/** Occurs when the remote video state changes.
- @note This callback does not work properly when the number of users (in the Communication profile) or broadcasters (in the Live-broadcast profile) in the channel exceeds 17.
-
+ @note This callback does not work properly when the number of users (in the `COMMUNICATION` profile) or hosts (in the `LIVE_BROADCASTING` profile) in the channel exceeds 17.
+
@param uid ID of the remote user whose video state changes.
@param state State of the remote video. See #REMOTE_VIDEO_STATE.
@param reason The reason of the remote video state change. See
@@ -3445,7 +4119,7 @@ class IRtcEngineEventHandler
(void)elapsed;
}
- /** Occurs when a specified remote user enables/disables the local video
+ /** Occurs when a specified remote user enables/disables the local video
* capturing function.
*
* @deprecated v2.9.0
@@ -3519,6 +4193,27 @@ class IRtcEngineEventHandler
/** Occurs when the media engine call starts.*/
virtual void onMediaEngineStartCallSuccess() {
}
+ /// @cond
+ /** Reports whether the super-resolution algorithm is enabled.
+ *
+ * @since v3.2.0
+ *
+ * After calling \ref IRtcEngine::enableRemoteSuperResolution "enableRemoteSuperResolution", the SDK triggers this
+ * callback to report whether the super-resolution algorithm is successfully enabled. If not successfully enabled,
+ * you can use `reason` for troubleshooting.
+ *
+ * @param uid The ID of the remote user.
+ * @param enabled Whether the super-resolution algorithm is successfully enabled:
+ * - true: The super-resolution algorithm is successfully enabled.
+ * - false: The super-resolution algorithm is not successfully enabled.
+ * @param reason The reason why the super-resolution algorithm is not successfully enabled. See #SUPER_RESOLUTION_STATE_REASON.
+ */
+ virtual void onUserSuperResolutionEnabled(uid_t uid, bool enabled, SUPER_RESOLUTION_STATE_REASON reason) {
+ (void)uid;
+ (void)enabled;
+ (void)reason;
+ }
+ /// @endcond
/** Occurs when the state of the media stream relay changes.
*
@@ -3540,12 +4235,29 @@ class IRtcEngineEventHandler
/** Occurs when the engine sends the first local audio frame.
- @param elapsed Time elapsed (ms) from the local user calling \ref IRtcEngine::joinChannel "joinChannel" until the SDK triggers this callback.
- */
+ @deprecated Deprecated as of v3.1.0. Use the \ref IRtcEngineEventHandler::onFirstLocalAudioFramePublished "onFirstLocalAudioFramePublished" callback instead.
+
+ @param elapsed Time elapsed (ms) from the local user calling \ref IRtcEngine::joinChannel "joinChannel" until the SDK triggers this callback.
+ */
virtual void onFirstLocalAudioFrame(int elapsed) {
(void)elapsed;
}
+ /** Occurs when the first audio frame is published.
+ *
+ * @since v3.1.0
+ *
+ * The SDK triggers this callback under one of the following circumstances:
+ * - The local client enables the audio module and calls \ref IRtcEngine::joinChannel "joinChannel" successfully.
+ * - The local client calls \ref IRtcEngine::muteLocalAudioStream "muteLocalAudioStream(true)" and \ref IRtcEngine::muteLocalAudioStream "muteLocalAudioStream(false)" in sequence.
+ * - The local client calls \ref IRtcEngine::disableAudio "disableAudio" and \ref IRtcEngine::enableAudio "enableAudio" in sequence.
+ *
+ * @param elapsed The time elapsed (ms) from the local client calling \ref IRtcEngine::joinChannel "joinChannel" until the SDK triggers this callback.
+ */
+ virtual void onFirstLocalAudioFramePublished(int elapsed) {
+ (void)elapsed;
+ }
+
/** Occurs when the engine receives the first audio frame from a specific remote user.
@deprecated v3.0.0
@@ -3577,8 +4289,20 @@ class IRtcEngineEventHandler
(void) errCode;
}
+ /** Reports events during the RTMP streaming.
+ *
+ * @since v3.1.0
+ *
+ * @param url The RTMP streaming URL.
+ * @param eventCode The event code. See #RTMP_STREAMING_EVENT
+ */
+ virtual void onRtmpStreamingEvent(const char* url, RTMP_STREAMING_EVENT eventCode) {
+ (void) url;
+ (void) eventCode;
+ }
+
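A sketch of how an app might surface the new RTMP streaming events; the handler name and logging are illustrative, and only the callback signature and `RTMP_STREAMING_EVENT` come from this header.

```cpp
#include <cstdio>
#include "IAgoraRtcEngine.h"  // include path is project-specific

// Illustrative handler: logs RTMP streaming events reported by the SDK.
class RtmpEventLogger : public agora::rtc::IRtcEngineEventHandler {
public:
    void onRtmpStreamingEvent(const char* url,
                              agora::rtc::RTMP_STREAMING_EVENT eventCode) override {
        std::printf("RTMP streaming event %d on %s\n",
                    static_cast<int>(eventCode), url ? url : "(null)");
    }
};
```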
/** @deprecated This method is deprecated, use the \ref agora::rtc::IRtcEngineEventHandler::onRtmpStreamingStateChanged "onRtmpStreamingStateChanged" callback instead.
-
+
Reports the result of calling the \ref IRtcEngine::addPublishStreamUrl "addPublishStreamUrl" method. (CDN live only.)
@param url The RTMP URL address.
@@ -3601,7 +4325,7 @@ class IRtcEngineEventHandler
(void)error;
}
/** @deprecated This method is deprecated, use the \ref agora::rtc::IRtcEngineEventHandler::onRtmpStreamingStateChanged "onRtmpStreamingStateChanged" callback instead.
-
+
Reports the result of calling the \ref agora::rtc::IRtcEngine::removePublishStreamUrl "removePublishStreamUrl" method. (CDN live only.)
This callback indicates whether you have successfully removed an RTMP stream from the CDN.
@@ -3611,16 +4335,16 @@ class IRtcEngineEventHandler
virtual void onStreamUnpublished(const char *url) {
(void)url;
}
-/** Occurs when the publisher's transcoding is updated.
- *
+/** Occurs when the publisher's transcoding is updated.
+ *
* When the `LiveTranscoding` class in the \ref agora::rtc::IRtcEngine::setLiveTranscoding "setLiveTranscoding" method updates, the SDK triggers the `onTranscodingUpdated` callback to report the update information to the local host.
- *
+ *
* @note If you call the `setLiveTranscoding` method to set the LiveTranscoding class for the first time, the SDK does not trigger the `onTranscodingUpdated` callback.
- *
+ *
*/
virtual void onTranscodingUpdated() {
}
- /** Occurs when a voice or video stream URL address is added to a live broadcast.
+ /** Occurs when a voice or video stream URL address is added to the live interactive streaming.
@param url Pointer to the URL address of the externally injected stream.
@param uid User ID.
@@ -3633,20 +4357,15 @@ class IRtcEngineEventHandler
}
/** Occurs when the local audio route changes.
-
- The SDK triggers this callback when the local audio route switches to an earpiece, speakerphone, headset, or Bluetooth device.
-
- @note This callback is for Android and iOS only.
-
- @param routing Audio output routing. See: #AUDIO_ROUTE_TYPE.
+ @param routing The current audio routing. See: #AUDIO_ROUTE_TYPE.
*/
virtual void onAudioRouteChanged(AUDIO_ROUTE_TYPE routing) {
- (void)routing;
- }
+ (void)routing;
+ }
/** Occurs when the published media stream falls back to an audio-only stream due to poor network conditions or switches back to the video after the network conditions improve.
- If you call \ref IRtcEngine::setLocalPublishFallbackOption "setLocalPublishFallbackOption" and set *option* as #STREAM_FALLBACK_OPTION_AUDIO_ONLY, the SDK triggers this callback when the
+ If you call \ref IRtcEngine::setLocalPublishFallbackOption "setLocalPublishFallbackOption" and set *option* as #STREAM_FALLBACK_OPTION_AUDIO_ONLY, the SDK triggers this callback when the
published stream falls back to audio-only mode due to poor uplink conditions, or when the audio stream switches back to the video after the uplink network condition improves.
@note If the local stream fallbacks to the audio-only stream, the remote user receives the \ref IRtcEngineEventHandler::onUserMuteVideo "onUserMuteVideo" callback.
@@ -3753,7 +4472,7 @@ class IRtcEngineEventHandler
*
* The SDK triggers this callback when the local user resumes or stops
* capturing the local audio stream by calling the
- * \ref agora::rtc::IRtcEngine::enableLocalAudio "enbaleLocalAudio" method.
+ * \ref agora::rtc::IRtcEngine::enableLocalAudio "enableLocalAudio" method.
*
* @param enabled Whether the microphone is enabled/disabled:
* - true: Enabled.
@@ -3775,7 +4494,7 @@ class IRtcEngineEventHandler
/** Occurs when the local network type changes.
- When the network connection is interrupted, this callback indicates whether the interruption is caused by a network type change or poor network conditions.
+ When the network connection is interrupted, this callback indicates whether the interruption is caused by a network type change or poor network conditions.
@param type See #NETWORK_TYPE.
*/
@@ -3858,7 +4577,9 @@ class IVideoDeviceManager
/** Enumerates the video devices.
- This method returns an IVideoDeviceCollection object including all video devices in the system. With the IVideoDeviceCollection object, the application can enumerate the video devices. The application must call the \ref IVideoDeviceCollection::release "release" method to release the returned object after using it.
+ This method returns an IVideoDeviceCollection object including all video devices
+ in the system. With the IVideoDeviceCollection object, the application can enumerate
+ the video devices. The application must call the \ref IVideoDeviceCollection::release "release" method to release the returned object after using it.
@return
- An IVideoDeviceCollection object including all video devices in the system: Success.
@@ -4258,31 +4979,30 @@ struct RtcEngineContext
IRtcEngineEventHandler* eventHandler;
/**
* The App ID issued to you by Agora. See [How to get the App ID](https://docs.agora.io/en/Agora%20Platform/token#get-an-app-id).
- * Only users in apps with the same App ID can join the same channel and communicate with each other. Use an App ID to create only one `IRtcEngine` instance. To change your App ID, call `release` to destroy the current `IRtcEngine` instance and then call `createAgoraRtcEngine`
+ * Only users in apps with the same App ID can join the same channel and communicate with each other. Use an App ID to create only
+ * one `IRtcEngine` instance. To change your App ID, call `release` to destroy the current `IRtcEngine` instance and then call `createAgoraRtcEngine`
* and `initialize` to create an `IRtcEngine` instance with the new App ID.
*/
const char* appId;
// For Android, it is the context (Activity or Application).
- // for windows,Video hot plug device
+ // for windows,Video hot plug device
/** The video window handle. Once set, this parameter enables you to plug
* or unplug the video devices while they are powered.
*/
- void* context;
+ void* context;
/**
- * The area of connection. This advanced feature applies to scenarios that have regional restrictions.
- *
- * You can use the bitwise OR operator (|) to specify multiple areas. For details, see #AREA_CODE.
+ * The region for connection. This advanced feature applies to scenarios that have regional restrictions.
+ *
+ * For the regions that Agora supports, see #AREA_CODE. After specifying the region, the SDK connects to the Agora servers within that region.
*
- * After specifying the area of connection:
- * - When the app that integrates the Agora SDK is used within the specified area, it connects to the Agora servers within the specified area under normal circumstances.
- * - When the app that integrates the Agora SDK is used out of the specified area, it connects to the Agora servers either in the specified area or in the area where the app is located.
+ * @note The SDK supports specifying only one region.
*/
- int areaCode;
+ unsigned int areaCode;
RtcEngineContext()
:eventHandler(NULL)
,appId(NULL)
,context(NULL)
- ,areaCode(rtc::AREA_CODE_GLOBAL)
+ ,areaCode(rtc::AREA_CODE_GLOB)
{}
};
@@ -4318,7 +5038,7 @@ class IMetadataObserver
/** Buffer address of the sent or received Metadata.
*/
unsigned char *buffer;
- /** Time statmp of the frame following the metadata.
+ /** Timestamp (ms) of the frame following the metadata.
*/
long long timeStampMs;
};
@@ -4331,7 +5051,7 @@ class IMetadataObserver
- `uid`: ID of the user who sends the metadata.
- `size`: The size of the sent or received metadata.
- `buffer`: The sent or received metadata.
- - `timeStampMs`: The timestamp of the metadata.
+ - `timeStampMs`: The timestamp (ms) of the metadata.
The SDK triggers this callback after you successfully call the \ref agora::rtc::IRtcEngine::registerMediaMetadataObserver "registerMediaMetadataObserver" method. You need to specify the maximum size of the metadata in the return value of this callback.
@@ -4357,6 +5077,65 @@ class IMetadataObserver
virtual void onMetadataReceived(const Metadata &metadata) = 0;
};
+/** Encryption mode.
+*/
+enum ENCRYPTION_MODE
+{
+ /** 1: (Default) 128-bit AES encryption, XTS mode.
+ */
+ AES_128_XTS = 1,
+ /** 2: 128-bit AES encryption, ECB mode.
+ */
+ AES_128_ECB = 2,
+ /** 3: 256-bit AES encryption, XTS mode.
+ */
+ AES_256_XTS = 3,
+ /** 4: 128-bit SM4 encryption, ECB mode.
+ */
+ SM4_128_ECB = 4,
+ /** Enumerator boundary.
+ */
+ MODE_END,
+};
+
+/** Configurations of built-in encryption schemas. */
+struct EncryptionConfig{
+ /**
+ * Encryption mode. The default encryption mode is `AES_128_XTS`. See #ENCRYPTION_MODE.
+ */
+ ENCRYPTION_MODE encryptionMode;
+ /**
+ * Encryption key in string type.
+ *
+ * @note If you do not set an encryption key or set it as NULL, you cannot use the built-in encryption, and the SDK returns #ERR_INVALID_ARGUMENT (-2).
+ */
+ const char* encryptionKey;
+
+ EncryptionConfig() {
+ encryptionMode = AES_128_XTS;
+ encryptionKey = nullptr;
+ }
+
+ /// @cond
+ const char* getEncryptionString() const {
+ switch(encryptionMode)
+ {
+ case AES_128_XTS:
+ return "aes-128-xts";
+ case AES_128_ECB:
+ return "aes-128-ecb";
+ case AES_256_XTS:
+ return "aes-256-xts";
+ case SM4_128_ECB:
+ return "sm4-128-ecb";
+ default:
+ return "aes-128-xts";
+ }
+ return "aes-128-xts";
+ }
+ /// @endcond
+};
+
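A minimal sketch of filling in `EncryptionConfig`. It assumes the engine exposes an `enableEncryption(bool, const EncryptionConfig&)` method, which this excerpt does not show; the key value is a placeholder.

```cpp
#include "IAgoraRtcEngine.h"  // include path is project-specific

// Hypothetical helper: configures built-in encryption before joining a channel.
// enableEncryption() is assumed to exist on IRtcEngine in this SDK version.
int enableBuiltInEncryption(agora::rtc::IRtcEngine* engine, const char* key) {
    agora::rtc::EncryptionConfig config;
    config.encryptionMode = agora::rtc::AES_256_XTS;  // see ENCRYPTION_MODE above
    config.encryptionKey = key;                       // a NULL key yields ERR_INVALID_ARGUMENT (-2)
    return engine->enableEncryption(true, config);    // assumed API, not shown in this diff
}
```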
/** IRtcEngine is the base interface class of the Agora SDK that provides the main Agora SDK methods invoked by your application.
Enable the Agora SDK's communication functionality through the creation of an IRtcEngine object, then call the methods of this object.
@@ -4368,9 +5147,9 @@ class IRtcEngine
public:
/** Initializes the Agora service.
- *
+ *
* Unless otherwise specified, all the methods provided by the IRtcEngine class are executed asynchronously. Agora recommends calling these methods in the same thread.
- *
+ *
* @note Ensure that you call the
* \ref agora::rtc::IRtcEngine::createAgoraRtcEngine
* "createAgoraRtcEngine" and \ref agora::rtc::IRtcEngine::initialize
@@ -4379,59 +5158,122 @@ class IRtcEngine
* @param context Pointer to the RTC engine context. See RtcEngineContext.
*
* @return
- * - 0: Success.
+ * - 0(ERR_OK): Success.
* - < 0: Failure.
- * - `ERR_INVALID_APP_ID (101)`: The app ID is invalid. Check if it is in the correct format.
+ * - -1(ERR_FAILED): A general error occurs (no specified reason).
 + * - -2(ERR_INVALID_ARGUMENT): No `IRtcEngineEventHandler` object is specified.
+ * - -7(ERR_NOT_INITIALIZED): The SDK is not initialized. Check whether `context` is properly set.
 + * - -22(ERR_RESOURCE_LIMITED): The resource is limited. The app uses too many system resources and the SDK fails to allocate resources.
+ * - -101(ERR_INVALID_APP_ID): The App ID is invalid.
*/
virtual int initialize(const RtcEngineContext& context) = 0;
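The create-and-initialize sequence described above can be sketched as follows. The App ID is a placeholder, and a default-constructed `IRtcEngineEventHandler` stands in for a real subclass (all of its callbacks have default bodies).

```cpp
#include "IAgoraRtcEngine.h"  // include path is project-specific

using namespace agora::rtc;

static IRtcEngineEventHandler g_handler;  // replace with your own handler subclass

IRtcEngine* createAndInitEngine(const char* appId) {
    IRtcEngine* engine = createAgoraRtcEngine();
    if (engine == nullptr) return nullptr;

    RtcEngineContext ctx;
    ctx.eventHandler = &g_handler;  // must not be NULL, or initialize() returns ERR_INVALID_ARGUMENT
    ctx.appId = appId;              // App ID issued by Agora
    // ctx.areaCode keeps its default, AREA_CODE_GLOB

    if (engine->initialize(ctx) != 0) {  // e.g. -101 (ERR_INVALID_APP_ID)
        IRtcEngine::release(true);       // destroy the half-initialized instance
        return nullptr;
    }
    return engine;
}
```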
/** Releases all IRtcEngine resources.
-
- @note
- - If you want to create a new `IRtcEngine` instance after releasing the current one,
- ensure that you wait till this method is executed.
- - Do not immediately uninstall the SDK's dynamic library after the call, or it may cause a crash due to the SDK clean-up thread not quitting.
-
- @param sync
- - true: (Synchronous call) The result returns after the IRtcEngine resources are released. The application should not call this method in the SDK generated callbacks. Otherwise, the SDK must wait for the callbacks to return to recover the associated IRtcEngine resources, resulting in a deadlock. The SDK automatically detects the deadlock and converts this method into an asynchronous call, causing the test to take additional time.
- - false: (Asynchronous call) The result returns immediately, even when the IRtcEngine resources have not been released. The SDK releases all resources.
+ *
+ * Use this method for apps in which users occasionally make voice or video calls. When users do not make calls, you
+ * can free up resources for other operations. Once you call `release` to destroy the created `IRtcEngine` instance,
+ * you cannot use any method or callback in the SDK any more. If you want to use the real-time communication functions
+ * again, you must call \ref createAgoraRtcEngine "createAgoraRtcEngine" and \ref agora::rtc::IRtcEngine::initialize "initialize"
+ * to create a new `IRtcEngine` instance.
+ *
+ * @note If you want to create a new `IRtcEngine` instance after destroying the current one, ensure that you wait
+ * till the `release` method completes executing.
+ *
+ * @param sync
+ * - true: Synchronous call. Agora suggests calling this method in a sub-thread to avoid congestion in the main thread
+ * because the synchronous call and the app cannot move on to another task until the execution completes.
+ * Besides, you **cannot** call this method in any method or callback of the SDK. Otherwise, the SDK cannot release the
+ * resources occupied by the `IRtcEngine` instance until the callbacks return results, which may result in a deadlock.
+ * The SDK automatically detects the deadlock and converts this method into an asynchronous call, causing the test to
+ * take additional time.
+ * - false: Asynchronous call. Do not immediately uninstall the SDK's dynamic library after the call, or it may cause
+ * a crash due to the SDK clean-up thread not quitting.
*/
AGORA_CPP_API static void release (bool sync = false);
/** Sets the channel profile of the Agora IRtcEngine.
-
- The Agora IRtcEngine differentiates channel profiles and applies optimization algorithms accordingly.
- For example, it prioritizes smoothness and low latency for a video call, and prioritizes video quality for a video broadcast.
-
- @warning
- - To ensure the quality of real-time communication, we recommend that all users in a channel use the same channel profile.
- - Call this method before calling \ref IRtcEngine::joinChannel "joinChannel" . You cannot set the channel profile once you have joined the channel.
-
- @param profile The channel profile of the Agora IRtcEngine. See #CHANNEL_PROFILE_TYPE
- @return
- - 0: Success.
- - < 0: Failure.
+ *
+ * The Agora IRtcEngine differentiates channel profiles and applies optimization algorithms accordingly.
+ * For example, it prioritizes smoothness and low latency for a video call, and prioritizes video quality for the live interactive video streaming.
+ *
+ * @warning
+ * - To ensure the quality of real-time communication, we recommend that all users in a channel use the same channel profile.
+ * - Call this method before calling \ref IRtcEngine::joinChannel "joinChannel" . You cannot set the channel profile once you have joined the channel.
+ * - The default audio route and video encoding bitrate are different in different channel profiles. For details, see
+ * \ref IRtcEngine::setDefaultAudioRouteToSpeakerphone "setDefaultAudioRouteToSpeakerphone" and \ref IRtcEngine::setVideoEncoderConfiguration "setVideoEncoderConfiguration".
+ *
+ * @param profile The channel profile of the Agora IRtcEngine. See #CHANNEL_PROFILE_TYPE
+ * @return
+ * - 0(ERR_OK): Success.
+ * - < 0: Failure.
+ * - -2 (ERR_INVALID_ARGUMENT): The parameter is invalid.
+ * - -7(ERR_NOT_INITIALIZED): The SDK is not initialized.
*/
virtual int setChannelProfile(CHANNEL_PROFILE_TYPE profile) = 0;
- /** Sets the role of the user, such as a host or an audience (default), before joining a channel in a live broadcast.
-
- This method can be used to switch the user role in a live broadcast after the user joins a channel.
-
- In the Live Broadcast profile, when a user switches user roles after joining a channel, a successful \ref agora::rtc::IRtcEngine::setClientRole "setClientRole" method call triggers the following callbacks:
- - The local client: \ref agora::rtc::IRtcEngineEventHandler::onClientRoleChanged "onClientRoleChanged"
- - The remote client: \ref agora::rtc::IRtcEngineEventHandler::onUserJoined "onUserJoined" or \ref agora::rtc::IRtcEngineEventHandler::onUserOffline "onUserOffline" (BECOME_AUDIENCE)
-
- @note
- This method applies only to the Live-broadcast profile.
-
- @param role Sets the role of the user. See #CLIENT_ROLE_TYPE.
- @return
- - 0: Success.
- - < 0: Failure.
+ /** Sets the role of the user, such as a host or an audience (default), before joining a channel in the live interactive streaming.
+ *
+ * This method can be used to switch the user role in the live interactive streaming after the user joins a channel.
+ *
+ * In the `LIVE_BROADCASTING` profile, when a user switches user roles after joining a channel, a successful \ref agora::rtc::IRtcEngine::setClientRole "setClientRole" method call triggers the following callbacks:
+ * - The local client: \ref agora::rtc::IRtcEngineEventHandler::onClientRoleChanged "onClientRoleChanged"
+ * - The remote client: \ref agora::rtc::IRtcEngineEventHandler::onUserJoined "onUserJoined" or \ref agora::rtc::IRtcEngineEventHandler::onUserOffline "onUserOffline" (BECOME_AUDIENCE)
+ *
+ * @note
+ * This method applies only to the `LIVE_BROADCASTING` profile.
+ *
+ * @param role Sets the role of the user. See #CLIENT_ROLE_TYPE.
+ *
+ * @return
+ * - 0(ERR_OK): Success.
+ * - < 0: Failure.
+ * - -1(ERR_FAILED): A general error occurs (no specified reason).
 + * - -2(ERR_INVALID_ARGUMENT): The parameter is invalid.
+ * - -7(ERR_NOT_INITIALIZED): The SDK is not initialized.
*/
virtual int setClientRole(CLIENT_ROLE_TYPE role) = 0;
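A short sketch of the sequence the notes above imply: set the channel profile before joining, then pick a role. `CHANNEL_PROFILE_LIVE_BROADCASTING` and `CLIENT_ROLE_BROADCASTER` are the usual enumerator names and are assumed to be defined elsewhere in this header.

```cpp
// Sketch: configure a host for live interactive streaming before joinChannel.
int prepareAsHost(agora::rtc::IRtcEngine* engine) {
    // The profile cannot be changed after joining the channel.
    int ret = engine->setChannelProfile(agora::rtc::CHANNEL_PROFILE_LIVE_BROADCASTING);
    if (ret != 0) return ret;  // e.g. -7 (ERR_NOT_INITIALIZED)

    // LIVE_BROADCASTING only; the default role is audience.
    return engine->setClientRole(agora::rtc::CLIENT_ROLE_BROADCASTER);
}
```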
-
+ /// @cond
+ /** Sets the role of a user in a live interactive streaming.
+ *
+ * @since v3.2.0
+ *
+ * You can call this method either before or after joining the channel to set the user role as audience or host. If
+ * you call this method to switch the user role after joining the channel, the SDK triggers the following callbacks:
+ * - The local client: \ref IRtcEngineEventHandler::onClientRoleChanged "onClientRoleChanged".
+ * - The remote client: \ref IRtcEngineEventHandler::onUserJoined "onUserJoined"
+ * or \ref IRtcEngineEventHandler::onUserOffline "onUserOffline".
+ *
+ * @note
+ * - This method applies to the `LIVE_BROADCASTING` profile only (when the `profile` parameter in
+ * \ref IRtcEngine::setChannelProfile "setChannelProfile" is set as `CHANNEL_PROFILE_LIVE_BROADCASTING`).
+ * - The difference between this method and \ref IRtcEngine::setClientRole(CLIENT_ROLE_TYPE) "setClientRole1" is that
+ * this method can set the user level in addition to the user role.
+ * - The user role determines the permissions that the SDK grants to a user, such as permission to send local
+ * streams, receive remote streams, and push streams to a CDN address.
+ * - The user level determines the level of services that a user can enjoy within the permissions of the user's
+ * role. For example, an audience can choose to receive remote streams with low latency or ultra low latency. Levels
+ * affect prices.
+ *
+ * **Example**
+ * ```cpp
+ * ClientRoleOptions options;
+ * options.audienceLatencyLevel = AUDIENCE_LATENCY_LEVEL_ULTRA_LOW_LATENCY;
+ * options.audienceLatencyLevel = AUDIENCE_LATENCY_LEVEL_LOW_LATENCY;
+ * agoraEngine->setClientRole(role, options);
+ * ```
+ *
+ * @param role The role of a user in a live interactive streaming. See #CLIENT_ROLE_TYPE.
+ * @param options The detailed options of a user, including user level. See ClientRoleOptions.
+ *
+ * @return
+ * - 0(ERR_OK): Success.
+ * - < 0: Failure.
+ * - -1(ERR_FAILED): A general error occurs (no specified reason).
 + * - -2(ERR_INVALID_ARGUMENT): The parameter is invalid.
+ * - -7(ERR_NOT_INITIALIZED): The SDK is not initialized.
+ */
+ virtual int setClientRole(CLIENT_ROLE_TYPE role, const ClientRoleOptions& options) = 0;
+ /// @endcond
/** Joins a channel with the user ID.
Users in the same channel can talk to each other, and multiple users in the same channel can start a group chat. Users with different App IDs cannot call each other.
@@ -4441,16 +5283,16 @@ class IRtcEngine
A successful \ref agora::rtc::IRtcEngine::joinChannel "joinChannel" method call triggers the following callbacks:
- The local client: \ref agora::rtc::IRtcEngineEventHandler::onJoinChannelSuccess "onJoinChannelSuccess"
- - The remote client: \ref agora::rtc::IRtcEngineEventHandler::onUserJoined "onUserJoined" , if the user joining the channel is in the Communication profile, or is a BROADCASTER in the Live Broadcast profile.
+ - The remote client: \ref agora::rtc::IRtcEngineEventHandler::onUserJoined "onUserJoined" , if the user joining the channel is in the `COMMUNICATION` profile, or is a host in the `LIVE_BROADCASTING` profile.
When the connection between the client and Agora's server is interrupted due to poor network conditions, the SDK tries reconnecting to the server. When the local client successfully rejoins the channel, the SDK triggers the \ref agora::rtc::IRtcEngineEventHandler::onRejoinChannelSuccess "onRejoinChannelSuccess" callback on the local client.
@note A channel does not accept duplicate uids, such as two users with the same @p uid. If you set @p uid as 0, the system automatically assigns a @p uid. If you want to join a channel from different devices, ensure that each device has a different uid.
@warning Ensure that the App ID used for creating the token is the same App ID used by the \ref IRtcEngine::initialize "initialize" method for initializing the RTC engine. Otherwise, the CDN live streaming may fail.
- @param token Pointer to the token generated by the application server. In most circumstances, a static App ID suffices. For added security, use a Channel Key.
+ @param token Pointer to the token generated by the application server. In most circumstances, a static App ID suffices. For added security, use a token.
- If the user uses a static App ID, *token* is optional and can be set as NULL.
- - If the user uses a Channel Key, Agora issues an additional App Certificate for you to generate a user key based on the algorithm and App Certificate for user authentication on the server.
+ - If the user uses a token, Agora issues an additional App Certificate for you to generate a user key based on the algorithm and App Certificate for user authentication on the server.
@param channelId Pointer to the unique channel name for the Agora RTC session in the string format smaller than 64 bytes. Supported characters:
- All lowercase English letters: a to z.
- All uppercase English letters: A to Z.
@@ -4461,16 +5303,18 @@ class IRtcEngine
@param uid (Optional) User ID. A 32-bit unsigned integer with a value ranging from 1 to 2^32-1. The @p uid must be unique. If a @p uid is not assigned (or set to 0), the SDK assigns and returns a @p uid in the \ref IRtcEngineEventHandler::onJoinChannelSuccess "onJoinChannelSuccess" callback. Your application must record and maintain the returned *uid* since the SDK does not do so.
@return
- - 0: Success.
- - < 0: Failure:
- - #ERR_INVALID_ARGUMENT (-2)
- - #ERR_NOT_READY (-3)
- - #ERR_REFUSED (-5)
+ - 0(ERR_OK): Success.
+ - < 0: Failure.
 + - -2(ERR_INVALID_ARGUMENT): The parameter is invalid.
+ - -3(ERR_NOT_READY): The SDK fails to be initialized. You can try re-initializing the SDK.
+ - -5(ERR_REFUSED): The request is rejected. This may be caused by the following:
+ - You have created an IChannel object with the same channel name.
+ - You have joined and published a stream in a channel created by the IChannel object.
*/
virtual int joinChannel(const char* token, const char* channelId, const char* info, uid_t uid) = 0;
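A sketch of joining and later leaving a channel with the parameters described above; the token and channel name are placeholders supplied by your app server.

```cpp
// Sketch: join a channel, then leave when the session ends.
int runDemoSession(agora::rtc::IRtcEngine* engine, const char* token) {
    // info is unused here; uid 0 lets the SDK assign a user ID and report it
    // via onJoinChannelSuccess.
    int ret = engine->joinChannel(token, "demo-channel", "", 0);
    if (ret != 0) {
        return ret;  // -2 invalid argument, -3 not ready, -5 refused (see above)
    }

    // ... session in progress; events arrive on the registered IRtcEngineEventHandler ...

    return engine->leaveChannel();
}
```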
/** Switches to a different channel.
*
- * This method allows the audience of a Live-broadcast channel to switch
+ * This method allows the audience of a `LIVE_BROADCASTING` channel to switch
* to a different channel.
*
* After the user successfully switches to another channel, the
@@ -4480,16 +5324,16 @@ class IRtcEngine
* user has left the original channel and joined a new one.
*
* @note
- * This method applies to the audience role in a Live-broadcast channel
+ * This method applies to the audience role in a `LIVE_BROADCASTING` channel
* only.
*
* @param token The token generated at your server:
* - For low-security requirements: You can use the temporary token
* generated in Console. For details, see
- * [Get a temporary token](https://docs.agora.io/en/Agora%20Platform/token?platfor%20*%20m=All%20Platforms#get-a-temporary-token).
+ * [Get a temporary token](https://docs.agora.io/en/Agora%20Platform/token?platform=All%20Platforms#generate-a-token).
* - For high-security requirements: Use the token generated at your
* server. For details, see
- * [Get a token](https://docs.agora.io/en/Agora%20Platform/token?platfor%20*%20m=All%20Platforms#get-a-token).
+ * [Get a token](https://docs.agora.io/en/Interactive%20Broadcast/token_server?platform=All%20Platforms).
* @param channelId Unique channel name for the AgoraRTC session in the
* string format. The string length must be less than 64 bytes. Supported
* character scopes are:
@@ -4498,16 +5342,19 @@ class IRtcEngine
* - All numeric characters: 0 to 9.
* - The space character.
* - Punctuation characters and other symbols, including: "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", " {", "}", "|", "~", ",".
-
- @return
- - 0: Success.
- - < 0: Failure.
- - #ERR_INVALID_ARGUMENT (-2)
- - #ERR_NOT_READY (-3)
- - #ERR_REFUSED (-5)
+ *
+ * @return
+ * - 0(ERR_OK): Success.
+ * - < 0: Failure.
+ * - -1(ERR_FAILED): A general error occurs (no specified reason).
 + * - -2(ERR_INVALID_ARGUMENT): The parameter is invalid.
+ * - -5(ERR_REFUSED): The request is rejected, probably because the user is not an audience.
+ * - -7(ERR_NOT_INITIALIZED): The SDK is not initialized.
+ * - -102(ERR_INVALID_CHANNEL_NAME): The channel name is invalid.
+ * - -113(ERR_NOT_IN_CHANNEL): The user is not in the channel.
*/
virtual int switchChannel(const char* token, const char* channelId) = 0;
-
+
/** Allows a user to leave a channel, such as hanging up or exiting a call.
After joining a channel, the user must call the *leaveChannel* method to end the call before joining another channel.
@@ -4518,18 +5365,21 @@ class IRtcEngine
A successful \ref agora::rtc::IRtcEngine::leaveChannel "leaveChannel" method call triggers the following callbacks:
- The local client: \ref agora::rtc::IRtcEngineEventHandler::onLeaveChannel "onLeaveChannel"
- - The remote client: \ref agora::rtc::IRtcEngineEventHandler::onUserOffline "onUserOffline" , if the user leaving the channel is in the Communication channel, or is a BROADCASTER in the Live Broadcast profile.
+ - The remote client: \ref agora::rtc::IRtcEngineEventHandler::onUserOffline "onUserOffline" , if the user leaving the channel is in the `COMMUNICATION` channel, or is a host in the `LIVE_BROADCASTING` profile.
@note
- If you call the \ref IRtcEngine::release "release" method immediately after the *leaveChannel* method, the *leaveChannel* process interrupts, and the \ref IRtcEngineEventHandler::onLeaveChannel "onLeaveChannel" callback is not triggered.
- If you call the *leaveChannel* method during a CDN live streaming, the SDK triggers the \ref IRtcEngine::removePublishStreamUrl "removePublishStreamUrl" method.
@return
- - 0: Success.
+ - 0(ERR_OK): Success.
- < 0: Failure.
+ - -1(ERR_FAILED): A general error occurs (no specified reason).
 + - -2(ERR_INVALID_ARGUMENT): The parameter is invalid.
+ - -7(ERR_NOT_INITIALIZED): The SDK is not initialized.
*/
virtual int leaveChannel() = 0;
-
+
/** Gets a new token when the current token expires after a period of time.
The `token` expires after a period of time once the token schema is enabled when:
@@ -4540,9 +5390,13 @@ class IRtcEngine
The application should call this method to get the new `token`. Failure to do so will result in the SDK disconnecting from the server.
@param token Pointer to the new token.
+
@return
- - 0: Success.
+ - 0(ERR_OK): Success.
- < 0: Failure.
+ - -1(ERR_FAILED): A general error occurs (no specified reason).
 + - -2(ERR_INVALID_ARGUMENT): The parameter is invalid.
+ - -7(ERR_NOT_INITIALIZED): The SDK is not initialized.
*/
virtual int renewToken(const char* token) = 0;
@@ -4594,7 +5448,7 @@ class IRtcEngine
After the user successfully joins the channel, the SDK triggers the following callbacks:
- The local client: \ref agora::rtc::IRtcEngineEventHandler::onLocalUserRegistered "onLocalUserRegistered" and \ref agora::rtc::IRtcEngineEventHandler::onJoinChannelSuccess "onJoinChannelSuccess" .
- The remote client: \ref agora::rtc::IRtcEngineEventHandler::onUserJoined "onUserJoined" and \ref agora::rtc::IRtcEngineEventHandler::onUserInfoUpdated "onUserInfoUpdated" , if the user joining the channel is in the Communication profile, or is a BROADCASTER in the Live Broadcast profile.
+ The remote client: \ref agora::rtc::IRtcEngineEventHandler::onUserJoined "onUserJoined" and \ref agora::rtc::IRtcEngineEventHandler::onUserInfoUpdated "onUserInfoUpdated" , if the user joining the channel is in the `COMMUNICATION` profile, or is a host in the `LIVE_BROADCASTING` profile.
@note To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account.
If a user joins the channel with the Agora Web SDK, ensure that the uid of the user is set to the same parameter type.
@@ -4620,12 +5474,12 @@ class IRtcEngine
- < 0: Failure.
- #ERR_INVALID_ARGUMENT (-2)
- #ERR_NOT_READY (-3)
- - #ERR_REFUSED (-5)
+ - #ERR_REFUSED (-5)
*/
virtual int joinChannelWithUserAccount(const char* token,
const char* channelId,
const char* userAccount) = 0;
-
+
/** Gets the user information by passing in the user account.
After a remote user joins the channel, the SDK gets the user ID and user account of the remote user, caches them
@@ -4676,7 +5530,7 @@ class IRtcEngine
@note
- After calling this method, always call the \ref IRtcEngine::stopEchoTest "stopEchoTest" method to end the test. Otherwise, the application cannot run the next echo test.
- - In the Live-broadcast profile, only the hosts can call this method. If the user switches from the Communication to Live-broadcast profile, the user must call the \ref IRtcEngine::setClientRole "setClientRole" method to change the user role from the audience (default) to the host before calling this method.
+   - In the `LIVE_BROADCASTING` profile, only the hosts can call this method. If the user switches from the `COMMUNICATION` to `LIVE_BROADCASTING` profile, the user must call the \ref IRtcEngine::setClientRole "setClientRole" method to change the user role from the audience (default) to the host before calling this method.
@return
- 0: Success.
@@ -4693,7 +5547,7 @@ class IRtcEngine
@note
- Call this method before joining a channel.
- After calling this method, call the \ref IRtcEngine::stopEchoTest "stopEchoTest" method to end the test. Otherwise, the app cannot run the next echo test, or call the \ref IRtcEngine::joinChannel "joinChannel" method.
- - In the Live-broadcast profile, only a host can call this method.
+ - In the `LIVE_BROADCASTING` profile, only a host can call this method.
@param intervalInSeconds The time interval (s) between when you speak and when the recording plays back.
@return
@@ -4755,6 +5609,7 @@ class IRtcEngine
Each video profile includes a set of parameters, such as the resolution, frame rate, and bitrate. If the camera device does not support the specified resolution, the SDK automatically chooses a suitable camera resolution, keeping the encoder resolution specified by the *setVideoProfile* method.
@note
+ - You can call this method either before or after joining a channel.
- If you do not need to set the video profile after joining the channel, call this method before the \ref IRtcEngine::enableVideo "enableVideo" method to reduce the render time of the first video frame.
- Always set the video profile before calling the \ref IRtcEngine::joinChannel "joinChannel" or \ref IRtcEngine::startPreview "startPreview" method.
@@ -4777,7 +5632,9 @@ class IRtcEngine
The parameters specified in this method are the maximum values under ideal network conditions. If the video engine cannot render the video using the specified parameters due to poor network conditions, the parameters further down the list are considered until a successful configuration is found.
- @note If you do not need to set the video encoder configuration after joining the channel, you can call this method before the \ref IRtcEngine::enableVideo "enableVideo" method to reduce the render time of the first video frame.
+ @note
+ - You can call this method either before or after joining a channel.
+ - If you do not need to set the video encoder configuration after joining the channel, you can call this method before the \ref IRtcEngine::enableVideo "enableVideo" method to reduce the render time of the first video frame.
@param config Sets the local video encoder configuration. See VideoEncoderConfiguration.
@return
@@ -4787,7 +5644,7 @@ class IRtcEngine
virtual int setVideoEncoderConfiguration(const VideoEncoderConfiguration& config) = 0;
/** Sets the camera capture configuration.
- For a video call or live broadcast, generally the SDK controls the camera output parameters. When the default camera capturer settings do not meet special requirements or cause performance problems, we recommend using this method to set the camera capturer configuration:
+ For a video call or the live interactive video streaming, generally the SDK controls the camera output parameters. When the default camera capturer settings do not meet special requirements or cause performance problems, we recommend using this method to set the camera capturer configuration:
- If the resolution or frame rate of the captured raw video data are higher than those set by \ref IRtcEngine::setVideoEncoderConfiguration "setVideoEncoderConfiguration", processing video frames requires extra CPU and RAM usage and degrades performance. We recommend setting config as CAPTURER_OUTPUT_PREFERENCE_PERFORMANCE = 1 to avoid such problems.
- If you do not need local video preview or are willing to sacrifice preview quality, we recommend setting config as CAPTURER_OUTPUT_PREFERENCE_PERFORMANCE = 1 to optimize CPU and RAM usage.
@@ -4806,13 +5663,14 @@ class IRtcEngine
/** Initializes the local video view.
This method initializes the video view of a local stream on the local device. It affects only the video view that the local user sees, not the published local video stream.
-
+
Call this method to bind the local video stream to a video view and to set the rendering and mirror modes of the video view.
The binding is still valid after the user leaves the channel, which means that the window still displays. To unbind the view, set the *view* in VideoCanvas to NULL.
-
- @note
- - Call this method before joining a channel.
+
+ @note
+ - You can call this method either before or after joining a channel.
- During a call, you can call this method as many times as necessary to update the display mode of the local video view.
+
@param canvas Pointer to the local video view and settings. See VideoCanvas.
@return
- 0: Success.
@@ -4855,17 +5713,19 @@ class IRtcEngine
virtual int startPreview() = 0;
/** Prioritizes a remote user's stream.
-
- Use this method with the \ref IRtcEngine::setRemoteSubscribeFallbackOption "setRemoteSubscribeFallbackOption" method. If the fallback function is enabled for a subscribed stream, the SDK ensures the high-priority user gets the best possible stream quality.
-
- @note The Agora SDK supports setting @p userPriority as high for one user only.
-
- @param uid The ID of the remote user.
- @param userPriority Sets the priority of the remote user. See #PRIORITY_TYPE.
-
- @return
- - 0: Success.
- - < 0: Failure.
+ *
+ * The SDK ensures the high-priority user gets the best possible stream quality.
+ *
+ * @note
+ * - The Agora SDK supports setting @p userPriority as high for one user only.
+ * - Ensure that you call this method before joining a channel.
+ *
+ * @param uid The ID of the remote user.
+ * @param userPriority Sets the priority of the remote user. See #PRIORITY_TYPE.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
*/
virtual int setRemoteUserPriority(uid_t uid, PRIORITY_TYPE userPriority) = 0;
@@ -4897,19 +5757,21 @@ class IRtcEngine
/** Disables/Re-enables the local audio function.
- The audio function is enabled by default. This method disables or re-enables the local audio function, that is, to stop or restart local audio capturing.
+ The audio function is enabled by default. This method disables or re-enables the local audio function, that is, to stop or restart local audio capturing.
- This method does not affect receiving or playing the remote audio streams,and enableLocalAudio(false) is applicable to scenarios where the user wants to
- receive remote audio streams without sending any audio stream to other users in the channel.
 +     This method does not affect receiving or playing the remote audio streams, and enableLocalAudio(false) is applicable to scenarios where the user wants to
+ receive remote audio streams without sending any audio stream to other users in the channel.
- The SDK triggers the \ref IRtcEngineEventHandler::onMicrophoneEnabled "onMicrophoneEnabled" callback once the local audio function is disabled or enabled.
+ Once the local audio function is disabled or re-enabled, the SDK triggers the \ref agora::rtc::IRtcEngineEventHandler::onLocalAudioStateChanged "onLocalAudioStateChanged" callback,
+ which reports `LOCAL_AUDIO_STREAM_STATE_STOPPED(0)` or `LOCAL_AUDIO_STREAM_STATE_RECORDING(1)`.
@note
- This method is different from the \ref agora::rtc::IRtcEngine::muteLocalAudioStream "muteLocalAudioStream" method:
- - \ref agora::rtc::IRtcEngine::enableLocalAudio "enableLocalAudio": Disables/Re-enables the local audio capturing and processing.
+ - This method is different from the \ref agora::rtc::IRtcEngine::muteLocalAudioStream "muteLocalAudioStream" method:
+ - \ref agora::rtc::IRtcEngine::enableLocalAudio "enableLocalAudio": Disables/Re-enables the local audio capturing and processing.
If you disable or re-enable local audio recording using the `enableLocalAudio` method, the local user may hear a pause in the remote audio playback.
- \ref agora::rtc::IRtcEngine::muteLocalAudioStream "muteLocalAudioStream": Sends/Stops sending the local audio streams.
-
+ - You can call this method either before or after joining a channel.
+
@param enabled Sets whether to disable/re-enable the local audio function:
- true: (Default) Re-enable the local audio function, that is, to start the local audio capturing device (for example, the microphone).
- false: Disable the local audio function, that is, to stop local audio capturing.
@@ -4932,17 +5794,16 @@ class IRtcEngine
*/
virtual int disableAudio() = 0;
- /** Sets the audio parameters and application scenarios.
+ /** Sets the audio parameters and application scenarios.
@note
- - The *setAudioProfile* method must be called before the \ref IRtcEngine::joinChannel "joinChannel" method.
- - In the Communication and Live-broadcast profiles, the bitrate may be different from your settings due to network self-adaptation.
+ - The `setAudioProfile` method must be called before the \ref IRtcEngine::joinChannel "joinChannel" method.
+ - In the `COMMUNICATION` and `LIVE_BROADCASTING` profiles, the bitrate may be different from your settings due to network self-adaptation.
- In scenarios requiring high-quality audio, for example, a music teaching scenario, we recommend setting profile as AUDIO_PROFILE_MUSIC_HIGH_QUALITY (4) and scenario as AUDIO_SCENARIO_GAME_STREAMING (3).
@param profile Sets the sample rate, bitrate, encoding mode, and the number of channels. See #AUDIO_PROFILE_TYPE.
- @param scenario Sets the audio application scenario. See #AUDIO_SCENARIO_TYPE.
- Under different audio scenarios, the device uses different volume tracks,
- i.e. either the in-call volume or the media volume. For details, see
+ @param scenario Sets the audio application scenario. See #AUDIO_SCENARIO_TYPE.
+ Under different audio scenarios, the device uses different volume types. For details, see
[What is the difference between the in-call volume and the media volume?](https://docs.agora.io/en/faq/system_volume).
@return
@@ -4953,9 +5814,10 @@ class IRtcEngine
/** Stops/Resumes sending the local audio stream.
A successful \ref agora::rtc::IRtcEngine::muteLocalAudioStream "muteLocalAudioStream" method call triggers the \ref agora::rtc::IRtcEngineEventHandler::onUserMuteAudio "onUserMuteAudio" callback on the remote client.
- @note
+
+ @note
- When @p mute is set as @p true, this method does not disable the microphone, which does not affect any ongoing recording.
- - If you call \ref agora::rtc::IRtcEngine::setChannelProfile "setChannelProfile" after this method, the SDK resets whether or not to mute the local audio according to the channel profile and user role. Therefore, we recommend calling this method after the `setChannelProfile` method.
+ - You can call this method either before or after joining a channel. If you call \ref agora::rtc::IRtcEngine::setChannelProfile "setChannelProfile" after this method, the SDK resets whether or not to mute the local audio according to the channel profile and user role. Therefore, we recommend calling this method after the `setChannelProfile` method.
@param mute Sets whether to send/stop sending the local audio stream:
- true: Stops sending the local audio stream.
@@ -4968,6 +5830,8 @@ class IRtcEngine
virtual int muteLocalAudioStream(bool mute) = 0;
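A trivial sketch of the toggle described above; calling it after `setChannelProfile` keeps the profile/role defaults from overriding the mute state.

```cpp
// Sketch: stop/resume sending the local audio stream without disabling the microphone.
int setLocalAudioPublished(agora::rtc::IRtcEngine* engine, bool publish) {
    return engine->muteLocalAudioStream(!publish);  // true = stop sending local audio
}
```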
/** Stops/Resumes receiving all remote users' audio streams.
+ @note You can call this method either before or after joining a channel.
+
@param mute Sets whether to receive/stop receiving all remote users' audio streams.
- true: Stops receiving all remote users' audio streams.
- false: (Default) Receives all remote users' audio streams.
@@ -4976,14 +5840,14 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int muteAllRemoteAudioStreams(bool mute) = 0;
+ virtual int muteAllRemoteAudioStreams(bool mute) = 0;
/** Stops/Resumes receiving all remote users' audio streams by default.
-
+
You can call this method either before or after joining a channel. If you call `setDefaultMuteAllRemoteAudioStreams (true)` after joining a channel, the remote audio streams of all subsequent users are not received.
- @note If you want to resume receiving the audio stream, call \ref agora::rtc::IRtcEngine::muteRemoteAudioStream "muteRemoteAudioStream (false)",
- and specify the ID of the remote user whose audio stream you want to receive.
- To receive the audio streams of multiple remote users, call `muteRemoteAudioStream (false)` as many times.
+ @note If you want to resume receiving the audio stream, call \ref agora::rtc::IRtcEngine::muteRemoteAudioStream "muteRemoteAudioStream (false)",
+ and specify the ID of the remote user whose audio stream you want to receive.
 +     To receive the audio streams of multiple remote users, call `muteRemoteAudioStream (false)` as many times as necessary.
Calling `setDefaultMuteAllRemoteAudioStreams (false)` resumes receiving the audio streams of subsequent users only.
@param mute Sets whether to receive/stop receiving all remote users' audio streams by default:
@@ -4994,12 +5858,12 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int setDefaultMuteAllRemoteAudioStreams(bool mute) = 0;
-
+ virtual int setDefaultMuteAllRemoteAudioStreams(bool mute) = 0;
+
/** Adjusts the playback volume of a specified remote user.
You can call this method as many times as necessary to adjust the playback volume of different remote users, or to repeatedly adjust the playback volume of the same remote user.
-
+
@note
- Call this method after joining a channel.
- The playback volume here refers to the mixed volume of a specified remote user.
@@ -5012,31 +5876,34 @@ class IRtcEngine
@return
- 0: Success.
- - < 0: Failure.
+ - < 0: Failure.
*/
virtual int adjustUserPlaybackSignalVolume(unsigned int uid, int volume) = 0;
- /** Stops/Resumes receiving a specified remote user's audio stream.
+ /** Stops/Resumes receiving a specified remote user's audio stream.
- @note If you called the \ref agora::rtc::IRtcEngine::muteAllRemoteAudioStreams "muteAllRemoteAudioStreams" method and set @p mute as @p true to stop receiving all remote users' audio streams, call the *muteAllRemoteAudioStreams* method and set @p mute as @p false before calling this method. The *muteAllRemoteAudioStreams* method sets all remote audio streams, while the *muteRemoteAudioStream* method sets a specified remote audio stream.
+ @note
+ - You can call this method either before or after joining a channel. If you call it before joining a channel,
+ you need to maintain the `uid` of the remote user on your app level.
+ - If you called the \ref agora::rtc::IRtcEngine::muteAllRemoteAudioStreams "muteAllRemoteAudioStreams" method and set @p mute as @p true to stop receiving all remote users' audio streams, call the *muteAllRemoteAudioStreams* method and set @p mute as @p false before calling this method. The *muteAllRemoteAudioStreams* method sets all remote audio streams, while the *muteRemoteAudioStream* method sets a specified remote audio stream.
- @param userId User ID of the specified remote user sending the audio.
- @param mute Sets whether to receive/stop receiving a specified remote user's audio stream:
- - true: Stops receiving the specified remote user's audio stream.
- - false: (Default) Receives the specified remote user's audio stream.
+ @param userId User ID of the specified remote user sending the audio.
+ @param mute Sets whether to receive/stop receiving a specified remote user's audio stream:
+ - true: Stops receiving the specified remote user's audio stream.
+ - false: (Default) Receives the specified remote user's audio stream.
- @return
- - 0: Success.
- - < 0: Failure.
+ @return
+ - 0: Success.
+ - < 0: Failure.
- */
- virtual int muteRemoteAudioStream(uid_t userId, bool mute) = 0;
+ */
+ virtual int muteRemoteAudioStream(uid_t userId, bool mute) = 0;
/** Stops/Resumes sending the local video stream.
A successful \ref agora::rtc::IRtcEngine::muteLocalVideoStream "muteLocalVideoStream" method call triggers the \ref agora::rtc::IRtcEngineEventHandler::onUserMuteVideo "onUserMuteVideo" callback on the remote client.
@note
- When set to *true*, this method does not disable the camera which does not affect the retrieval of the local video streams. This method executes faster than the \ref agora::rtc::IRtcEngine::enableLocalVideo "enableLocalVideo" method which controls the sending of the local video stream.
- - If you call \ref agora::rtc::IRtcEngine::setChannelProfile "setChannelProfile" after this method, the SDK resets whether or not to mute the local video according to the channel profile and user role. Therefore, we recommend calling this method after the `setChannelProfile` method.
+ - You can call this method either before or after joining a channel. If you call \ref agora::rtc::IRtcEngine::setChannelProfile "setChannelProfile" after this method, the SDK resets whether or not to mute the local video according to the channel profile and user role. Therefore, we recommend calling this method after the `setChannelProfile` method.
@param mute Sets whether to send/stop sending the local video stream:
- true: Stop sending the local video stream.
@@ -5046,7 +5913,7 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int muteLocalVideoStream(bool mute) = 0;
+ virtual int muteLocalVideoStream(bool mute) = 0;
/** Enables/Disables the local video capture.
This method disables or re-enables the local video capturer, and does not affect receiving the remote video stream.
@@ -5055,7 +5922,9 @@ class IRtcEngine
After the local video capturer is successfully disabled or re-enabled, the SDK triggers the \ref agora::rtc::IRtcEngineEventHandler::onUserEnableLocalVideo "onUserEnableLocalVideo" callback on the remote client.
- @note This method affects the internal engine and can be called after the \ref agora::rtc::IRtcEngine::leaveChannel "leaveChannel" method.
+ @note
+ - You can call this method either before or after joining a channel.
+ - This method affects the internal engine and can be called after the \ref agora::rtc::IRtcEngine::leaveChannel "leaveChannel" method.
@param enabled Sets whether to disable/re-enable the local video, including the capturer, renderer, and sender:
- true: (Default) Re-enable the local video.
@@ -5065,9 +5934,11 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int enableLocalVideo(bool enabled) = 0;
+ virtual int enableLocalVideo(bool enabled) = 0;
/** Stops/Resumes receiving all video stream from a specified remote user.
+ @note You can call this method either before or after joining a channel.
+
@param mute Sets whether to receive/stop receiving all remote users' video streams:
- true: Stop receiving all remote users' video streams.
- false: (Default) Receive all remote users' video streams.
@@ -5076,9 +5947,9 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int muteAllRemoteVideoStreams(bool mute) = 0;
+ virtual int muteAllRemoteVideoStreams(bool mute) = 0;
/** Stops/Resumes receiving all remote users' video streams by default.
-
+
You can call this method either before or after joining a channel. If you call `setDefaultMuteAllRemoteVideoStreams (true)` after joining a channel, the remote video streams of all subsequent users are not received.
@note If you want to resume receiving the video stream, call \ref agora::rtc::IRtcEngine::muteRemoteVideoStream "muteRemoteVideoStream (false)", and specify the ID of the remote user whose video stream you want to receive. To receive the video streams of multiple remote users, call `muteRemoteVideoStream (false)` as many times as necessary. Calling `setDefaultMuteAllRemoteVideoStreams (false)` resumes receiving the video streams of subsequent users only.
@@ -5091,10 +5962,13 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int setDefaultMuteAllRemoteVideoStreams(bool mute) = 0;
+ virtual int setDefaultMuteAllRemoteVideoStreams(bool mute) = 0;
/** Stops/Resumes receiving the video stream from a specified remote user.
- @note If you called the \ref agora::rtc::IRtcEngine::muteAllRemoteVideoStreams "muteAllRemoteVideoStreams" method and set @p mute as @p true to stop receiving all remote video streams, call the *muteAllRemoteVideoStreams* method and set @p mute as @p false before calling this method.
+ @note
+ - You can call this method either before or after joining a channel. If you call it before joining a channel, you
+ need to maintain the `uid` of the remote user on your app level.
+ - If you called the \ref agora::rtc::IRtcEngine::muteAllRemoteVideoStreams "muteAllRemoteVideoStreams" method and set @p mute as @p true to stop receiving all remote video streams, call the *muteAllRemoteVideoStreams* method and set @p mute as @p false before calling this method.
@param userId User ID of the specified remote user.
@param mute Sets whether to stop/resume receiving the video stream from a specified remote user:
@@ -5105,70 +5979,82 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int muteRemoteVideoStream(uid_t userId, bool mute) = 0;
+ virtual int muteRemoteVideoStream(uid_t userId, bool mute) = 0;
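A hedged sketch of the ordering constraint in the note above, assuming `rtcEngine` is a valid `agora::rtc::IRtcEngine*` and `remoteUid` is a placeholder user ID maintained by the app.

    agora::rtc::uid_t remoteUid = 12345;         // placeholder; not a real user ID
    rtcEngine->muteAllRemoteVideoStreams(true);  // stop receiving all remote video streams
    // Lift the global mute before using the per-user API again:
    rtcEngine->muteAllRemoteVideoStreams(false);
    rtcEngine->muteRemoteVideoStream(remoteUid, true);  // then stop only this user's stream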
/** Sets the stream type of the remote video.
- Under limited network conditions, if the publisher has not disabled the dual-stream mode using `enableDualStreamMode(false)`,
- the receiver can choose to receive either the high-quality video stream (the high resolution, and high bitrate video stream) or
+ Under limited network conditions, if the publisher has not disabled the dual-stream mode using `enableDualStreamMode(false)`,
+ the receiver can choose to receive either the high-quality video stream (the high resolution, and high bitrate video stream) or
the low-video stream (the low resolution, and low bitrate video stream).
- By default, users receive the high-quality video stream. Call this method if you want to switch to the low-video stream.
- This method allows the app to adjust the corresponding video stream type based on the size of the video window to
+ By default, users receive the high-quality video stream. Call this method if you want to switch to the low-video stream.
+ This method allows the app to adjust the corresponding video stream type based on the size of the video window to
reduce the bandwidth and resources.
- The aspect ratio of the low-video stream is the same as the high-quality video stream. Once the resolution of the high-quality video
+ The aspect ratio of the low-video stream is the same as the high-quality video stream. Once the resolution of the high-quality video
stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-video stream.
The method result returns in the \ref agora::rtc::IRtcEngineEventHandler::onApiCallExecuted "onApiCallExecuted" callback.
+ @note You can call this method either before or after joining a channel. If you call both
+ \ref IRtcEngine::setRemoteVideoStreamType "setRemoteVideoStreamType" and
+ \ref IRtcEngine::setRemoteDefaultVideoStreamType "setRemoteDefaultVideoStreamType", the SDK applies the settings in
+ the \ref IRtcEngine::setRemoteVideoStreamType "setRemoteVideoStreamType" method.
+
@param userId ID of the remote user sending the video stream.
@param streamType Sets the video-stream type. See #REMOTE_VIDEO_STREAM_TYPE.
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setRemoteVideoStreamType(uid_t userId, REMOTE_VIDEO_STREAM_TYPE streamType) = 0;
+ virtual int setRemoteVideoStreamType(uid_t userId, REMOTE_VIDEO_STREAM_TYPE streamType) = 0;
/** Sets the default stream type of remote videos.
- Under limited network conditions, if the publisher has not disabled the dual-stream mode using `enableDualStreamMode(false)`,
- the receiver can choose to receive either the high-quality video stream (the high resolution, and high bitrate video stream) or
+ Under limited network conditions, if the publisher has not disabled the dual-stream mode using `enableDualStreamMode(false)`,
+ the receiver can choose to receive either the high-quality video stream (the high resolution, and high bitrate video stream) or
the low-video stream (the low resolution, and low bitrate video stream).
- By default, users receive the high-quality video stream. Call this method if you want to switch to the low-video stream.
- This method allows the app to adjust the corresponding video stream type based on the size of the video window to
+ By default, users receive the high-quality video stream. Call this method if you want to switch to the low-video stream.
+ This method allows the app to adjust the corresponding video stream type based on the size of the video window to
reduce the bandwidth and resources. The aspect ratio of the low-video stream is the same as the high-quality video stream.
Once the resolution of the high-quality video
stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-video stream.
The method result returns in the \ref agora::rtc::IRtcEngineEventHandler::onApiCallExecuted "onApiCallExecuted" callback.
+ @note You can call this method either before or after joining a channel. If you call both
+ \ref IRtcEngine::setRemoteVideoStreamType "setRemoteVideoStreamType" and
+ \ref IRtcEngine::setRemoteDefaultVideoStreamType "setRemoteDefaultVideoStreamType", the SDK applies the settings in
+ the \ref IRtcEngine::setRemoteVideoStreamType "setRemoteVideoStreamType" method.
+
@param streamType Sets the default video-stream type. See #REMOTE_VIDEO_STREAM_TYPE.
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setRemoteDefaultVideoStreamType(REMOTE_VIDEO_STREAM_TYPE streamType) = 0;
+ virtual int setRemoteDefaultVideoStreamType(REMOTE_VIDEO_STREAM_TYPE streamType) = 0;
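A sketch of dual-stream selection and of the precedence rule in the notes above, assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`; the enumerators `REMOTE_VIDEO_STREAM_HIGH`/`REMOTE_VIDEO_STREAM_LOW` and the placeholder `featuredUid` are assumptions.

    agora::rtc::uid_t featuredUid = 42;     // placeholder user ID
    rtcEngine->enableDualStreamMode(true);  // publisher keeps dual-stream mode on
    // Receiver: take the low stream by default, but keep the high stream for one user.
    // setRemoteVideoStreamType overrides setRemoteDefaultVideoStreamType for that user.
    rtcEngine->setRemoteDefaultVideoStreamType(agora::rtc::REMOTE_VIDEO_STREAM_LOW);
    rtcEngine->setRemoteVideoStreamType(featuredUid, agora::rtc::REMOTE_VIDEO_STREAM_HIGH);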
/** Enables the \ref agora::rtc::IRtcEngineEventHandler::onAudioVolumeIndication "onAudioVolumeIndication" callback at a set time interval to report on which users are speaking and the speakers' volume.
Once this method is enabled, the SDK returns the volume indication in the \ref agora::rtc::IRtcEngineEventHandler::onAudioVolumeIndication "onAudioVolumeIndication" callback at the set time interval, whether or not any user is speaking in the channel.
+ @note You can call this method either before or after joining a channel.
+
@param interval Sets the time interval between two consecutive volume indications:
- ≤ 0: Disables the volume indication.
- > 0: Time interval (ms) between two consecutive volume indications. We recommend setting @p interval > 200 ms. Do not set @p interval < 10 ms, or the *onAudioVolumeIndication* callback will not be triggered.
@param smooth Smoothing factor sets the sensitivity of the audio volume indicator. The value ranges between 0 and 10. The greater the value, the more sensitive the indicator. The recommended value is 3.
@param report_vad
-
+
- true: Enable the voice activity detection of the local user. Once it is enabled, the `vad` parameter of the `onAudioVolumeIndication` callback reports the voice activity status of the local user.
- false: (Default) Disable the voice activity detection of the local user. Once it is disabled, the `vad` parameter of the `onAudioVolumeIndication` callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user.
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int enableAudioVolumeIndication(int interval, int smooth, bool report_vad) = 0;
+ virtual int enableAudioVolumeIndication(int interval, int smooth, bool report_vad) = 0;
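A minimal sketch of enabling volume indication with the recommended values from the parameter descriptions above, assuming `rtcEngine` is an initialized `agora::rtc::IRtcEngine*`.

    // Report speaker volumes every 400 ms (> 200 ms as recommended), smoothing factor 3,
    // with local voice activity detection enabled.
    if (rtcEngine->enableAudioVolumeIndication(400, 3, true) != 0) {
        // a negative return value indicates failure, per the return values above
    }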
/** @deprecated Starts an audio recording.
-
+
Use \ref IRtcEngine::startAudioRecording(const char* filePath, int sampleRate, AUDIO_RECORDING_QUALITY_TYPE quality) "startAudioRecording"2 instead.
The SDK allows recording during a call. Supported formats:
@@ -5189,20 +6075,20 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int startAudioRecording(const char* filePath, AUDIO_RECORDING_QUALITY_TYPE quality) = 0;
+ virtual int startAudioRecording(const char* filePath, AUDIO_RECORDING_QUALITY_TYPE quality) = 0;
/** Starts an audio recording on the client.
- *
- * The SDK allows recording during a call. After successfully calling this method, you can record the audio of all the users in the channel and get an audio recording file.
+ *
+ * The SDK allows recording during a call. After successfully calling this method, you can record the audio of all the users in the channel and get an audio recording file.
* Supported formats of the recording file are as follows:
* - .wav: Large file size with high fidelity.
* - .aac: Small file size with low fidelity.
- *
+ *
* @note
* - Ensure that the directory you use to save the recording file exists and is writable.
* - This method is usually called after the `joinChannel` method. The recording automatically stops when you call the `leaveChannel` method.
* - For better recording effects, set quality as #AUDIO_RECORDING_QUALITY_MEDIUM or #AUDIO_RECORDING_QUALITY_HIGH when `sampleRate` is 44.1 kHz or 48 kHz.
- *
+ *
* @param filePath Pointer to the absolute file path of the recording file. The string of the file name is in UTF-8, such as c:/music/audio.aac.
* @param sampleRate Sample rate (kHz) of the recording file. Supported values are as follows:
* - 16
@@ -5210,12 +6096,12 @@ class IRtcEngine
* - 44.1
* - 48
* @param quality Sets the audio recording quality. See #AUDIO_RECORDING_QUALITY_TYPE.
- *
+ *
* @return
* - 0: Success.
* - < 0: Failure.
*/
- virtual int startAudioRecording(const char* filePath, int sampleRate, AUDIO_RECORDING_QUALITY_TYPE quality) = 0;
+ virtual int startAudioRecording(const char* filePath, int sampleRate, AUDIO_RECORDING_QUALITY_TYPE quality) = 0;
/** Stops an audio recording on the client.
 You can call this method before calling the \ref agora::rtc::IRtcEngine::leaveChannel "leaveChannel" method; otherwise, the recording automatically stops when the \ref agora::rtc::IRtcEngine::leaveChannel "leaveChannel" method is called.
@@ -5224,7 +6110,7 @@ class IRtcEngine
- 0: Success
- < 0: Failure.
*/
- virtual int stopAudioRecording() = 0;
+ virtual int stopAudioRecording() = 0;
/** Starts playing and mixing the music file.
This method mixes the specified local audio file with the audio stream from the microphone, or replaces the microphone's audio stream with the specified local audio file. You can choose whether the other user can hear the local audio playback and specify the number of playback loops. This method also supports online music playback.
@@ -5253,7 +6139,7 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int startAudioMixing(const char* filePath, bool loopback, bool replace, int cycle) = 0;
+ virtual int startAudioMixing(const char* filePath, bool loopback, bool replace, int cycle) = 0;
/** Stops playing and mixing the music file.
Call this method when you are in a channel.
@@ -5262,7 +6148,7 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int stopAudioMixing() = 0;
+ virtual int stopAudioMixing() = 0;
/** Pauses playing and mixing the music file.
Call this method when you are in a channel.
@@ -5271,7 +6157,7 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int pauseAudioMixing() = 0;
+ virtual int pauseAudioMixing() = 0;
/** Resumes playing and mixing the music file.
Call this method when you are in a channel.
@@ -5280,7 +6166,7 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int resumeAudioMixing() = 0;
+ virtual int resumeAudioMixing() = 0;
/** **DEPRECATED** Agora does not recommend using this method.
Sets the high-quality audio preferences. Call this method and set all parameters before joining a channel.
@@ -5304,9 +6190,9 @@ class IRtcEngine
virtual int setHighQualityAudioParameters(bool fullband, bool stereo, bool fullBitrate) = 0;
/** Adjusts the volume during audio mixing.
- Call this method when you are in a channel.
-
- @note Calling this method does not affect the volume of audio effect file playback invoked by the \ref agora::rtc::IRtcEngine::playEffect "playEffect" method.
+ @note
+ - Calling this method does not affect the volume of audio effect file playback invoked by the \ref agora::rtc::IRtcEngine::playEffect "playEffect" method.
+ - Ensure that this method is called after \ref IRtcEngine::startAudioMixing "startAudioMixing".
@param volume Audio mixing volume. The value ranges between 0 and 100 (default).
@@ -5314,10 +6200,10 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int adjustAudioMixingVolume(int volume) = 0;
+ virtual int adjustAudioMixingVolume(int volume) = 0;
/** Adjusts the audio mixing volume for local playback.
- @note Call this method when you are in a channel.
+ @note Ensure that this method is called after \ref IRtcEngine::startAudioMixing "startAudioMixing".
@param volume Audio mixing volume for local playback. The value ranges between 0 and 100 (default).
@@ -5339,7 +6225,7 @@ class IRtcEngine
virtual int getAudioMixingPlayoutVolume() = 0;
/** Adjusts the audio mixing volume for publishing (for remote users).
- @note Call this method when you are in a channel.
+ @note Ensure that this method is called after \ref IRtcEngine::startAudioMixing "startAudioMixing".
@param volume Audio mixing volume for publishing. The value ranges between 0 and 100 (default).
@@ -5368,7 +6254,7 @@ class IRtcEngine
- ≥ 0: The audio mixing duration, if this method call succeeds.
- < 0: Failure.
*/
- virtual int getAudioMixingDuration() = 0;
+ virtual int getAudioMixingDuration() = 0;
/** Retrieves the playback position (ms) of the music file.
Call this method when you are in a channel.
@@ -5377,16 +6263,18 @@ class IRtcEngine
- ≥ 0: The current playback position of the audio mixing, if this method call succeeds.
- < 0: Failure.
*/
- virtual int getAudioMixingCurrentPosition() = 0;
+ virtual int getAudioMixingCurrentPosition() = 0;
/** Sets the playback position of the music file to a different starting position (the default plays from the beginning).
+ @note Ensure that this method is called after \ref IRtcEngine::startAudioMixing "startAudioMixing".
+
@param pos The playback starting position (ms) of the music file.
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setAudioMixingPosition(int pos /*in ms*/) = 0;
+ virtual int setAudioMixingPosition(int pos /*in ms*/) = 0;
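A sketch of the audio-mixing call order implied by the notes above (volume and position calls only after `startAudioMixing`), assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`; the file path is a placeholder.

    // Mix a local music file with the microphone, audible to remote users, played once.
    rtcEngine->startAudioMixing("/sdcard/music/bgm.mp3", false /*loopback*/, false /*replace*/, 1 /*cycle*/);
    // Only valid after startAudioMixing:
    rtcEngine->adjustAudioMixingVolume(60);        // 0-100, default 100
    rtcEngine->setAudioMixingPosition(30 * 1000);  // jump to 30 s into the file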
/** Sets the pitch of the local music file.
* @since v3.0.1
*
@@ -5409,23 +6297,29 @@ class IRtcEngine
The value ranges between 0.0 and 100.0.
+ @note Ensure that this method is called after \ref IRtcEngine::playEffect "playEffect".
+
@return
- ≥ 0: Volume of the audio effects, if this method call succeeds.
- < 0: Failure.
*/
- virtual int getEffectsVolume() = 0;
+ virtual int getEffectsVolume() = 0;
/** Sets the volume of the audio effects.
+ @note Ensure that this method is called after \ref IRtcEngine::playEffect "playEffect".
+
@param volume Sets the volume of the audio effects. The value ranges between 0 and 100 (default).
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setEffectsVolume(int volume) = 0;
+ virtual int setEffectsVolume(int volume) = 0;
/** Sets the volume of a specified audio effect.
+ @note Ensure that this method is called after \ref IRtcEngine::playEffect "playEffect".
+
@param soundId ID of the audio effect. Each audio effect has a unique ID.
@param volume Sets the volume of the specified audio effect. The value ranges between 0 and 100 (default).
@@ -5433,13 +6327,18 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int setVolumeOfEffect(int soundId, int volume) = 0;
+ virtual int setVolumeOfEffect(int soundId, int volume) = 0;
#if defined(__ANDROID__) || (defined(__APPLE__) && TARGET_OS_IOS)
/**
- * Enables/Disables face detection for the local user. Applies to Android and iOS only.
+ * Enables/Disables face detection for the local user.
+ *
* @since v3.0.1
*
+ * @note
+ * - Applies to Android and iOS only.
+ * - You can call this method either before or after joining a channel.
+ *
* Once face detection is enabled, the SDK triggers the \ref IRtcEngineEventHandler::onFacePositionChanged "onFacePositionChanged" callback
* to report the face information of the local user, which includes the following aspects:
* - The width and height of the local video.
@@ -5466,6 +6365,7 @@ class IRtcEngine
@note
- If the audio effect is preloaded into the memory through the \ref IRtcEngine::preloadEffect "preloadEffect" method, the value of @p soundID must be the same as that in the *preloadEffect* method.
- Playing multiple online audio effect files simultaneously is not supported on macOS and Windows.
+ - Ensure that you call this method after joining a channel.
@param filePath Specifies the absolute path (including the suffixes of the filename) to the local audio effect file or the URL of the online audio effect file, for example, c:/music/audio.mp4. Supported audio formats: mp3, mp4, m4a, aac, 3gp, mkv and wav.
@param loopCount Sets the number of times the audio effect loops:
@@ -5486,7 +6386,7 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int playEffect(int soundId, const char* filePath, int loopCount, double pitch, double pan, int gain, bool publish = false) = 0;
+ virtual int playEffect(int soundId, const char* filePath, int loopCount, double pitch, double pan, int gain, bool publish = false) = 0;
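A sketch of a typical effect-playback sequence under the notes above, assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`; the sound ID and file path are placeholders.

    const int soundId = 1;                                   // app-defined effect ID
    const char* effectPath = "/sdcard/audio/applause.mp3";   // placeholder path
    rtcEngine->preloadEffect(soundId, effectPath);           // optional preload into memory
    rtcEngine->playEffect(soundId, effectPath, 0 /*loopCount*/, 1.0 /*pitch*/,
                          0.0 /*pan*/, 100 /*gain*/, true /*publish*/);
    rtcEngine->setVolumeOfEffect(soundId, 80);  // volume calls are valid only after playEffect
    // ... later ...
    rtcEngine->stopEffect(soundId);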
/** Stops playing a specified audio effect.
@param soundId ID of the audio effect to stop playing. Each audio effect has a unique ID.
@@ -5495,14 +6395,14 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int stopEffect(int soundId) = 0;
+ virtual int stopEffect(int soundId) = 0;
/** Stops playing all audio effects.
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int stopAllEffects() = 0;
+ virtual int stopAllEffects() = 0;
/** Preloads a specified audio effect file into the memory.
@@ -5519,7 +6419,7 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int preloadEffect(int soundId, const char* filePath) = 0;
+ virtual int preloadEffect(int soundId, const char* filePath) = 0;
/** Releases a specified preloaded audio effect from the memory.
@param soundId ID of the audio effect. Each audio effect has a unique ID.
@@ -5527,7 +6427,7 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int unloadEffect(int soundId) = 0;
+ virtual int unloadEffect(int soundId) = 0;
/** Pauses a specified audio effect.
@param soundId ID of the audio effect. Each audio effect has a unique ID.
@@ -5535,14 +6435,14 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int pauseEffect(int soundId) = 0;
+ virtual int pauseEffect(int soundId) = 0;
/** Pauses all audio effects.
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int pauseAllEffects() = 0;
+ virtual int pauseAllEffects() = 0;
/** Resumes playing a specified audio effect.
@param soundId ID of the audio effect. Each audio effect has a unique ID.
@@ -5550,14 +6450,14 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int resumeEffect(int soundId) = 0;
+ virtual int resumeEffect(int soundId) = 0;
/** Resumes playing all audio effects.
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int resumeAllEffects() = 0;
+ virtual int resumeAllEffects() = 0;
/** Enables/Disables stereo panning for remote users.
Ensure that you call this method before joinChannel to enable stereo panning for remote users so that the local user can track the position of a remote user by calling \ref agora::rtc::IRtcEngine::setRemoteVoicePosition "setRemoteVoicePosition".
@@ -5578,6 +6478,7 @@ class IRtcEngine
@note
- For this method to work, enable stereo panning for remote users by calling the \ref agora::rtc::IRtcEngine::enableSoundPositionIndication "enableSoundPositionIndication" method before joining a channel.
- This method requires hardware support. For the best sound positioning, we recommend using a stereo speaker.
+ - Ensure that you call this method after joining a channel.
@param uid The ID of the remote user.
@param pan The sound position of the remote user. The value ranges from -1.0 to 1.0:
@@ -5594,26 +6495,32 @@ class IRtcEngine
/** Changes the voice pitch of the local speaker.
+ @note You can call this method either before or after joining a channel.
+
@param pitch Sets the voice pitch. The value ranges between 0.5 and 2.0. The lower the value, the lower the voice pitch. The default value is 1.0 (no change to the local voice pitch).
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setLocalVoicePitch(double pitch) = 0;
+ virtual int setLocalVoicePitch(double pitch) = 0;
/** Sets the local voice equalization effect.
- @param bandFrequency Sets the band frequency. The value ranges between 0 and 9, representing the respective 10-band center frequencies of the voice effects, including 31, 62, 125, 500, 1k, 2k, 4k, 8k, and 16k Hz. See #AUDIO_EQUALIZATION_BAND_FREQUENCY.
+ @note You can call this method either before or after joining a channel.
+
+ @param bandFrequency Sets the band frequency. The value ranges between 0 and 9, representing the respective 10-band center frequencies of the voice effects, including 31, 62, 125, 250, 500, 1k, 2k, 4k, 8k, and 16k Hz. See #AUDIO_EQUALIZATION_BAND_FREQUENCY.
@param bandGain Sets the gain of each band in dB. The value ranges between -15 and 15.
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setLocalVoiceEqualization(AUDIO_EQUALIZATION_BAND_FREQUENCY bandFrequency, int bandGain) = 0;
+ virtual int setLocalVoiceEqualization(AUDIO_EQUALIZATION_BAND_FREQUENCY bandFrequency, int bandGain) = 0;
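A sketch combining the two local voice adjustments documented above, assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`; the enumerator `AUDIO_EQUALIZATION_BAND_250` is an assumed member of #AUDIO_EQUALIZATION_BAND_FREQUENCY.

    rtcEngine->setLocalVoicePitch(1.2);  // range 0.5-2.0, default 1.0
    rtcEngine->setLocalVoiceEqualization(agora::rtc::AUDIO_EQUALIZATION_BAND_250, 3);  // +3 dB at 250 Hz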
/** Sets the local voice reverberation.
v2.4.0 adds the \ref agora::rtc::IRtcEngine::setLocalVoiceReverbPreset "setLocalVoiceReverbPreset" method, a more user-friendly method for setting the local voice reverberation. You can use this method to set the local reverberation effect, such as pop music, R&B, rock music, and hip-hop.
+ @note You can call this method either before or after joining a channel.
+
@param reverbKey Sets the reverberation key. See #AUDIO_REVERB_TYPE.
@param value Sets the value of the reverberation key.
@@ -5621,10 +6528,13 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int setLocalVoiceReverb(AUDIO_REVERB_TYPE reverbKey, int value) = 0;
+ virtual int setLocalVoiceReverb(AUDIO_REVERB_TYPE reverbKey, int value) = 0;
/** Sets the local voice changer option.
- This method can be used to set the local voice effect for users in a Communication channel or broadcasters in a live broadcast channel.
+ @deprecated Deprecated from v3.2.0. Use \ref IRtcEngine::setAudioEffectPreset "setAudioEffectPreset" or
+ \ref IRtcEngine::setVoiceBeautifierPreset "setVoiceBeautifierPreset" instead.
+
+ This method can be used to set the local voice effect for users in a `COMMUNICATION` channel or hosts in a `LIVE_BROADCASTING` channel.
Voice changer options include the following voice effects:
- `VOICE_CHANGER_XXX`: Changes the local voice to an old man, a little boy, or the Hulk. Applies to the voice talk scenario.
@@ -5634,14 +6544,15 @@ class IRtcEngine
- For a female voice: Adds freshness or vitality to the voice.
@note
- - To achieve better voice effect quality, Agora recommends setting the profile parameter in `setAudioProfile` as `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)`.
+ - To achieve better voice effect quality, Agora recommends setting the profile parameter in \ref IRtcEngine::setAudioProfile "setAudioProfile" as #AUDIO_PROFILE_MUSIC_HIGH_QUALITY (4) or #AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO (5)
- This method works best with the human voice, and Agora does not recommend using it for audio containing music and a human voice.
- - Do not use this method with `setLocalVoiceReverbPreset`, because the method called later overrides the one called earlier. For detailed considerations, see the advanced guide *Voice Changer and Reverberation*.
+ - Do not use this method with \ref IRtcEngine::setLocalVoiceReverbPreset "setLocalVoiceReverbPreset" , because the method called later overrides the one called earlier. For detailed considerations, see the advanced guide *Voice Changer and Reverberation*.
+ - You can call this method either before or after joining a channel.
- @param voiceChanger Sets the local voice changer option. The default value is `VOICE_CHANGER_OFF`, which means the original voice. See details in #VOICE_CHANGER_PRESET.
+ @param voiceChanger Sets the local voice changer option. The default value is #VOICE_CHANGER_OFF, which means the original voice. See details in #VOICE_CHANGER_PRESET
 Gender-based beautification effect works best only when assigned a proper gender:
- - For male: `GENERAL_BEAUTY_VOICE_MALE_MAGNETIC`.
- - For female: `GENERAL_BEAUTY_VOICE_FEMALE_FRESH` or `GENERAL_BEAUTY_VOICE_FEMALE_VITALITY`.
+ - For male: #GENERAL_BEAUTY_VOICE_MALE_MAGNETIC
+ - For female: #GENERAL_BEAUTY_VOICE_FEMALE_FRESH or #GENERAL_BEAUTY_VOICE_FEMALE_VITALITY
Failure to do so can lead to voice distortion.
@return
@@ -5651,7 +6562,10 @@ class IRtcEngine
virtual int setLocalVoiceChanger(VOICE_CHANGER_PRESET voiceChanger) = 0;
/** Sets the local voice reverberation option, including the virtual stereo.
*
- * This method sets the local voice reverberation for users in a Communication channel or broadcasters in a Live-broadcast channel.
+ * @deprecated Deprecated from v3.2.0. Use \ref IRtcEngine::setAudioEffectPreset "setAudioEffectPreset" or
+ * \ref IRtcEngine::setVoiceBeautifierPreset "setVoiceBeautifierPreset" instead.
+ *
+ * This method sets the local voice reverberation for users in a `COMMUNICATION` channel or hosts in a `LIVE_BROADCASTING` channel.
* After successfully calling this method, all users in the channel can hear the voice with reverberation.
*
* @note
@@ -5661,56 +6575,214 @@ class IRtcEngine
* - This method works best with the human voice, and Agora does not recommend using it for audio containing music and a human voice.
* - Do not use this method with `setLocalVoiceChanger`, because the method called later overrides the one called earlier.
* For detailed considerations, see the advanced guide *Voice Changer and Reverberation*.
-
- @param reverbPreset The local voice reverberation option. The default value is `AUDIO_REVERB_OFF`,
- which means the original voice. See #AUDIO_REVERB_PRESET.
- To achieve better voice effects, Agora recommends the enumeration whose name begins with `AUDIO_REVERB_FX`.
-
- @return
- - 0: Success.
- - < 0: Failure.
+ * - You can call this method either before or after joining a channel.
+ *
+ * @param reverbPreset The local voice reverberation option. The default value is `AUDIO_REVERB_OFF`,
+ * which means the original voice. See #AUDIO_REVERB_PRESET.
+ * To achieve better voice effects, Agora recommends the enumeration whose name begins with `AUDIO_REVERB_FX`.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
*/
virtual int setLocalVoiceReverbPreset(AUDIO_REVERB_PRESET reverbPreset) = 0;
-
- /** Specifies an SDK output log file.
-
- The log file records all SDK operations during runtime. If it does not exist, the SDK creates one.
-
- @note
- - The default log file is located at: `C: \Users\\AppData\Local\Agora\`.
- - Ensure that you call this method immediately after calling the \ref agora::rtc::IRtcEngine::initialize "initialize" method, otherwise the output log may not be complete.
-
- @param filePath File path of the log file. The string of the log file is in UTF-8.
-
- @return
- - 0: Success.
- - < 0: Failure.
+ /** Sets an SDK preset voice beautifier effect.
+ *
+ * @since v3.2.0
+ *
+ * Call this method to set an SDK preset voice beautifier effect for the local user who sends an audio stream. After
+ * setting a voice beautifier effect, all users in the channel can hear the effect.
+ *
+ * You can set different voice beautifier effects for different scenarios. See *Set the Voice Beautifier and Audio Effects*.
+ *
+ * To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile" and
+ * setting the `scenario` parameter to `AUDIO_SCENARIO_GAME_STREAMING(3)` and the `profile` parameter to
+ * `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before calling this method.
+ *
+ * @note
+ * - You can call this method either before or after joining a channel.
+ * - Do not set the `profile` parameter of \ref IRtcEngine::setAudioProfile "setAudioProfile" to `AUDIO_PROFILE_SPEECH_STANDARD(1)`
+ * or `AUDIO_PROFILE_IOT(6)`; otherwise, this method call fails.
+ * - This method works best with the human voice. Agora does not recommend using this method for audio containing music.
+ * - After calling this method, Agora recommends not calling the following methods, because they can override \ref IRtcEngine::setVoiceBeautifierPreset "setVoiceBeautifierPreset":
+ * - \ref IRtcEngine::setAudioEffectPreset "setAudioEffectPreset"
+ * - \ref IRtcEngine::setAudioEffectParameters "setAudioEffectParameters"
+ * - \ref IRtcEngine::setLocalVoiceReverbPreset "setLocalVoiceReverbPreset"
+ * - \ref IRtcEngine::setLocalVoiceChanger "setLocalVoiceChanger"
+ * - \ref IRtcEngine::setLocalVoicePitch "setLocalVoicePitch"
+ * - \ref IRtcEngine::setLocalVoiceEqualization "setLocalVoiceEqualization"
+ * - \ref IRtcEngine::setLocalVoiceReverb "setLocalVoiceReverb"
+ *
+ * @param preset The options for SDK preset voice beautifier effects: #VOICE_BEAUTIFIER_PRESET.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
*/
- virtual int setLogFile(const char* filePath) = 0;
+ virtual int setVoiceBeautifierPreset(VOICE_BEAUTIFIER_PRESET preset) = 0;
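A sketch of the recommended profile/scenario setup before applying a preset, assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`; `CHAT_BEAUTIFIER_MAGNETIC` is an assumed member of #VOICE_BEAUTIFIER_PRESET.

    // Recommended audio profile and scenario first, then the voice beautifier preset.
    rtcEngine->setAudioProfile(agora::rtc::AUDIO_PROFILE_MUSIC_HIGH_QUALITY,
                               agora::rtc::AUDIO_SCENARIO_GAME_STREAMING);
    rtcEngine->setVoiceBeautifierPreset(agora::rtc::CHAT_BEAUTIFIER_MAGNETIC);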
+ /** Sets an SDK preset audio effect.
+ *
+ * @since v3.2.0
+ *
+ * Call this method to set an SDK preset audio effect for the local user who sends an audio stream. This audio effect
+ * does not change the gender characteristics of the original voice. After setting an audio effect, all users in the
+ * channel can hear the effect.
+ *
+ * You can set different audio effects for different scenarios. See *Set the Voice Beautifier and Audio Effects*.
+ *
+ * To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile"
+ * and setting the `scenario` parameter to `AUDIO_SCENARIO_GAME_STREAMING(3)` before calling this method.
+ *
+ * @note
+ * - You can call this method either before or after joining a channel.
+ * - Do not set the `profile` parameter of `setAudioProfile` to `AUDIO_PROFILE_SPEECH_STANDARD(1)` or `AUDIO_PROFILE_IOT(6)`;
+ * otherwise, this method call fails.
+ * - This method works best with the human voice. Agora does not recommend using this method for audio containing music.
+ * - If you call this method and set the `preset` parameter to enumerators except `ROOM_ACOUSTICS_3D_VOICE` or `PITCH_CORRECTION`,
+ * do not call \ref IRtcEngine::setAudioEffectParameters "setAudioEffectParameters"; otherwise, `setAudioEffectParameters`
+ * overrides this method.
+ * - After calling this method, Agora recommends not calling the following methods, because they can override `setAudioEffectPreset`:
+ * - \ref IRtcEngine::setVoiceBeautifierPreset "setVoiceBeautifierPreset"
+ * - \ref IRtcEngine::setLocalVoiceReverbPreset "setLocalVoiceReverbPreset"
+ * - \ref IRtcEngine::setLocalVoiceChanger "setLocalVoiceChanger"
+ * - \ref IRtcEngine::setLocalVoicePitch "setLocalVoicePitch"
+ * - \ref IRtcEngine::setLocalVoiceEqualization "setLocalVoiceEqualization"
+ * - \ref IRtcEngine::setLocalVoiceReverb "setLocalVoiceReverb"
+ *
+ * @param preset The options for SDK preset audio effects. See #AUDIO_EFFECT_PRESET.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
+ */
+ virtual int setAudioEffectPreset(AUDIO_EFFECT_PRESET preset) = 0;
+ /** Sets parameters for SDK preset audio effects.
+ *
+ * @since v3.2.0
+ *
+ * Call this method to set the following parameters for the local user who sends an audio stream:
+ * - 3D voice effect: Sets the cycle period of the 3D voice effect.
+ * - Pitch correction effect: Sets the basic mode and tonic pitch of the pitch correction effect. Different songs
+ * have different modes and tonic pitches. Agora recommends binding this method with interface elements to enable
+ * users to adjust the pitch correction interactively.
+ *
+ * After setting parameters, all users in the channel can hear the relevant effect.
+ *
+ * You can call this method directly or after \ref IRtcEngine::setAudioEffectPreset "setAudioEffectPreset". If you
+ * call this method after \ref IRtcEngine::setAudioEffectPreset "setAudioEffectPreset", ensure that you set the preset
+ * parameter of `setAudioEffectPreset` to `ROOM_ACOUSTICS_3D_VOICE` or `PITCH_CORRECTION` and then call this method
+ * to set the same enumerator; otherwise, this method overrides `setAudioEffectPreset`.
+ *
+ * @note
+ * - You can call this method either before or after joining a channel.
+ * - To achieve better audio effect quality, Agora recommends calling \ref IRtcEngine::setAudioProfile "setAudioProfile"
+ * and setting the `scenario` parameter to `AUDIO_SCENARIO_GAME_STREAMING(3)` before calling this method.
+ * - Do not set the `profile` parameter of \ref IRtcEngine::setAudioProfile "setAudioProfile" to `AUDIO_PROFILE_SPEECH_STANDARD(1)` or
+ * `AUDIO_PROFILE_IOT(6)`; otherwise, this method call fails.
+ * - This method works best with the human voice. Agora does not recommend using this method for audio containing music.
+ * - After calling this method, Agora recommends not calling the following methods, because they can override `setAudioEffectParameters`:
+ * - \ref IRtcEngine::setAudioEffectPreset "setAudioEffectPreset"
+ * - \ref IRtcEngine::setVoiceBeautifierPreset "setVoiceBeautifierPreset"
+ * - \ref IRtcEngine::setLocalVoiceReverbPreset "setLocalVoiceReverbPreset"
+ * - \ref IRtcEngine::setLocalVoiceChanger "setLocalVoiceChanger"
+ * - \ref IRtcEngine::setLocalVoicePitch "setLocalVoicePitch"
+ * - \ref IRtcEngine::setLocalVoiceEqualization "setLocalVoiceEqualization"
+ * - \ref IRtcEngine::setLocalVoiceReverb "setLocalVoiceReverb"
+ *
+ * @param preset The options for SDK preset audio effects:
+ * - 3D voice effect: `ROOM_ACOUSTICS_3D_VOICE`.
+ * - Call \ref IRtcEngine::setAudioProfile "setAudioProfile" and set the `profile` parameter to `AUDIO_PROFILE_MUSIC_STANDARD_STEREO(3)`
+ * or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before setting this enumerator; otherwise, the enumerator setting does not take effect.
+ * - If the 3D voice effect is enabled, users need to use stereo audio playback devices to hear the anticipated voice effect.
+ * - Pitch correction effect: `PITCH_CORRECTION`. To achieve better audio effect quality, Agora recommends calling
+ * \ref IRtcEngine::setAudioProfile "setAudioProfile" and setting the `profile` parameter to `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or
+ * `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before setting this enumerator.
+ * @param param1
+ * - If you set `preset` to `ROOM_ACOUSTICS_3D_VOICE`, `param1` sets the cycle period of the 3D voice effect.
+ * The value range is [1,60] and the unit is seconds. The default value is 10 seconds, indicating that the voice moves
+ * around you every 10 seconds.
+ * - If you set `preset` to `PITCH_CORRECTION`, `param1` sets the basic mode of the pitch correction effect:
+ * - `1`: (Default) Natural major scale.
+ * - `2`: Natural minor scale.
+ * - `3`: Japanese pentatonic scale.
+ * @param param2
+ * - If you set `preset` to `ROOM_ACOUSTICS_3D_VOICE`, you do not need to set `param2`.
+ * - If you set `preset` to `PITCH_CORRECTION`, `param2` sets the tonic pitch of the pitch correction effect:
+ * - `1`: A
+ * - `2`: A#
+ * - `3`: B
+ * - `4`: (Default) C
+ * - `5`: C#
+ * - `6`: D
+ * - `7`: D#
+ * - `8`: E
+ * - `9`: F
+ * - `10`: F#
+ * - `11`: G
+ * - `12`: G#
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
+ */
+ virtual int setAudioEffectParameters(AUDIO_EFFECT_PRESET preset, int param1, int param2) = 0;
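A sketch of the pitch-correction tuning flow described above, assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`.

    // Enable pitch correction, then tune it: natural major scale (param1 = 1),
    // tonic pitch C (param2 = 4), i.e. the defaults listed above.
    rtcEngine->setAudioEffectPreset(agora::rtc::PITCH_CORRECTION);
    rtcEngine->setAudioEffectParameters(agora::rtc::PITCH_CORRECTION, 1, 4);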
+ /** Sets the log files that the SDK outputs.
+ *
+ * By default, the SDK outputs five log files, `agorasdk.log`, `agorasdk_1.log`, `agorasdk_2.log`, `agorasdk_3.log`, `agorasdk_4.log`, each with a default size of 1024 KB.
+ * These log files are encoded in UTF-8. The SDK writes the latest logs in `agorasdk.log`. When `agorasdk.log` is full, the SDK deletes the log file with the earliest
+ * modification time among the other four, renames `agorasdk.log` to the name of the deleted log file, and creates a new `agorasdk.log` to record the latest logs.
+ *
+ * @note Ensure that you call this method immediately after calling \ref agora::rtc::IRtcEngine::initialize "initialize" , otherwise the output logs may not be complete.
+ *
+ * @see \ref IRtcEngine::setLogFileSize "setLogFileSize"
+ * @see \ref IRtcEngine::setLogFilter "setLogFilter"
+ *
+ * @param filePath The absolute path of log files. The default file path is `C: \Users\\AppData\Local\Agora\\agorasdk.log`.
+ * Ensure that the directory for the log files exists and is writable. You can use this parameter to rename the log files.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
+ */
+ virtual int setLogFile(const char* filePath) = 0;
/** Sets the output log level of the SDK.
You can use one or a combination of the log filter levels. The log level follows the sequence of OFF, CRITICAL, ERROR, WARNING, INFO, and DEBUG. Choose a level to see the logs preceding that level.
If you set the log level to WARNING, you see the logs within levels CRITICAL, ERROR, and WARNING.
+ @see \ref IRtcEngine::setLogFile "setLogFile"
+ @see \ref IRtcEngine::setLogFileSize "setLogFileSize"
+
@param filter Sets the log filter level. See #LOG_FILTER_TYPE.
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setLogFilter(unsigned int filter) = 0;
- /** Sets the log file size (KB).
-
- The SDK has two log files, each with a default size of 512 KB. If you set @p fileSizeInBytes as 1024 KB, the SDK outputs log files with a total maximum size of 2 MB. If the total size of the log files exceed the set value, the new output log files overwrite the old output log files.
-
- @param fileSizeInKBytes The SDK log file size (KB).
- @return
- - 0: Success.
- - < 0: Failure.
+ virtual int setLogFilter(unsigned int filter) = 0;
+ /** Sets the size of a log file that the SDK outputs.
+ *
+ *
+ * @note If you want to set the log file size, ensure that you call
+ * this method before \ref IRtcEngine::setLogFile "setLogFile", or the logs are cleared.
+ *
+ * By default, the SDK outputs five log files, `agorasdk.log`, `agorasdk_1.log`, `agorasdk_2.log`, `agorasdk_3.log`, `agorasdk_4.log`, each with a default size of 1024 KB.
+ * These log files are encoded in UTF-8. The SDK writes the latest logs in `agorasdk.log`. When `agorasdk.log` is full, the SDK deletes the log file with the earliest
+ * modification time among the other four, renames `agorasdk.log` to the name of the deleted log file, and creates a new `agorasdk.log` to record the latest logs.
+ *
+ * @see \ref IRtcEngine::setLogFile "setLogFile"
+ * @see \ref IRtcEngine::setLogFilter "setLogFilter"
+ *
+ * @param fileSizeInKBytes The size (KB) of a log file. The default value is 1024 KB. If you set `fileSizeInKByte` to 1024 KB,
+ * the SDK outputs at most 5 MB log files; if you set it to less than 1024 KB, the maximum size of a log file is still 1024 KB.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
*/
virtual int setLogFileSize(unsigned int fileSizeInKBytes) = 0;
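A sketch of the log-configuration order implied by the notes above (size before path, both right after `initialize`), assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`; the log path is a placeholder.

    // Call immediately after initialize(). Set the size first, or existing logs are cleared.
    rtcEngine->setLogFileSize(2048);                      // 2048 KB per log file
    rtcEngine->setLogFile("/sdcard/agora/agorasdk.log");  // directory must exist and be writable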
- /**
+ /**
@deprecated This method is deprecated, use the \ref IRtcEngine::setLocalRenderMode(RENDER_MODE_TYPE renderMode, VIDEO_MIRROR_MODE_TYPE mirrorMode) "setLocalRenderMode"2 method instead.
Sets the local video display mode.
@@ -5721,26 +6793,26 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int setLocalRenderMode(RENDER_MODE_TYPE renderMode) = 0;
+ virtual int setLocalRenderMode(RENDER_MODE_TYPE renderMode) = 0;
/** Updates the display mode of the local video view.
@since v3.0.0
After initializing the local video view, you can call this method to update its rendering and mirror modes. It affects only the video view that the local user sees, not the published local video stream.
-
+
@note
- Ensure that you have called the \ref IRtcEngine::setupLocalVideo "setupLocalVideo" method to initialize the local video view before calling this method.
- During a call, you can call this method as many times as necessary to update the display mode of the local video view.
@param renderMode The rendering mode of the local video view. See #RENDER_MODE_TYPE.
- @param mirrorMode
- - The mirror mode of the local video view. See #VIDEO_MIRROR_MODE_TYPE.
+ @param mirrorMode
+ - The mirror mode of the local video view. See #VIDEO_MIRROR_MODE_TYPE.
- **Note**: If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default.
@return
- 0: Success.
- < 0: Failure.
*/
virtual int setLocalRenderMode(RENDER_MODE_TYPE renderMode, VIDEO_MIRROR_MODE_TYPE mirrorMode) = 0;
- /**
+ /**
@deprecated This method is deprecated, use the \ref IRtcEngine::setRemoteRenderMode(uid_t userId, RENDER_MODE_TYPE renderMode, VIDEO_MIRROR_MODE_TYPE mirrorMode) "setRemoteRenderMode"2 method instead.
Sets the video display mode of a specified remote user.
@@ -5752,9 +6824,9 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int setRemoteRenderMode(uid_t userId, RENDER_MODE_TYPE renderMode) = 0;
+ virtual int setRemoteRenderMode(uid_t userId, RENDER_MODE_TYPE renderMode) = 0;
/** Updates the display mode of the video view of a remote user.
-
+
@since v3.0.0
After initializing the video view of a remote user, you can call this method to update its rendering and mirror modes. This method affects only the video view that the local user sees.
@@ -5764,43 +6836,44 @@ class IRtcEngine
@param userId The ID of the remote user.
@param renderMode The rendering mode of the remote video view. See #RENDER_MODE_TYPE.
- @param mirrorMode
+ @param mirrorMode
- The mirror mode of the remote video view. See #VIDEO_MIRROR_MODE_TYPE.
- **Note**: The SDK disables the mirror mode by default.
-
+
@return
- 0: Success.
- < 0: Failure.
*/
virtual int setRemoteRenderMode(uid_t userId, RENDER_MODE_TYPE renderMode, VIDEO_MIRROR_MODE_TYPE mirrorMode) = 0;
- /**
- @deprecated This method is deprecated, use the \ref IRtcEngine::setupLocalVideo "setupLocalVideo"
+ /**
+ @deprecated This method is deprecated, use the \ref IRtcEngine::setupLocalVideo "setupLocalVideo"
or \ref IRtcEngine::setLocalRenderMode(RENDER_MODE_TYPE renderMode, VIDEO_MIRROR_MODE_TYPE mirrorMode) "setLocalRenderMode" method instead.
Sets the local video mirror mode.
- You must call this method before calling the \ref agora::rtc::IRtcEngine::startPreview "startPreview" method, otherwise the mirror mode will not work.
-
- @warning
- - Call this method after calling the \ref agora::rtc::IRtcEngine::setupLocalVideo "setupLocalVideo" method to initialize the local video view.
- - During a call, you can call this method as many times as necessary to update the mirror mode of the local video view.
+ @warning Call this method after calling the \ref agora::rtc::IRtcEngine::setupLocalVideo "setupLocalVideo" method to initialize the local video view.
@param mirrorMode Sets the local video mirror mode. See #VIDEO_MIRROR_MODE_TYPE.
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setLocalVideoMirrorMode(VIDEO_MIRROR_MODE_TYPE mirrorMode) = 0;
- /** Sets the stream mode to the single-stream (default) or dual-stream mode. (Live broadcast only.)
+ virtual int setLocalVideoMirrorMode(VIDEO_MIRROR_MODE_TYPE mirrorMode) = 0;
+ /** Sets the stream mode to the single-stream (default) or dual-stream mode. (`LIVE_BROADCASTING` only.)
If the dual-stream mode is enabled, the receiver can choose to receive the high stream (high-resolution and high-bitrate video stream), or the low stream (low-resolution and low-bitrate video stream).
+ @note You can call this method either before or after joining a channel.
+
@param enabled Sets the stream mode:
- true: Dual-stream mode.
- - false: (Default) Single-stream mode.
+ - false: Single-stream mode.
*/
- virtual int enableDualStreamMode(bool enabled) = 0;
- /** Sets the external audio source. Please call this method before \ref agora::rtc::IRtcEngine::joinChannel "joinChannel".
+ virtual int enableDualStreamMode(bool enabled) = 0;
+ /** Sets the external audio source.
+
+ @note Please call this method before \ref agora::rtc::IRtcEngine::joinChannel "joinChannel"
+ and \ref IRtcEngine::startPreview "startPreview".
@param enabled Sets whether to enable/disable the external audio source:
- true: Enables the external audio source.
@@ -5809,12 +6882,12 @@ class IRtcEngine
@param channels Sets the number of audio channels of the external audio source:
- 1: Mono.
- 2: Stereo.
-
+
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setExternalAudioSource(bool enabled, int sampleRate, int channels) = 0;
+ virtual int setExternalAudioSource(bool enabled, int sampleRate, int channels) = 0;
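A sketch of enabling an external audio source under the note above, assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`.

    // Must be configured before joinChannel and startPreview:
    rtcEngine->setExternalAudioSource(true, 44100 /*sampleRate*/, 1 /*mono*/);
    // ... then join the channel and push PCM data through the media engine.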
/** Sets the external audio sink.
* This method applies to scenarios where you want to use external audio
* data for playback. After enabling the external audio sink, you can call
@@ -5822,9 +6895,10 @@ class IRtcEngine
* it, and play it with the audio effects that you want.
*
* @note
- * Once you enable the external audio sink, the app will not retrieve any
+ * - Once you enable the external audio sink, the app will not retrieve any
* audio data from the
* \ref agora::media::IAudioFrameObserver::onPlaybackAudioFrame "onPlaybackAudioFrame" callback.
+ * - Ensure that you call this method before joining a channel.
*
* @param enabled
* - true: Enables the external audio sink.
@@ -5840,8 +6914,9 @@ class IRtcEngine
* - < 0: Failure.
*/
virtual int setExternalAudioSink(bool enabled, int sampleRate, int channels) = 0;
- /** Sets the audio recording format for the \ref agora::media::IAudioFrameObserver::onRecordAudioFrame "onRecordAudioFrame" callback.
-
+ /** Sets the audio recording format for the \ref agora::media::IAudioFrameObserver::onRecordAudioFrame "onRecordAudioFrame" callback.
+
+ @note Ensure that you call this method before joining a channel.
@param sampleRate Sets the sample rate (@p samplesPerSec) returned in the *onRecordAudioFrame* callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz.
@param channel Sets the number of audio channels (@p channels) returned in the *onRecordAudioFrame* callback:
@@ -5851,48 +6926,52 @@ class IRtcEngine
@param samplesPerCall Sets the number of samples returned in the *onRecordAudioFrame* callback. `samplesPerCall` is usually set as 1024 for RTMP streaming.
- @note The SDK triggers the `onRecordAudioFrame` callback according to the sample interval. Ensure that the sample interval ≥ 0.01 (s). And, Sample interval (sec) = `samplePerCall`/(`sampleRate` × `channel`).
+ @note The SDK triggers the `onRecordAudioFrame` callback according to the sample interval. Ensure that the sample interval ≥ 0.01 (s). And, Sample interval (sec) = `samplePerCall`/(`sampleRate` × `channel`).
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setRecordingAudioFrameParameters(int sampleRate, int channel, RAW_AUDIO_FRAME_OP_MODE_TYPE mode, int samplesPerCall) = 0;
+ virtual int setRecordingAudioFrameParameters(int sampleRate, int channel, RAW_AUDIO_FRAME_OP_MODE_TYPE mode, int samplesPerCall) = 0;
/** Sets the audio playback format for the \ref agora::media::IAudioFrameObserver::onPlaybackAudioFrame "onPlaybackAudioFrame" callback.
-
-
+
+ @note Ensure that you call this method before joining a channel.
+
@param sampleRate Sets the sample rate (@p samplesPerSec) returned in the *onPlaybackAudioFrame* callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz.
@param channel Sets the number of channels (@p channels) returned in the *onPlaybackAudioFrame* callback:
- 1: Mono
- 2: Stereo
@param mode Sets the use mode (see #RAW_AUDIO_FRAME_OP_MODE_TYPE) of the *onPlaybackAudioFrame* callback.
@param samplesPerCall Sets the number of samples returned in the *onPlaybackAudioFrame* callback. `samplesPerCall` is usually set as 1024 for RTMP streaming.
-
+
 @note The SDK triggers the `onPlaybackAudioFrame` callback according to the sample interval. Ensure that the sample interval ≥ 0.01 (s). And, Sample interval (sec) = `samplePerCall`/(`sampleRate` × `channel`).
-
+
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setPlaybackAudioFrameParameters(int sampleRate, int channel, RAW_AUDIO_FRAME_OP_MODE_TYPE mode, int samplesPerCall) = 0;
+ virtual int setPlaybackAudioFrameParameters(int sampleRate, int channel, RAW_AUDIO_FRAME_OP_MODE_TYPE mode, int samplesPerCall) = 0;
/** Sets the mixed audio format for the \ref agora::media::IAudioFrameObserver::onMixedAudioFrame "onMixedAudioFrame" callback.
-
-
+
+ @note Ensure that you call this method before joining a channel.
+
@param sampleRate Sets the sample rate (@p samplesPerSec) returned in the *onMixedAudioFrame* callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz.
@param samplesPerCall Sets the number of samples (`samples`) returned in the *onMixedAudioFrame* callback. `samplesPerCall` is usually set as 1024 for RTMP streaming.
-
+
 @note The SDK triggers the `onMixedAudioFrame` callback according to the sample interval. Ensure that the sample interval ≥ 0.01 (s). And, Sample interval (sec) = `samplePerCall`/(`sampleRate` × `channels`).
-
+
@return
- 0: Success.
- < 0: Failure.
*/
- virtual int setMixedAudioFrameParameters(int sampleRate, int samplesPerCall) = 0;
+ virtual int setMixedAudioFrameParameters(int sampleRate, int samplesPerCall) = 0;
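A sketch of the raw-audio format setters with the sample-interval check from the notes above, assuming an initialized `agora::rtc::IRtcEngine* rtcEngine`; `RAW_AUDIO_FRAME_OP_MODE_READ_ONLY` is an assumed member of #RAW_AUDIO_FRAME_OP_MODE_TYPE, and these calls are made before joining a channel.

    // 44.1 kHz mono with 1024 samples per callback: 1024 / (44100 * 1) ≈ 0.023 s >= 0.01 s.
    rtcEngine->setRecordingAudioFrameParameters(44100, 1, agora::rtc::RAW_AUDIO_FRAME_OP_MODE_READ_ONLY, 1024);
    rtcEngine->setPlaybackAudioFrameParameters(44100, 1, agora::rtc::RAW_AUDIO_FRAME_OP_MODE_READ_ONLY, 1024);
    rtcEngine->setMixedAudioFrameParameters(44100, 1024);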
/** Adjusts the recording volume.
- @param volume Recording volume. To avoid echoes and
- improve call quality, Agora recommends setting the value of volume between
- 0 and 100. If you need to set the value higher than 100, contact
+ @note You can call this method either before or after joining a channel.
+
+ @param volume Recording volume. To avoid echoes and
+ improve call quality, Agora recommends setting the value of volume between
+ 0 and 100. If you need to set the value higher than 100, contact
support@agora.io first.
- 0: Mute.
- 100: Original volume.
@@ -5902,16 +6981,17 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int adjustRecordingSignalVolume(int volume) = 0;
+ virtual int adjustRecordingSignalVolume(int volume) = 0;
/** Adjusts the playback volume of all remote users.
-
- @note
+
+ @note
- This method adjusts the playback volume that is the mixed volume of all remote users.
+ - You can call this method either before or after joining a channel.
- (Since v2.3.2) To mute the local audio playback, call both the `adjustPlaybackSignalVolume` and \ref IRtcEngine::adjustAudioMixingVolume "adjustAudioMixingVolume" methods and set the volume as `0`.
- @param volume The playback volume of all remote users. To avoid echoes and
- improve call quality, Agora recommends setting the value of volume between
- 0 and 100. If you need to set the value higher than 100, contact
+ @param volume The playback volume of all remote users. To avoid echoes and
+ improve call quality, Agora recommends setting the value of volume between
+ 0 and 100. If you need to set the value higher than 100, contact
support@agora.io first.
- 0: Mute.
- 100: Original volume.
@@ -5920,14 +7000,14 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int adjustPlaybackSignalVolume(int volume) = 0;
+ virtual int adjustPlaybackSignalVolume(int volume) = 0;
- /**
+ /**
@deprecated This method is deprecated. As of v3.0.0, the Native SDK automatically enables interoperability with the Web SDK, so you no longer need to call this method.
Enables interoperability with the Agora Web SDK.
- @note
- - This method applies only to the Live-broadcast profile. In the Communication profile, interoperability with the Agora Web SDK is enabled by default.
+ @note
+ - This method applies only to the `LIVE_BROADCASTING` profile. In the `COMMUNICATION` profile, interoperability with the Agora Web SDK is enabled by default.
- If the channel has Web SDK users, ensure that you call this method, or the video of the Native user will be a black screen for the Web user.
@param enabled Sets whether to enable/disable interoperability with the Agora Web SDK:
@@ -5938,9 +7018,9 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int enableWebSdkInteroperability(bool enabled) = 0;
+ virtual int enableWebSdkInteroperability(bool enabled) = 0;
//only for live broadcast
- /** **DEPRECATED** Sets the preferences for the high-quality video. (Live broadcast only).
+ /** **DEPRECATED** Sets the preferences for the high-quality video. (`LIVE_BROADCASTING` only).
This method is deprecated as of v2.4.0.
@@ -5959,10 +7039,12 @@ class IRtcEngine
- Disable the upstream video but enable audio only when the network conditions deteriorate and cannot support both video and audio.
- Re-enable the video when the network conditions improve.
-
+
When the published video stream falls back to audio only or when the audio-only stream switches back to the video, the SDK triggers the \ref agora::rtc::IRtcEngineEventHandler::onLocalPublishFallbackToAudioOnly "onLocalPublishFallbackToAudioOnly" callback.
- @note Agora does not recommend using this method for CDN live streaming, because the remote CDN live user will have a noticeable lag when the published video stream falls back to audio only.
+ @note
+ - Agora does not recommend using this method for CDN live streaming, because the remote CDN live user will have a noticeable lag when the published video stream falls back to audio only.
+ - Ensure that you call this method before joining a channel.
@param option Sets the fallback option for the published video stream:
- #STREAM_FALLBACK_OPTION_DISABLED (0): (Default) No fallback behavior for the published video stream when the uplink network condition is poor. The stream quality is not guaranteed.
@@ -5981,6 +7063,8 @@ class IRtcEngine
When the remotely subscribed video stream falls back to audio only or when the audio-only stream switches back to the video stream, the SDK triggers the \ref agora::rtc::IRtcEngineEventHandler::onRemoteSubscribeFallbackToAudioOnly "onRemoteSubscribeFallbackToAudioOnly" callback.
+ @note Ensure that you call this method before joining a channel.
+
@param option Sets the fallback option for the remotely subscribed video stream. See #STREAM_FALLBACK_OPTIONS.
@return
- 0: Success.
@@ -5989,9 +7073,12 @@ class IRtcEngine
virtual int setRemoteSubscribeFallbackOption(STREAM_FALLBACK_OPTIONS option) = 0;
#if defined(__ANDROID__) || (defined(__APPLE__) && TARGET_OS_IOS)
- /** Switches between front and rear cameras.
+ /** Switches between front and rear cameras.
- @note This method is for Android and iOS only.
+ @note
+ - This method is for Android and iOS only.
+ - Ensure that you call this method after the camera starts, for example, by
+ calling \ref IRtcEngine::startPreview "startPreview" or \ref IRtcEngine::joinChannel "joinChannel".
@return
- 0: Success.
@@ -6002,12 +7089,12 @@ class IRtcEngine
/** Switches between front and rear cameras.
@note This method is for Android and iOS only.
- @note This method is private.
-
+ @note This method is private.
+
@param direction Sets the camera to be used:
- CAMERA_DIRECTION.CAMERA_REAR: Use the rear camera.
- CAMERA_DIRECTION.CAMERA_FRONT: Use the front camera.
-
+
@return
- 0: Success.
- < 0: Failure.
@@ -6020,16 +7107,16 @@ class IRtcEngine
If a user does not call this method, the audio is routed to the earpiece by default. If you need to change the default audio route after joining a channel, call the \ref IRtcEngine::setEnableSpeakerphone "setEnableSpeakerphone" method.
The default setting for each profile:
- - Communication: In a voice call, the default audio route is the earpiece. In a video call, the default audio route is the speakerphone. If a user who is in the Communication profile calls
- the \ref IRtcEngine.disableVideo "disableVideo" method or if the user calls
- the \ref IRtcEngine.muteLocalVideoStream "muteLocalVideoStream" and
- \ref IRtcEngine.muteAllRemoteVideoStreams "muteAllRemoteVideoStreams" methods, the
+ - `COMMUNICATION`: In a voice call, the default audio route is the earpiece. In a video call, the default audio route is the speakerphone. If a user who is in the `COMMUNICATION` profile calls
+ the \ref IRtcEngine.disableVideo "disableVideo" method or if the user calls
+ the \ref IRtcEngine.muteLocalVideoStream "muteLocalVideoStream" and
+ \ref IRtcEngine.muteAllRemoteVideoStreams "muteAllRemoteVideoStreams" methods, the
default audio route switches back to the earpiece automatically.
- - Live Broadcast: Speakerphone.
+ - `LIVE_BROADCASTING`: Speakerphone.
@note
- This method is for Android and iOS only.
- - This method is applicable only to the Communication profile.
+ - This method is applicable only to the `COMMUNICATION` profile.
- For iOS, this method only works in a voice call.
- Call this method before calling the \ref IRtcEngine::joinChannel "joinChannel" method.
@@ -6041,7 +7128,7 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int setDefaultAudioRouteToSpeakerphone(bool defaultToSpeaker) = 0;
+ virtual int setDefaultAudioRouteToSpeakerphone(bool defaultToSpeaker) = 0;
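A minimal usage sketch, assuming `rtcEngine` is an initialized IRtcEngine pointer and `token` is a placeholder for your channel token:

// Route audio to the speakerphone by default; call before joinChannel.
if (rtcEngine->setDefaultAudioRouteToSpeakerphone(true) == 0) {
    rtcEngine->joinChannel(token, "demoChannel", NULL, 0);
    // During the call, setEnableSpeakerphone can still override the route:
    rtcEngine->setEnableSpeakerphone(false);   // switch back to the earpiece
}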
/** Enables/Disables the audio playback route to the speakerphone.
This method sets whether the audio is routed to the speakerphone or earpiece.
@@ -6062,31 +7149,41 @@ class IRtcEngine
- 0: Success.
- < 0: Failure.
*/
- virtual int setEnableSpeakerphone(bool speakerOn) = 0;
+ virtual int setEnableSpeakerphone(bool speakerOn) = 0;
/** Enables in-ear monitoring (for Android and iOS only).
- @param enabled Determines whether to enable in-ear monitoring.
- - true: Enable.
- - false: (Default) Disable.
-
+ *
+ * @note
+ * - Users must use wired earphones to hear their own voices.
+ * - You can call this method either before or after joining a channel.
+ *
+ * @param enabled Determines whether to enable in-ear monitoring.
+ * - true: Enable.
+ * - false: (Default) Disable.
+ *
* @return
- - 0: Success.
- - < 0: Failure.
+ * - 0: Success.
+ * - < 0: Failure.
*/
virtual int enableInEarMonitoring(bool enabled) = 0;
/** Sets the volume of the in-ear monitor.
-
- @param volume Sets the volume of the in-ear monitor. The value ranges between 0 and 100 (default).
-
- @note This method is for Android and iOS only.
-
- @return
- - 0: Success.
- - < 0: Failure.
+ *
+ * @note
+ * - This method is for Android and iOS only.
+ * - Users must use wired earphones to hear their own voices.
+ * - You can call this method either before or after joining a channel.
+ *
+ * @param volume Sets the volume of the in-ear monitor. The value ranges between 0 and 100 (default).
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
*/
- virtual int setInEarMonitoringVolume(int volume) = 0;
+ virtual int setInEarMonitoringVolume(int volume) = 0;
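A minimal in-ear monitoring sketch, assuming `rtcEngine` is an initialized IRtcEngine pointer and the user has wired earphones plugged in:

// Enable in-ear monitoring, then lower the monitor volume from the default of 100.
rtcEngine->enableInEarMonitoring(true);
rtcEngine->setInEarMonitoringVolume(60);   // valid range: 0-100
// ...
rtcEngine->enableInEarMonitoring(false);   // disable when no longer needed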
/** Checks whether the speakerphone is enabled.
- @note This method is for Android and iOS only.
+ @note
+ - This method is for Android and iOS only.
+ - You can call this method either before or after joining a channel.
@return
- 0: Success.
@@ -6105,6 +7202,7 @@ class IRtcEngine
@note
- This method is for iOS only.
 - This method restricts the SDK's manipulation of the audio session. Any operation to the audio session relies solely on the app, other apps, or third-party components.
+ - You can call this method either before or after joining a channel.
@param restriction The operational restriction (bit mask) of the SDK on the audio session. See #AUDIO_SESSION_OPERATION_RESTRICTION.
@@ -6120,6 +7218,8 @@ class IRtcEngine
If you enable loopback recording, the output of the sound card is mixed into the audio stream sent to the other end.
+ @note You can call this method either before or after joining a channel.
+
@param enabled Sets whether to enable/disable loopback recording.
- true: Enable loopback recording.
- false: (Default) Disable loopback recording.
@@ -6134,165 +7234,186 @@ class IRtcEngine
#if (defined(__APPLE__) && TARGET_OS_MAC && !TARGET_OS_IPHONE)
/** Shares the whole or part of a screen by specifying the display ID.
-
- @note This method is for macOS only.
-
- @param displayId The display ID of the screen to be shared. This parameter specifies which screen you want to share.
- @param regionRect (Optional) Sets the relative location of the region to the screen. NIL means sharing the whole screen. See Rectangle. If the specified region overruns the screen, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen.
- @param captureParams Sets the screen sharing encoding parameters. See ScreenCaptureParameters.
-
-
- @return
- - 0: Success.
- - < 0: Failure:
- - #ERR_INVALID_ARGUMENT: the argument is invalid.
+ *
+ * @note
+ * - This method is for macOS only.
+ * - Ensure that you call this method after joining a channel.
+ *
+ * @param displayId The display ID of the screen to be shared. This parameter specifies which screen you want to share.
+ * @param regionRect (Optional) Sets the relative location of the region to the screen. NIL means sharing the whole screen. See Rectangle. If the specified region overruns the screen, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen.
+ * @param captureParams Sets the screen sharing encoding parameters. See ScreenCaptureParameters.
+ *
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure:
+ * - #ERR_INVALID_STATE: the screen sharing state is invalid, probably because another screen or window is being
+ * shared. Call \ref IRtcEngine::stopScreenCapture "stopScreenCapture" to stop the current screen sharing.
+ * - #ERR_INVALID_ARGUMENT: the argument is invalid.
*/
virtual int startScreenCaptureByDisplayId(unsigned int displayId, const Rectangle& regionRect, const ScreenCaptureParameters& captureParams) = 0;
#endif
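A minimal macOS sketch for the display-based capture above, assuming `rtcEngine` is an initialized IRtcEngine pointer and `displayId` was obtained from the system as described in *Share Screen*; the dimensions and frame rate below are illustrative values:

// Share the whole display identified by displayId after joining a channel.
agora::rtc::Rectangle regionRect;                    // width/height of 0 shares the whole screen
agora::rtc::ScreenCaptureParameters params;
params.dimensions = agora::rtc::VideoDimensions(1920, 1080);  // encoded resolution
params.frameRate = 15;
int ret = rtcEngine->startScreenCaptureByDisplayId(displayId, regionRect, params);
if (ret != 0) {
    // ERR_INVALID_STATE usually means another capture is active; stop it and retry.
    rtcEngine->stopScreenCapture();
}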
#if defined(_WIN32)
/** Shares the whole or part of a screen by specifying the screen rect.
-
- @param screenRect Sets the relative location of the screen to the virtual screen. For information on how to get screenRect, see the advanced guide *Share Screen*.
- @param regionRect (Optional) Sets the relative location of the region to the screen. NULL means sharing the whole screen. See Rectangle. If the specified region overruns the screen, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen.
- @param captureParams Sets the screen sharing encoding parameters. See ScreenCaptureParameters.
-
- @return
- - 0: Success.
- - < 0: Failure:
- - #ERR_INVALID_ARGUMENT : the argument is invalid.
+ *
+ * @note
+ * - Ensure that you call this method after joining a channel.
+ * - Applies to the Windows platform only.
+ *
+ * @param screenRect Sets the relative location of the screen to the virtual screen. For information on how to get screenRect, see the advanced guide *Share Screen*.
+ * @param regionRect (Optional) Sets the relative location of the region to the screen. NULL means sharing the whole screen. See Rectangle. If the specified region overruns the screen, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen.
+ * @param captureParams Sets the screen sharing encoding parameters. See ScreenCaptureParameters.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure:
+ * - #ERR_INVALID_STATE: the screen sharing state is invalid, probably because another screen or window is being
+ * shared. Call \ref IRtcEngine::stopScreenCapture "stopScreenCapture" to stop the current screen sharing.
+ * - #ERR_INVALID_ARGUMENT : the argument is invalid.
*/
virtual int startScreenCaptureByScreenRect(const Rectangle& screenRect, const Rectangle& regionRect, const ScreenCaptureParameters& captureParams) = 0;
#endif
/** Shares the whole or part of a window by specifying the window ID.
-
- Since v3.0.0, this method supports sharing with common Windows platforms. Agora tests the mainstream Windows applications, see details as follows:
-
-
-
- OS version |
- Software |
- Software name |
- Whether support |
-
-
- win10 |
- Chrome |
- 76.0.3809.100 |
- No |
-
-
- Office Word |
- 18.1903.1152.0 |
- Yes |
-
-
- Office Excel |
- No |
-
-
- Office PPT |
- No |
-
-
- WPS Word |
- 11.1.0.9145 |
- Yes |
-
-
- WPS Excel |
-
-
- WPS PPT |
-
-
- Media Player (come with the system) |
- All |
- Yes |
-
-
- win8 |
- Chrome |
- All |
- Yes |
-
-
- Office Word |
- All |
- Yes |
-
-
- Office Excel |
-
-
- Office PPT |
-
-
- WPS Word |
- 11.1.0.9098 |
- Yes |
-
-
- WPS Excel |
-
-
- WPS PPT |
-
-
- Media Player(come with the system) |
- All |
- Yes |
-
-
- win7 |
- Chrome |
- 73.0.3683.103 |
- No |
-
-
- Office Word |
- All |
- Yes |
-
-
- Office Excel |
-
-
- Office PPT |
-
-
- WPS Word |
- 11.1.0.9098 |
- No |
-
-
- WPS Excel |
-
-
- WPS PPT |
-
-
- Media Player(come with the system) |
- All |
- No |
-
-
-
- @param windowId The ID of the window to be shared. For information on how to get the windowId, see the advanced guide *Share Screen*.
- @param regionRect (Optional) The relative location of the region to the window. NULL/NIL means sharing the whole window. See Rectangle. If the specified region overruns the window, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole window.
- @param captureParams Window sharing encoding parameters. See ScreenCaptureParameters.
-
- @return
- - 0: Success.
- - < 0: Failure:
- - #ERR_INVALID_ARGUMENT: the argument is invalid.
+ *
+ * @note
+ * - Ensure that you call this method after joining a channel.
+ * - Applies to the macOS and Windows platforms only.
+ *
+ * Since v3.0.0, this method supports window sharing of UWP (Universal Windows Platform) applications.
+ *
+ * Agora tests the mainstream UWP applications by using the latest SDK, with the details as follows:
+ *
+ *
+ *
+ * OS version |
+ * Software |
+ * Software name |
+ * Whether supported |
+ *
+ *
+ * win10 |
+ * Chrome |
+ * 76.0.3809.100 |
+ * No |
+ *
+ *
+ * Office Word |
+ * 18.1903.1152.0 |
+ * Yes |
+ *
+ *
+ * Office Excel |
+ * No |
+ *
+ *
+ * Office PPT |
+ * Yes |
+ *
+ *
+ * WPS Word |
+ * 11.1.0.9145 |
+ * Yes |
+ *
+ *
+ * WPS Excel |
+ *
+ *
+ * WPS PPT |
+ *
+ *
+ * Media Player (comes with the system) |
+ * All |
+ * Yes |
+ *
+ *
+ * win8 |
+ * Chrome |
+ * All |
+ * Yes |
+ *
+ *
+ * Office Word |
+ * All |
+ * Yes |
+ *
+ *
+ * Office Excel |
+ *
+ *
+ * Office PPT |
+ *
+ *
+ * WPS Word |
+ * 11.1.0.9098 |
+ * Yes |
+ *
+ *
+ * WPS Excel |
+ *
+ *
+ * WPS PPT |
+ *
+ *
+ * Media Player (comes with the system) |
+ * All |
+ * Yes |
+ *
+ *
+ * win7 |
+ * Chrome |
+ * 73.0.3683.103 |
+ * No |
+ *
+ *
+ * Office Word |
+ * All |
+ * Yes |
+ *
+ *
+ * Office Excel |
+ *
+ *
+ * Office PPT |
+ *
+ *
+ * WPS Word |
+ * 11.1.0.9098 |
+ * No |
+ *
+ *
+ * WPS Excel |
+ *
+ *
+ * WPS PPT |
+ * 11.1.0.9098 |
+ * Yes |
+ *
+ *
+ * Media Player (comes with the system) |
+ * All |
+ * No |
+ *
+ *
+ * @param windowId The ID of the window to be shared. For information on how to get the windowId, see the advanced guide *Share Screen*.
+ * @param regionRect (Optional) The relative location of the region to the window. NULL/NIL means sharing the whole window. See Rectangle. If the specified region overruns the window, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole window.
+ * @param captureParams Window sharing encoding parameters. See ScreenCaptureParameters.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure:
+ * - #ERR_INVALID_STATE: the screen sharing state is invalid, probably because another screen or window is being
+ * shared. Call \ref IRtcEngine::stopScreenCapture "stopScreenCapture" to stop the current screen sharing.
+ * - #ERR_INVALID_ARGUMENT: the argument is invalid.
*/
virtual int startScreenCaptureByWindowId(view_t windowId, const Rectangle& regionRect, const ScreenCaptureParameters& captureParams) = 0;
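A comparable window-sharing sketch, assuming `rtcEngine` is an initialized IRtcEngine pointer, `windowId` comes from the window enumeration described in *Share Screen*, and the content-hint call follows the method documented next:

// Share an 800 x 600 region at the top-left corner of the window.
agora::rtc::Rectangle region;
region.x = 0; region.y = 0; region.width = 800; region.height = 600;
agora::rtc::ScreenCaptureParameters params;
params.frameRate = 10;   // documents and slides rarely need a higher frame rate
if (rtcEngine->startScreenCaptureByWindowId(windowId, region, params) == 0) {
    rtcEngine->setScreenCaptureContentHint(agora::rtc::CONTENT_HINT_DETAILS);
}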
/** Sets the content hint for screen sharing.
- A content hint suggests the type of the content being shared, so that the SDK applies different optimization algorithm to different types of content.
+ A content hint suggests the type of the content being shared, so that the SDK applies different optimization algorithms to different types of content.
+
+ @note You can call this method either before or after you start screen sharing.
@param contentHint Sets the content hint for screen sharing. See VideoContentHint.
@@ -6374,6 +7495,25 @@ class IRtcEngine
- < 0: Failure.
*/
virtual int updateScreenCaptureRegion(const Rect *rect) = 0;
+
+#endif
+
+#if defined(_WIN32)
+ /** Sets a custom video source.
+ *
+ * During real-time communication, the Agora SDK enables the default video input device, that is, the built-in camera to
+ * capture video. If you need a custom video source, implement the IVideoSource class first, and call this method to add
+ * the custom video source to the SDK.
+ *
+ * @note You can call this method either before or after joining a channel.
+ *
+ * @param source The custom video source. See IVideoSource.
+ *
+ * @return
+ * - true: The custom video source is added to the SDK.
+ * - false: The custom video source is not added to the SDK.
+ */
+ virtual bool setVideoSource(IVideoSource *source) = 0;
#endif
/** Retrieves the current call ID.
@@ -6382,6 +7522,8 @@ class IRtcEngine
The \ref IRtcEngine::rate "rate" and \ref IRtcEngine::complain "complain" methods require the @p callId parameter retrieved from the *getCallId* method during a call. @p callId is passed as an argument into the \ref IRtcEngine::rate "rate" and \ref IRtcEngine::complain "complain" methods after the call ends.
+ @note Ensure that you call this method after joining a channel.
+
@param callId Pointer to the current call ID.
@return
@@ -6392,6 +7534,8 @@ class IRtcEngine
/** Allows a user to rate a call after the call ends.
+ @note Ensure that you call this method after joining a channel.
+
@param callId Pointer to the ID of the call, retrieved from the \ref IRtcEngine::getCallId "getCallId" method.
@param rating Rating of the call. The value is between 1 (lowest score) and 5 (highest score). If you set a value out of this range, the #ERR_INVALID_ARGUMENT (2) error returns.
@param description (Optional) Pointer to the description of the rating, with a string length of less than 800 bytes.
@@ -6404,6 +7548,8 @@ class IRtcEngine
/** Allows a user to complain about the call quality after a call ends.
+ @note Ensure that you call this method after joining a channel.
+
@param callId Pointer to the ID of the call, retrieved from the \ref IRtcEngine::getCallId "getCallId" method.
@param description (Optional) Pointer to the description of the complaint, with a string length of less than 800 bytes.
@@ -6462,7 +7608,7 @@ class IRtcEngine
@note
- This method consumes extra network traffic and may affect communication quality. We do not recommend calling this method together with enableLastmileTest.
- Do not call other methods before receiving the \ref IRtcEngineEventHandler::onLastmileQuality "onLastmileQuality" and \ref IRtcEngineEventHandler::onLastmileProbeResult "onLastmileProbeResult" callbacks. Otherwise, the callbacks may be interrupted.
- - In the Live-broadcast profile, a host should not call this method after joining a channel.
+ - In the `LIVE_BROADCASTING` profile, a host should not call this method after joining a channel.
@param config Sets the configurations of the last-mile network probe test. See LastmileProbeConfig.
@@ -6478,12 +7624,14 @@ class IRtcEngine
/** Retrieves the warning or error description.
@param code Warning code or error code returned in the \ref agora::rtc::IRtcEngineEventHandler::onWarning "onWarning" or \ref agora::rtc::IRtcEngineEventHandler::onError "onError" callback.
-
+
@return #WARN_CODE_TYPE or #ERROR_CODE_TYPE.
*/
virtual const char* getErrorDescription(int code) = 0;
- /** Enables built-in encryption with an encryption password before users join a channel.
+ /** **DEPRECATED** Enables built-in encryption with an encryption password before users join a channel.
+
+ Deprecated as of v3.1.0. Use the \ref agora::rtc::IRtcEngine::enableEncryption "enableEncryption" method instead.
All users in a channel must use the same encryption password. The encryption password is automatically cleared once a user leaves the channel.
@@ -6501,7 +7649,9 @@ class IRtcEngine
*/
virtual int setEncryptionSecret(const char* secret) = 0;
- /** Sets the built-in encryption mode.
+ /** **DEPRECATED** Sets the built-in encryption mode.
+
+ @deprecated Deprecated as of v3.1.0. Use the \ref agora::rtc::IRtcEngine::enableEncryption "enableEncryption" method instead.
The Agora SDK supports built-in encryption, which is set to the @p aes-128-xts mode by default. Call this method to use other encryption modes.
@@ -6523,14 +7673,41 @@ class IRtcEngine
*/
virtual int setEncryptionMode(const char* encryptionMode) = 0;
+ /** Enables/Disables the built-in encryption.
+ *
+ * @since v3.1.0
+ *
+ * In scenarios requiring high security, Agora recommends calling this method to enable the built-in encryption before joining a channel.
+ *
+ * All users in the same channel must use the same encryption mode and encryption key. Once all users leave the channel, the encryption key of this channel is automatically cleared.
+ *
+ * @note
+ * - If you enable the built-in encryption, you cannot use the RTMP streaming function.
+ * - Agora supports four encryption modes. If you choose an encryption mode other than `SM4_128_ECB`, you need to add an external encryption library when integrating the Android or iOS SDK. See the advanced guide *Channel Encryption*.
+ *
+ * @param enabled Whether to enable the built-in encryption:
+ * - true: Enable the built-in encryption.
+ * - false: Disable the built-in encryption.
+ * @param config Configurations of built-in encryption schemas. See EncryptionConfig.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
+ * - -2(ERR_INVALID_ARGUMENT): An invalid parameter is used. Set the parameter with a valid value.
+ * - -4(ERR_NOT_SUPPORTED): The encryption mode is incorrect or the SDK fails to load the external encryption library. Check the enumeration or reload the external encryption library.
+ * - -7(ERR_NOT_INITIALIZED): The SDK is not initialized. Initialize the `IRtcEngine` instance before calling this method.
+ */
+ virtual int enableEncryption(bool enabled, const EncryptionConfig& config) = 0;
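A minimal sketch of the recommended call order, assuming `rtcEngine` is an initialized IRtcEngine pointer, `token` is a placeholder, and the key string stands in for a key distributed through your own secure channel:

// Enable AES-128-XTS encryption, then join the channel.
agora::rtc::EncryptionConfig config;
config.encryptionMode = agora::rtc::AES_128_XTS;
config.encryptionKey = "replace-with-your-key";   // placeholder
if (rtcEngine->enableEncryption(true, config) == 0) {
    rtcEngine->joinChannel(token, "secureChannel", NULL, 0);
}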
+
/** Registers a packet observer.
The Agora SDK allows your application to register a packet observer to receive callbacks for voice or video packet transmission.
-
+
@note
 - The size of the packet sent to the network after processing should not exceed 1200 bytes; otherwise, the packet may fail to be sent.
 - Ensure that both receivers and senders call this method; otherwise, you may encounter undefined behaviors such as no voice and a black screen.
 - When you use the CDN live streaming, recording, or storage functions, Agora does not recommend calling this method.
+ - Call this method before joining a channel.
@param observer Pointer to the registered packet observer. See IPacketObserver.
@@ -6544,9 +7721,11 @@ class IRtcEngine
Each user can create up to five data streams during the lifecycle of the IRtcEngine.
- @note Set both the @p reliable and @p ordered parameters to true or false. Do not set one as true and the other as false.
+ @note
+ - Set both the @p reliable and @p ordered parameters to true or false. Do not set one as true and the other as false.
+ - Ensure that you call this method after joining a channel.
- @param streamId Pointer to the ID of the created data stream.
+ @param[out] streamId Pointer to the ID of the created data stream.
@param reliable Sets whether or not the recipients are guaranteed to receive the data stream from the sender within five seconds:
- true: The recipients receive the data stream from the sender within five seconds. If the recipient does not receive the data stream within five seconds, an error is reported to the application.
- false: There is no guarantee that the recipients receive the data stream within five seconds and no error message is reported for any delay or missing data stream.
@@ -6567,12 +7746,12 @@ class IRtcEngine
- Each client can send up to 6 kB of data per second.
- Each user can have up to five data streams simultaneously.
- A successful \ref agora::rtc::IRtcEngine::sendStreamMessage "sendStreamMessage" method call triggers the
+ A successful \ref agora::rtc::IRtcEngine::sendStreamMessage "sendStreamMessage" method call triggers the
\ref agora::rtc::IRtcEngineEventHandler::onStreamMessage "onStreamMessage" callback on the remote client, from which the remote user gets the stream message.
A failed \ref agora::rtc::IRtcEngine::sendStreamMessage "sendStreamMessage" method call triggers the
\ref agora::rtc::IRtcEngineEventHandler::onStreamMessage "onStreamMessage" callback on the remote client.
- @note This method applies only to the Communication profile or to the hosts in the Live-broadcast profile. If an audience in the Live-broadcast profile calls this method, the audience may be switched to a host.
+ @note This method applies only to the `COMMUNICATION` profile or to the hosts in the `LIVE_BROADCASTING` profile. If an audience in the `LIVE_BROADCASTING` profile calls this method, the audience may be switched to a host.
@param streamId ID of the sent data stream, returned in the \ref IRtcEngine::createDataStream "createDataStream" method.
@param data Pointer to the sent data.
@param length Length of the sent data.
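A minimal data-stream sketch, assuming `rtcEngine` is an initialized IRtcEngine pointer and the local user has already joined a channel:

// Create a reliable, ordered data stream and send a short text message.
int streamId = 0;
if (rtcEngine->createDataStream(&streamId, true, true) == 0) {
    const char msg[] = "hello";
    rtcEngine->sendStreamMessage(streamId, msg, sizeof(msg) - 1);
}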
@@ -6592,7 +7771,7 @@ class IRtcEngine
- Ensure that the user joins the channel before calling this method.
- Ensure that you enable the RTMP Converter service before using this function. See *Prerequisites* in the advanced guide *Push Streams to CDN*.
- This method adds only one stream RTMP URL address each time it is called.
- - This method applies to Live Broadcast only.
+ - This method applies to `LIVE_BROADCASTING` only.
@param url The CDN streaming URL in the RTMP format. The maximum length of this parameter is 1024 bytes. The RTMP URL address must not contain special characters, such as Chinese language characters.
@param transcodingEnabled Sets whether transcoding is enabled/disabled:
@@ -6615,7 +7794,7 @@ class IRtcEngine
@note
- This method removes only one RTMP URL address each time it is called.
- The RTMP URL address must not contain special characters, such as Chinese language characters.
- - This method applies to Live Broadcast only.
+ - This method applies to `LIVE_BROADCASTING` only.
@param url The RTMP URL address to be removed. The maximum length of this parameter is 1024 bytes.
@@ -6630,9 +7809,10 @@ class IRtcEngine
The SDK triggers the \ref agora::rtc::IRtcEngineEventHandler::onTranscodingUpdated "onTranscodingUpdated" callback when you call the `setLiveTranscoding` method to update the transcoding setting.
@note
- - This method applies to Live Broadcast only.
+ - This method applies to `LIVE_BROADCASTING` only.
- Ensure that you enable the RTMP Converter service before using this function. See *Prerequisites* in the advanced guide *Push Streams to CDN*.
- If you call the `setLiveTranscoding` method to update the transcoding setting for the first time, the SDK does not trigger the `onTranscodingUpdated` callback.
+ - Ensure that you call this method after joining a channel.
@param transcoding Sets the CDN live audio/video transcoding settings. See LiveTranscoding.
@@ -6655,7 +7835,7 @@ class IRtcEngine
@note
- The URL descriptions are different for the local video and CDN live streams:
- In a local video stream, `url` in RtcImage refers to the absolute path of the added watermark image file in the local video stream.
- - In a CDN live stream, `url` in RtcImage refers to the URL address of the added watermark image in the CDN live broadcast.
+ - In a CDN live stream, `url` in RtcImage refers to the URL address of the added watermark image in the CDN live streaming.
- The source file of the watermark image must be in the PNG file format. If the width and height of the PNG file differ from your settings in this method, the PNG file will be cropped to conform to your settings.
- The Agora SDK supports adding only one watermark image onto a local video or CDN live stream. The newly added watermark image replaces the previous one.
@@ -6667,7 +7847,7 @@ class IRtcEngine
/** Adds a watermark image to the local video.
- This method adds a PNG watermark image to the local video in a live broadcast. Once the watermark image is added, all the audience in the channel (CDN audience included),
+ This method adds a PNG watermark image to the local video in the live streaming. Once the watermark image is added, all the audience in the channel (CDN audience included),
and the recording device can see and capture it. Agora supports adding only one watermark image onto the local video, and the newly watermark image replaces the previous one.
The watermark position depends on the settings in the \ref IRtcEngine::setVideoEncoderConfiguration "setVideoEncoderConfiguration" method:
@@ -6677,7 +7857,7 @@ class IRtcEngine
@note
- Ensure that you have called the \ref agora::rtc::IRtcEngine::enableVideo "enableVideo" method to enable the video module before calling this method.
- - If you only want to add a watermark image to the local video for the audience in the CDN live broadcast channel to see and capture, you can call this method or the \ref agora::rtc::IRtcEngine::setLiveTranscoding "setLiveTranscoding" method.
+ - If you only want to add a watermark image to the local video for the audience in the CDN live streaming channel to see and capture, you can call this method or the \ref agora::rtc::IRtcEngine::setLiveTranscoding "setLiveTranscoding" method.
- This method supports adding a watermark image in the PNG file format only. Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray.
- If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings.
- If you have enabled the local video preview by calling the \ref agora::rtc::IRtcEngine::startPreview "startPreview" method, you can use the `visibleInPreview` member in the WatermarkOptions class to set whether or not the watermark is visible in preview.
@@ -6700,22 +7880,18 @@ class IRtcEngine
*/
virtual int clearVideoWatermarks() = 0;
- /** @since v3.0.0
-
- Enables/Disables image enhancement and sets the options.
-
- @note
- - Call this method after calling the enableVideo method.
- - Currently this method does not apply for macOS.
-
- @param enabled Sets whether or not to enable image enhancement:
- - true: enables image enhancement.
- - false: disables image enhancement.
- @param options Sets the image enhancement option. See BeautyOptions.
+ /** Enables/Disables image enhancement and sets the options.
+ *
+ * @note Call this method after calling the \ref IRtcEngine::enableVideo "enableVideo" method.
+ *
+ * @param enabled Sets whether or not to enable image enhancement:
+ * - true: enables image enhancement.
+ * - false: disables image enhancement.
+ * @param options Sets the image enhancement option. See BeautyOptions.
*/
virtual int setBeautyEffectOptions(bool enabled, BeautyOptions options) = 0;
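An illustrative call, assuming `rtcEngine` is an initialized IRtcEngine pointer; the levels below are arbitrary sample values:

// Enable image enhancement after enableVideo().
agora::rtc::BeautyOptions options;
options.lighteningLevel = 0.7f;   // brightness
options.smoothnessLevel = 0.5f;   // skin smoothing
options.rednessLevel = 0.1f;      // redness
rtcEngine->setBeautyEffectOptions(true, options);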
- /** Adds a voice or video stream URL address to a live broadcast.
+ /** Adds a voice or video stream URL address to the live streaming.
The \ref IRtcEngineEventHandler::onStreamPublished "onStreamPublished" callback returns the inject status. If this method call is successful, the server pulls the voice or video stream and injects it into a live channel. This is applicable to scenarios where all audience members in the channel can watch a live show and interact with each other.
@@ -6729,10 +7905,11 @@ class IRtcEngine
@note
 - Ensure that you enable the RTMP Converter service before using this function. See *Prerequisites* in the advanced guide *Push Streams to CDN*.
- This method applies to the Native SDK v2.4.1 and later.
- - This method applies to the Live-Broadcast profile only.
+ - This method applies to the `LIVE_BROADCASTING` profile only.
- You can inject only one media stream into the channel at the same time.
+ - Ensure that you call this method after joining a channel.
- @param url Pointer to the URL address to be added to the ongoing live broadcast. Valid protocols are RTMP, HLS, and HTTP-FLV.
+ @param url Pointer to the URL address to be added to the ongoing streaming. Valid protocols are RTMP, HLS, and HTTP-FLV.
- Supported audio codec type: AAC.
- Supported video codec type: H264 (AVC).
@param config Pointer to the InjectStreamConfig object that contains the configuration of the added voice or video stream.
@@ -6742,7 +7919,7 @@ class IRtcEngine
- < 0: Failure.
- #ERR_INVALID_ARGUMENT (2): The injected URL does not exist. Call this method again to inject the stream and ensure that the URL is valid.
- #ERR_NOT_READY (3): The user is not in the channel.
- - #ERR_NOT_SUPPORTED (4): The channel profile is not live broadcast. Call the \ref agora::rtc::IRtcEngine::setChannelProfile "setChannelProfile" method and set the channel profile to live broadcast before calling this method.
+ - #ERR_NOT_SUPPORTED (4): The channel profile is not `LIVE_BROADCASTING`. Call the \ref agora::rtc::IRtcEngine::setChannelProfile "setChannelProfile" method and set the channel profile to `LIVE_BROADCASTING` before calling this method.
- #ERR_NOT_INITIALIZED (7): The SDK is not initialized. Ensure that the IRtcEngine object is initialized before calling this method.
*/
virtual int addInjectStreamUrl(const char* url, const InjectStreamConfig& config) = 0;
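A minimal injection sketch, assuming `rtcEngine` is an initialized IRtcEngine pointer, the local user has joined a `LIVE_BROADCASTING` channel, and the URL is a placeholder; the default-constructed InjectStreamConfig keeps the SDK defaults:

// Inject an online media stream, and remove it when it is no longer needed.
agora::rtc::InjectStreamConfig config;
rtcEngine->addInjectStreamUrl("rtmp://example.com/live/stream", config);
// ...
rtcEngine->removeInjectStreamUrl("rtmp://example.com/live/stream");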
@@ -6760,7 +7937,7 @@ class IRtcEngine
* #RELAY_STATE_RUNNING (2) and #RELAY_OK (0), and the
* \ref agora::rtc::IRtcEngineEventHandler::onChannelMediaRelayEvent
* "onChannelMediaRelayEvent" callback returns
- * #RELAY_EVENT_PACKET_SENT_TO_DEST_CHANNEL (4), the broadcaster starts
+ * #RELAY_EVENT_PACKET_SENT_TO_DEST_CHANNEL (4), the host starts
* sending data to the destination channel.
* - If the
* \ref agora::rtc::IRtcEngineEventHandler::onChannelMediaRelayStateChanged
@@ -6770,8 +7947,8 @@ class IRtcEngine
*
* @note
* - Call this method after the \ref joinChannel() "joinChannel" method.
- * - This method takes effect only when you are a broadcaster in a
- * Live-broadcast channel.
+ * - This method takes effect only when you are a host in a
+ * `LIVE_BROADCASTING` channel.
* - After a successful method call, if you want to call this method
* again, ensure that you call the
* \ref stopChannelMediaRelay() "stopChannelMediaRelay" method to quit the
@@ -6786,7 +7963,7 @@ class IRtcEngine
* - 0: Success.
* - < 0: Failure.
*/
- virtual int startChannelMediaRelay(const ChannelMediaRelayConfiguration &configuration) = 0;
+ virtual int startChannelMediaRelay(const ChannelMediaRelayConfiguration &configuration) = 0;
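A relay sketch, assuming `rtcEngine` is an initialized IRtcEngine pointer, the field names follow the ChannelMediaRelayConfiguration and ChannelMediaInfo structs of this SDK version, and the channel names and tokens are placeholders:

// Relay the host's media from the source channel to one destination channel.
agora::rtc::ChannelMediaInfo srcInfo;
srcInfo.channelName = "sourceChannel";
srcInfo.token = srcToken;                 // token generated for uid 0 in the source channel
srcInfo.uid = 0;
agora::rtc::ChannelMediaInfo destInfo;
destInfo.channelName = "destChannel";
destInfo.token = destToken;               // token for uid 0 in the destination channel
destInfo.uid = 0;
agora::rtc::ChannelMediaRelayConfiguration config;
config.srcInfo = &srcInfo;
config.destInfos = &destInfo;
config.destCount = 1;
rtcEngine->startChannelMediaRelay(config);
// Later, call stopChannelMediaRelay() to quit all destination channels.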
/** Updates the channels for media stream relay. After a successful
* \ref startChannelMediaRelay() "startChannelMediaRelay" method call, if
* you want to relay the media stream to more channels, or leave the
@@ -6810,16 +7987,16 @@ class IRtcEngine
* - 0: Success.
* - < 0: Failure.
*/
- virtual int updateChannelMediaRelay(const ChannelMediaRelayConfiguration &configuration) = 0;
+ virtual int updateChannelMediaRelay(const ChannelMediaRelayConfiguration &configuration) = 0;
/** Stops the media stream relay.
*
- * Once the relay stops, the broadcaster quits all the destination
+ * Once the relay stops, the host quits all the destination
* channels.
*
* After a successful method call, the SDK triggers the
* \ref agora::rtc::IRtcEngineEventHandler::onChannelMediaRelayStateChanged
* "onChannelMediaRelayStateChanged" callback. If the callback returns
- * #RELAY_STATE_IDLE (0) and #RELAY_OK (0), the broadcaster successfully
+ * #RELAY_STATE_IDLE (0) and #RELAY_OK (0), the host successfully
* stops the relay.
*
* @note
@@ -6835,15 +8012,15 @@ class IRtcEngine
* - 0: Success.
* - < 0: Failure.
*/
- virtual int stopChannelMediaRelay() = 0;
+ virtual int stopChannelMediaRelay() = 0;
- /** Removes the voice or video stream URL address from a live broadcast.
+ /** Removes the voice or video stream URL address from the live streaming.
- This method removes the URL address (added by the \ref IRtcEngine::addInjectStreamUrl "addInjectStreamUrl" method) from the live broadcast.
+ This method removes the URL address (added by the \ref IRtcEngine::addInjectStreamUrl "addInjectStreamUrl" method) from the live streaming.
@note If this method is called successfully, the SDK triggers the \ref IRtcEngineEventHandler::onUserOffline "onUserOffline" callback and returns a stream uid of 666.
- @param url Pointer to the URL address of the added stream to be removed.
+ @param url Pointer to the URL address of the injected stream to be removed.
@return
- 0: Success.
@@ -6852,20 +8029,87 @@ class IRtcEngine
virtual int removeInjectStreamUrl(const char* url) = 0;
virtual bool registerEventHandler(IRtcEngineEventHandler *eventHandler) = 0;
virtual bool unregisterEventHandler(IRtcEngineEventHandler *eventHandler) = 0;
+ /** Agora supports reporting and analyzing customized messages.
+ *
+ * @since v3.1.0
+ *
+ * This function is in the beta stage with a free trial. In the beta release, you can report a maximum of 10 message pieces within 6 seconds, with each message piece not exceeding 256 bytes and each string not exceeding 100 bytes.
+ * To try out this function, contact [support@agora.io](mailto:support@agora.io) and discuss the format of customized messages with us.
+ */
+ virtual int sendCustomReportMessage(const char *id, const char* category, const char* event, const char* label, int value) = 0;
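An illustrative call, assuming `rtcEngine` is an initialized IRtcEngine pointer and all string arguments are placeholders agreed with Agora support for the beta:

// Report one custom event with a numeric value.
rtcEngine->sendCustomReportMessage("reportId-001", "purchase", "click_buy", "sku-42", 1);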
/** Gets the current connection state of the SDK.
+ @note You can call this method either before or after joining a channel.
+
@return #CONNECTION_STATE_TYPE.
*/
virtual CONNECTION_STATE_TYPE getConnectionState() = 0;
+ /// @cond
+ /** Enables/Disables the super-resolution algorithm for a remote user's video stream.
+ *
+ * @since v3.2.0
+ *
+ * The algorithm effectively improves the resolution of the specified remote user's video stream. When the original
+ * resolution of the remote video stream is a × b pixels, you can receive and render the stream at a higher
+ * resolution (2a × 2b pixels) by enabling the algorithm.
+ *
+ * After calling this method, the SDK triggers the
+ * \ref IRtcEngineEventHandler::onUserSuperResolutionEnabled "onUserSuperResolutionEnabled" callback to report
+ * whether you have successfully enabled the super-resolution algorithm.
+ *
+ * @warning The super-resolution algorithm requires extra system resources.
+ * To balance the visual experience and system usage, the SDK poses the following restrictions:
+ * - The algorithm can only be used for a single user at a time.
+ * - On the Android platform, the original resolution of the remote video must not exceed 640 × 360 pixels.
+ * - On the iOS platform, the original resolution of the remote video must not exceed 640 × 480 pixels.
+ * If you exceed these limitations, the SDK triggers the \ref IRtcEngineEventHandler::onWarning "onWarning"
+ * callback with the corresponding warning codes:
+ * - #WARN_SUPER_RESOLUTION_STREAM_OVER_LIMITATION (1610): The original resolution of the remote video is beyond the range where the super-resolution algorithm can be applied.
+ * - #WARN_SUPER_RESOLUTION_USER_COUNT_OVER_LIMITATION (1611): Another user is already using the super-resolution algorithm.
+ * - #WARN_SUPER_RESOLUTION_DEVICE_NOT_SUPPORTED (1612): The device does not support the super-resolution algorithm.
+ *
+ * @note
+ * - This method applies to Android and iOS only.
+ * - Requirements for the user's device:
+ * - Android: The following devices are known to support the method:
+ * - VIVO: V1821A, NEX S, 1914A, 1916A, and 1824BA
+ * - OPPO: PCCM00
+ * - OnePlus: A6000
+ * - Xiaomi: Mi 8, Mi 9, MIX3, and Redmi K20 Pro
+ * - SAMSUNG: SM-G9600, SM-G9650, SM-N9600, SM-G9708, SM-G960U, and SM-G9750
+ * - HUAWEI: SEA-AL00, ELE-AL00, VOG-AL00, YAL-AL10, HMA-AL00, and EVR-AN00
+ * - iOS: This method is supported on devices running iOS 12.0 or later. The following
+ * device models are known to support the method:
+ * - iPhone XR
+ * - iPhone XS
+ * - iPhone XS Max
+ * - iPhone 11
+ * - iPhone 11 Pro
+ * - iPhone 11 Pro Max
+ * - iPad Pro 11-inch (3rd Generation)
+ * - iPad Pro 12.9-inch (3rd Generation)
+ * - iPad Air 3 (3rd Generation)
+ *
+ * @param userId The ID of the remote user.
+ * @param enable Whether to enable the super-resolution algorithm:
+ * - true: Enable the super-resolution algorithm.
+ * - false: Disable the super-resolution algorithm.
+ *
+ * @return
+ * - 0: Success.
+ * - < 0: Failure.
+ */
+ virtual int enableRemoteSuperResolution(uid_t userId, bool enable) = 0;
+ /// @endcond
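A minimal call sketch, assuming `rtcEngine` is an initialized IRtcEngine pointer and `remoteUid` identifies the single remote user whose stream you want to upscale:

// Enable super resolution for one remote user; check onUserSuperResolutionEnabled
// and onWarning (1610-1612) for the actual result.
rtcEngine->enableRemoteSuperResolution(remoteUid, true);
// ...
rtcEngine->enableRemoteSuperResolution(remoteUid, false);   // release the extra resources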
/** Registers the metadata observer.
Registers the metadata observer. You need to implement the IMetadataObserver class and specify the metadata type in this method. A successful call of this method triggers the \ref agora::rtc::IMetadataObserver::getMaxMetadataSize "getMaxMetadataSize" callback.
- This method enables you to add synchronized metadata in the video stream for more diversified live broadcast interactions, such as sending shopping links, digital coupons, and online quizzes.
+ This method enables you to add synchronized metadata in the video stream for more diversified live interactive streaming, such as sending shopping links, digital coupons, and online quizzes.
@note
- Call this method before the joinChannel method.
- - This method applies to the Live-broadcast channel profile.
+ - This method applies to the `LIVE_BROADCASTING` channel profile.
@param observer The IMetadataObserver class. See the definition of IMetadataObserver for details.
@param type See \ref IMetadataObserver::METADATA_TYPE "METADATA_TYPE". The SDK supports VIDEO_METADATA (0) only for now.
@@ -7060,7 +8304,7 @@ class IRtcEngineParameter
*/
virtual int setProfile(const char* profile, bool merge) = 0;
- virtual int convertPath(const char* filePath, agora::util::AString& value) = 0;
+ virtual int convertPath(const char* filePath, agora::util::AString& value) = 0;
};
class AAudioDeviceManager : public agora::util::AutoPtr<IAudioDeviceManager>
@@ -7068,7 +8312,7 @@ class AAudioDeviceManager : public agora::util::AutoPtr
public:
AAudioDeviceManager(IRtcEngine* engine)
{
- queryInterface(engine, AGORA_IID_AUDIO_DEVICE_MANAGER);
+ queryInterface(engine, AGORA_IID_AUDIO_DEVICE_MANAGER);
}
};
@@ -7077,7 +8321,7 @@ class AVideoDeviceManager : public agora::util::AutoPtr
public:
AVideoDeviceManager(IRtcEngine* engine)
{
- queryInterface(engine, AGORA_IID_VIDEO_DEVICE_MANAGER);
+ queryInterface(engine, AGORA_IID_VIDEO_DEVICE_MANAGER);
}
};
@@ -7106,39 +8350,39 @@ class RtcEngineParameters
RtcEngineParameters(IRtcEngine* engine)
:m_parameter(engine){}
-
+
int enableLocalVideo(bool enabled) {
return setParameters("{\"rtc.video.capture\":%s,\"che.video.local.capture\":%s,\"che.video.local.render\":%s,\"che.video.local.send\":%s}", enabled ? "true" : "false", enabled ? "true" : "false", enabled ? "true" : "false", enabled ? "true" : "false");
}
-
+
int muteLocalVideoStream(bool mute) {
return setParameters("{\"rtc.video.mute_me\":%s,\"che.video.local.send\":%s}", mute ? "true" : "false", mute ? "false" : "true");
}
-
+
int muteAllRemoteVideoStreams(bool mute) {
return m_parameter ? m_parameter->setBool("rtc.video.mute_peers", mute) : -ERR_NOT_INITIALIZED;
}
-
+
int setDefaultMuteAllRemoteVideoStreams(bool mute) {
return m_parameter ? m_parameter->setBool("rtc.video.set_default_mute_peers", mute) : -ERR_NOT_INITIALIZED;
}
-
+
int muteRemoteVideoStream(uid_t uid, bool mute) {
return setObject("rtc.video.mute_peer", "{\"uid\":%u,\"mute\":%s}", uid, mute ? "true" : "false");
}
-
+
int setPlaybackDeviceVolume(int volume) {// [0,255]
return m_parameter ? m_parameter->setInt("che.audio.output.volume", volume) : -ERR_NOT_INITIALIZED;
}
-
+
int startAudioRecording(const char* filePath, AUDIO_RECORDING_QUALITY_TYPE quality) {
return startAudioRecording(filePath, 32000, quality);
}
@@ -7155,12 +8399,12 @@ class RtcEngineParameters
return setObject("che.audio.start_recording", "{\"filePath\":\"%s\",\"sampleRate\":%d,\"quality\":%d}", filePath, sampleRate, quality);
}
-
+
int stopAudioRecording() {
return m_parameter ? m_parameter->setBool("che.audio.stop_recording", true) : -ERR_NOT_INITIALIZED;
}
-
+
int startAudioMixing(const char* filePath, bool loopback, bool replace, int cycle) {
if (!m_parameter) return -ERR_NOT_INITIALIZED;
#if defined(_WIN32)
@@ -7177,22 +8421,22 @@ class RtcEngineParameters
cycle);
}
-
+
int stopAudioMixing() {
return m_parameter ? m_parameter->setBool("che.audio.stop_file_as_playout", true) : -ERR_NOT_INITIALIZED;
}
-
+
int pauseAudioMixing() {
return m_parameter ? m_parameter->setBool("che.audio.pause_file_as_playout", true) : -ERR_NOT_INITIALIZED;
}
-
+
int resumeAudioMixing() {
return m_parameter ? m_parameter->setBool("che.audio.pause_file_as_playout", false) : -ERR_NOT_INITIALIZED;
}
-
+
int adjustAudioMixingVolume(int volume) {
int ret = adjustAudioMixingPlayoutVolume(volume);
if (ret == 0) {
@@ -7201,12 +8445,12 @@ class RtcEngineParameters
return ret;
}
-
+
int adjustAudioMixingPlayoutVolume(int volume) {
return m_parameter ? m_parameter->setInt("che.audio.set_file_as_playout_volume", volume) : -ERR_NOT_INITIALIZED;
}
-
+
int getAudioMixingPlayoutVolume() {
int volume = 0;
int r = m_parameter ? m_parameter->getInt("che.audio.get_file_as_playout_volume", volume) : -ERR_NOT_INITIALIZED;
@@ -7215,12 +8459,12 @@ class RtcEngineParameters
return r;
}
-
+
int adjustAudioMixingPublishVolume(int volume) {
return m_parameter ? m_parameter->setInt("che.audio.set_file_as_playout_publish_volume", volume) : -ERR_NOT_INITIALIZED;
}
-
+
int getAudioMixingPublishVolume() {
int volume = 0;
int r = m_parameter ? m_parameter->getInt("che.audio.get_file_as_playout_publish_volume", volume) : -ERR_NOT_INITIALIZED;
@@ -7229,7 +8473,7 @@ class RtcEngineParameters
return r;
}
-
+
int getAudioMixingDuration() {
int duration = 0;
int r = m_parameter ? m_parameter->getInt("che.audio.get_mixing_file_length_ms", duration) : -ERR_NOT_INITIALIZED;
@@ -7238,7 +8482,7 @@ class RtcEngineParameters
return r;
}
-
+
int getAudioMixingCurrentPosition() {
if (!m_parameter) return -ERR_NOT_INITIALIZED;
int pos = 0;
@@ -7247,7 +8491,7 @@ class RtcEngineParameters
r = pos;
return r;
}
-
+
int setAudioMixingPosition(int pos /*in ms*/) {
return m_parameter ? m_parameter->setInt("che.audio.mixing.file.position", pos) : -ERR_NOT_INITIALIZED;
}
@@ -7271,12 +8515,12 @@ class RtcEngineParameters
return r;
}
-
+
int setEffectsVolume(int volume) {
return m_parameter ? m_parameter->setInt("che.audio.game_set_effects_volume", volume) : -ERR_NOT_INITIALIZED;
}
-
+
int setVolumeOfEffect(int soundId, int volume) {
return setObject(
"che.audio.game_adjust_effect_volume",
@@ -7284,7 +8528,7 @@ class RtcEngineParameters
soundId, volume);
}
-
+
int playEffect(int soundId, const char* filePath, int loopCount, double pitch, double pan, int gain, bool publish = false) {
#if defined(_WIN32)
util::AString path;
@@ -7299,19 +8543,19 @@ class RtcEngineParameters
soundId, filePath, loopCount, pitch, pan, gain, publish);
}
-
+
int stopEffect(int soundId) {
return m_parameter ? m_parameter->setInt(
"che.audio.game_stop_effect", soundId) : -ERR_NOT_INITIALIZED;
}
-
+
int stopAllEffects() {
return m_parameter ? m_parameter->setBool(
"che.audio.game_stop_all_effects", true) : -ERR_NOT_INITIALIZED;
}
-
+
int preloadEffect(int soundId, char* filePath) {
return setObject(
"che.audio.game_preload_effect",
@@ -7319,61 +8563,61 @@ class RtcEngineParameters
soundId, filePath);
}
-
+
int unloadEffect(int soundId) {
return m_parameter ? m_parameter->setInt(
"che.audio.game_unload_effect", soundId) : -ERR_NOT_INITIALIZED;
}
-
+
int pauseEffect(int soundId) {
return m_parameter ? m_parameter->setInt(
"che.audio.game_pause_effect", soundId) : -ERR_NOT_INITIALIZED;
}
-
+
int pauseAllEffects() {
return m_parameter ? m_parameter->setBool(
"che.audio.game_pause_all_effects", true) : -ERR_NOT_INITIALIZED;
}
-
+
int resumeEffect(int soundId) {
return m_parameter ? m_parameter->setInt(
"che.audio.game_resume_effect", soundId) : -ERR_NOT_INITIALIZED;
}
-
+
int resumeAllEffects() {
return m_parameter ? m_parameter->setBool(
"che.audio.game_resume_all_effects", true) : -ERR_NOT_INITIALIZED;
}
-
+
int enableSoundPositionIndication(bool enabled) {
return m_parameter ? m_parameter->setBool(
"che.audio.enable_sound_position", enabled) : -ERR_NOT_INITIALIZED;
}
-
+
int setRemoteVoicePosition(uid_t uid, double pan, double gain) {
return setObject("che.audio.game_place_sound_position", "{\"uid\":%u,\"pan\":%lf,\"gain\":%lf}", uid, pan, gain);
}
-
+
int setLocalVoicePitch(double pitch) {
return m_parameter ? m_parameter->setInt(
"che.audio.morph.pitch_shift",
static_cast<int>(pitch * 100)) : -ERR_NOT_INITIALIZED;
}
-
+
int setLocalVoiceEqualization(AUDIO_EQUALIZATION_BAND_FREQUENCY bandFrequency, int bandGain) {
return setObject(
"che.audio.morph.equalization",
"{\"index\":%d,\"gain\":%d}",
static_cast<int>(bandFrequency), bandGain);
}
-
+
int setLocalVoiceReverb(AUDIO_REVERB_TYPE reverbKey, int value) {
return setObject(
"che.audio.morph.reverb",
@@ -7381,52 +8625,173 @@ class RtcEngineParameters
static_cast<int>(reverbKey), value);
}
-
+
int setLocalVoiceChanger(VOICE_CHANGER_PRESET voiceChanger) {
- if(voiceChanger == 0x00000000)
- {
- return m_parameter ? m_parameter->setInt("che.audio.morph.voice_changer", static_cast(voiceChanger)) : -ERR_NOT_INITIALIZED;
+ if(!m_parameter)
+ return -ERR_NOT_INITIALIZED;
+ if(voiceChanger == 0x00000000) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", static_cast(voiceChanger));
}
- else if(voiceChanger > 0x00000000 && voiceChanger < 0x00100000)
- {
- return m_parameter ? m_parameter->setInt("che.audio.morph.voice_changer", static_cast(voiceChanger)) : -ERR_NOT_INITIALIZED;
+ else if(voiceChanger > 0x00000000 && voiceChanger < 0x00100000) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", static_cast(voiceChanger));
}
- else if(voiceChanger > 0x00100000 && voiceChanger < 0x00200000)
- {
- return m_parameter ? m_parameter->setInt("che.audio.morph.voice_changer", static_cast(voiceChanger - 0x00100000 + 6)) : -ERR_NOT_INITIALIZED;
+ else if(voiceChanger > 0x00100000 && voiceChanger < 0x00200000) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", static_cast(voiceChanger - 0x00100000 + 6));
}
- else if(voiceChanger > 0x00200000 && voiceChanger < 0x00300000)
- {
- return m_parameter ? m_parameter->setInt("che.audio.morph.beauty_voice", static_cast(voiceChanger - 0x00200000)) : -ERR_NOT_INITIALIZED;
+ else if(voiceChanger > 0x00200000 && voiceChanger < 0x00300000) {
+ return m_parameter->setInt("che.audio.morph.beauty_voice", static_cast(voiceChanger - 0x00200000));
}
- else
- {
- return -ERR_NOT_INITIALIZED;
+ else {
+ return -ERR_INVALID_ARGUMENT;
}
}
-
+
int setLocalVoiceReverbPreset(AUDIO_REVERB_PRESET reverbPreset) {
- if(reverbPreset == 0x00000000)
- {
- return m_parameter ? m_parameter->setInt("che.audio.morph.reverb_preset", static_cast(reverbPreset)) : -ERR_NOT_INITIALIZED;
+ if(!m_parameter)
+ return -ERR_NOT_INITIALIZED;
+ if(reverbPreset == 0x00000000) {
+ return m_parameter->setInt("che.audio.morph.reverb_preset", static_cast(reverbPreset));
}
- else if(reverbPreset > 0x00000000 && reverbPreset < 0x00100000)
- {
- return m_parameter ? m_parameter->setInt("che.audio.morph.reverb_preset", static_cast(reverbPreset + 8)) : -ERR_NOT_INITIALIZED;
+ else if(reverbPreset > 0x00000000 && reverbPreset < 0x00100000) {
+ return m_parameter->setInt("che.audio.morph.reverb_preset", static_cast(reverbPreset + 8));
}
- else if(reverbPreset > 0x00100000 && reverbPreset < 0x00200000)
- {
- return m_parameter ? m_parameter->setInt("che.audio.morph.reverb_preset", static_cast(reverbPreset - 0x00100000)) : -ERR_NOT_INITIALIZED;
+ else if(reverbPreset > 0x00100000 && reverbPreset < 0x00200000) {
+ return m_parameter->setInt("che.audio.morph.reverb_preset", static_cast(reverbPreset - 0x00100000));
}
- else if(reverbPreset > 0x00200000 && reverbPreset < 0x00200002)
- {
- return m_parameter ? m_parameter->setInt("che.audio.morph.virtual_stereo", static_cast(reverbPreset - 0x00200000)) : -ERR_NOT_INITIALIZED;
+ else if(reverbPreset > 0x00200000 && reverbPreset < 0x00200002) {
+ return m_parameter->setInt("che.audio.morph.virtual_stereo", static_cast(reverbPreset - 0x00200000));
}
- else
- {
+ else if (reverbPreset > (AUDIO_REVERB_PRESET) 0x00300000 && reverbPreset < (AUDIO_REVERB_PRESET) 0x00300002)
+ return setObject( "che.audio.morph.electronic_voice", "{\"key\":%d,\"value\":%d}", 1, 4);
+ else if (reverbPreset > (AUDIO_REVERB_PRESET) 0x00400000 && reverbPreset < (AUDIO_REVERB_PRESET) 0x00400002)
+ return m_parameter->setInt("che.audio.morph.threedim_voice", 10);
+ else {
+ return -ERR_INVALID_ARGUMENT;
+ }
+ }
+
+ int setAudioEffectPreset(AUDIO_EFFECT_PRESET preset){
+ if(!m_parameter)
return -ERR_NOT_INITIALIZED;
+ if(preset == AUDIO_EFFECT_OFF) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 0);
+ }
+ if(preset == ROOM_ACOUSTICS_KTV){
+ return m_parameter->setInt("che.audio.morph.reverb_preset", 1);
+ }
+ if(preset == ROOM_ACOUSTICS_VOCAL_CONCERT) {
+ return m_parameter->setInt("che.audio.morph.reverb_preset", 2);
+ }
+ if(preset == ROOM_ACOUSTICS_STUDIO) {
+ return m_parameter->setInt("che.audio.morph.reverb_preset", 5);
+ }
+ if(preset == ROOM_ACOUSTICS_PHONOGRAPH) {
+ return m_parameter->setInt("che.audio.morph.reverb_preset", 8);
+ }
+ if(preset == ROOM_ACOUSTICS_VIRTUAL_STEREO) {
+ return m_parameter->setInt("che.audio.morph.virtual_stereo", 1);
+ }
+ if(preset == ROOM_ACOUSTICS_SPACIAL) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 15);
+ }
+ if(preset == ROOM_ACOUSTICS_ETHEREAL) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 5);
+ }
+ if(preset == ROOM_ACOUSTICS_3D_VOICE) {
+ return m_parameter->setInt("che.audio.morph.threedim_voice", 10);
+ }
+ if(preset == VOICE_CHANGER_EFFECT_UNCLE) {
+ return m_parameter->setInt("che.audio.morph.reverb_preset", 3);
+ }
+ if(preset == VOICE_CHANGER_EFFECT_OLDMAN) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 1);
}
+ if(preset == VOICE_CHANGER_EFFECT_BOY) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 2);
+ }
+ if(preset == VOICE_CHANGER_EFFECT_SISTER) {
+ return m_parameter->setInt("che.audio.morph.reverb_preset", 4);
+ }
+ if(preset == VOICE_CHANGER_EFFECT_GIRL) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 3);
+ }
+ if(preset == VOICE_CHANGER_EFFECT_PIGKING) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 4);
+ }
+ if(preset == VOICE_CHANGER_EFFECT_HULK) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 6);
+ }
+ if(preset == STYLE_TRANSFORMATION_RNB) {
+ return m_parameter->setInt("che.audio.morph.reverb_preset", 7);
+ }
+ if(preset == STYLE_TRANSFORMATION_POPULAR) {
+ return m_parameter->setInt("che.audio.morph.reverb_preset", 6);
+ }
+ if(preset == PITCH_CORRECTION) {
+ return setObject( "che.audio.morph.electronic_voice", "{\"key\":%d,\"value\":%d}", 1, 4);
+ }
+ return -ERR_INVALID_ARGUMENT;
+ }
+
+ int setVoiceBeautifierPreset(VOICE_BEAUTIFIER_PRESET preset) {
+ if(!m_parameter)
+ return -ERR_NOT_INITIALIZED;
+ if(preset == VOICE_BEAUTIFIER_OFF) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 0);
+ }
+ if(preset == CHAT_BEAUTIFIER_MAGNETIC) {
+ return m_parameter->setInt("che.audio.morph.beauty_voice", 1);
+ }
+ if(preset == CHAT_BEAUTIFIER_FRESH) {
+ return m_parameter->setInt("che.audio.morph.beauty_voice", 2);
+ }
+ if(preset == CHAT_BEAUTIFIER_VITALITY) {
+ return m_parameter->setInt("che.audio.morph.beauty_voice", 3);
+ }
+ /*if(preset == SINGING_BEAUTIFICATION_MAN) {
+ return m_parameter->setInt("che.audio.morph.beauty_sing", 1);
+ }
+ if(preset == SINGING_BEAUTIFICATION_WOMAN) {
+ return m_parameter->setInt("che.audio.morph.beauty_sing", 2);
+ }*/
+ if(preset == TIMBRE_TRANSFORMATION_VIGOROUS) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 7);
+ }
+ if(preset == TIMBRE_TRANSFORMATION_DEEP) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 8);
+ }
+ if(preset == TIMBRE_TRANSFORMATION_MELLOW) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 9);
+ }
+ if(preset == TIMBRE_TRANSFORMATION_FALSETTO) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 10);
+ }
+ if(preset == TIMBRE_TRANSFORMATION_FULL) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 11);
+ }
+ if(preset == TIMBRE_TRANSFORMATION_CLEAR) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 12);
+ }
+ if(preset == TIMBRE_TRANSFORMATION_RESOUNDING) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 13);
+ }
+ if(preset == TIMBRE_TRANSFORMATION_RINGING) {
+ return m_parameter->setInt("che.audio.morph.voice_changer", 14);
+ }
+ return -ERR_INVALID_ARGUMENT;
+ }
+
+ int setAudioEffectParameters(AUDIO_EFFECT_PRESET preset, int param1, int param2){
+ if(!m_parameter)
+ return -ERR_NOT_INITIALIZED;
+ if(preset == PITCH_CORRECTION){
+ return setObject( "che.audio.morph.electronic_voice", "{\"key\":%d,\"value\":%d}", param1, param2);
+ }
+ if(preset == ROOM_ACOUSTICS_3D_VOICE){
+ return m_parameter->setInt("che.audio.morph.threedim_voice", param1);
+ }
+ return -ERR_INVALID_ARGUMENT;
}
/** **DEPRECATED** Use \ref IRtcEngine::disableAudio "disableAudio" instead. Disables the audio function in the channel.
@@ -7439,17 +8804,17 @@ class RtcEngineParameters
return m_parameter ? m_parameter->setBool("che.pause.audio", true) : -ERR_NOT_INITIALIZED;
}
-
+
int resumeAudio() {
return m_parameter ? m_parameter->setBool("che.pause.audio", false) : -ERR_NOT_INITIALIZED;
}
-
+
int setHighQualityAudioParameters(bool fullband, bool stereo, bool fullBitrate) {
return setObject("che.audio.codec.hq", "{\"fullband\":%s,\"stereo\":%s,\"fullBitrate\":%s}", fullband ? "true" : "false", stereo ? "true" : "false", fullBitrate ? "true" : "false");
}
-
+
int adjustRecordingSignalVolume(int volume) {//[0, 400]: e.g. 50~0.5x 100~1x 400~4x
if (volume < 0)
volume = 0;
@@ -7458,7 +8823,7 @@ class RtcEngineParameters
return m_parameter ? m_parameter->setInt("che.audio.record.signal.volume", volume) : -ERR_NOT_INITIALIZED;
}
-
+
int adjustPlaybackSignalVolume(int volume) {//[0, 400]
if (volume < 0)
volume = 0;
@@ -7467,35 +8832,35 @@ class RtcEngineParameters
return m_parameter ? m_parameter->setInt("che.audio.playout.signal.volume", volume) : -ERR_NOT_INITIALIZED;
}
-
+
int enableAudioVolumeIndication(int interval, int smooth, bool report_vad) { // in ms: <= 0: disable, > 0: enable, interval in ms
if (interval < 0)
interval = 0;
return setObject("che.audio.volume_indication", "{\"interval\":%d,\"smooth\":%d,\"vad\":%d}", interval, smooth, report_vad);
}
-
+
int muteLocalAudioStream(bool mute) {
return setParameters("{\"rtc.audio.mute_me\":%s,\"che.audio.mute_me\":%s}", mute ? "true" : "false", mute ? "true" : "false");
}
// mute/unmute all peers. Unmuting clears all muted peers specified via the mutePeer() interface
-
+
int muteRemoteAudioStream(uid_t uid, bool mute) {
return setObject("rtc.audio.mute_peer", "{\"uid\":%u,\"mute\":%s}", uid, mute?"true":"false");
}
-
+
int muteAllRemoteAudioStreams(bool mute) {
return m_parameter ? m_parameter->setBool("rtc.audio.mute_peers", mute) : -ERR_NOT_INITIALIZED;
}
-
+
int setDefaultMuteAllRemoteAudioStreams(bool mute) {
return m_parameter ? m_parameter->setBool("rtc.audio.set_default_mute_peers", mute) : -ERR_NOT_INITIALIZED;
}
-
+
int setExternalAudioSource(bool enabled, int sampleRate, int channels) {
if (enabled)
return setParameters("{\"che.audio.external_capture\":true,\"che.audio.external_capture.push\":true,\"che.audio.set_capture_raw_audio_format\":{\"sampleRate\":%d,\"channelCnt\":%d,\"mode\":%d}}", sampleRate, channels, RAW_AUDIO_FRAME_OP_MODE_TYPE::RAW_AUDIO_FRAME_OP_MODE_READ_WRITE);
@@ -7503,7 +8868,7 @@ class RtcEngineParameters
return setParameters("{\"che.audio.external_capture\":false,\"che.audio.external_capture.push\":false}");
}
-
+
int setExternalAudioSink(bool enabled, int sampleRate, int channels) {
if (enabled)
return setParameters("{\"che.audio.external_render\":true,\"che.audio.external_render.pull\":true,\"che.audio.set_render_raw_audio_format\":{\"sampleRate\":%d,\"channelCnt\":%d,\"mode\":%d}}", sampleRate, channels, RAW_AUDIO_FRAME_OP_MODE_TYPE::RAW_AUDIO_FRAME_OP_MODE_READ_ONLY);
@@ -7511,7 +8876,7 @@ class RtcEngineParameters
return setParameters("{\"che.audio.external_render\":false,\"che.audio.external_render.pull\":false}");
}
-
+
int setLogFile(const char* filePath) {
if (!m_parameter) return -ERR_NOT_INITIALIZED;
#if defined(_WIN32)
@@ -7524,73 +8889,73 @@ class RtcEngineParameters
return m_parameter->setString("rtc.log_file", filePath);
}
-
+
int setLogFilter(unsigned int filter) {
return m_parameter ? m_parameter->setUInt("rtc.log_filter", filter&LOG_FILTER_MASK) : -ERR_NOT_INITIALIZED;
}
-
+
int setLogFileSize(unsigned int fileSizeInKBytes) {
return m_parameter ? m_parameter->setUInt("rtc.log_size", fileSizeInKBytes) : -ERR_NOT_INITIALIZED;
}
-
+
int setLocalRenderMode(RENDER_MODE_TYPE renderMode) {
return setRemoteRenderMode(0, renderMode);
}
-
+
int setRemoteRenderMode(uid_t uid, RENDER_MODE_TYPE renderMode) {
return setParameters("{\"che.video.render_mode\":[{\"uid\":%u,\"renderMode\":%d}]}", uid, renderMode);
}
-
+
int setCameraCapturerConfiguration(const CameraCapturerConfiguration& config) {
if (!m_parameter) return -ERR_NOT_INITIALIZED;
return m_parameter->setInt("che.video.camera_capture_mode", (int)config.preference);
}
-
+
int enableDualStreamMode(bool enabled) {
return setParameters("{\"rtc.dual_stream_mode\":%s,\"che.video.enableLowBitRateStream\":%d}", enabled ? "true" : "false", enabled ? 1 : 0);
}
-
+
int setRemoteVideoStreamType(uid_t uid, REMOTE_VIDEO_STREAM_TYPE streamType) {
return setParameters("{\"rtc.video.set_remote_video_stream\":{\"uid\":%u,\"stream\":%d}, \"che.video.setstream\":{\"uid\":%u,\"stream\":%d}}", uid, streamType, uid, streamType);
// return setObject("rtc.video.set_remote_video_stream", "{\"uid\":%u,\"stream\":%d}", uid, streamType);
}
-
+
int setRemoteDefaultVideoStreamType(REMOTE_VIDEO_STREAM_TYPE streamType) {
return m_parameter ? m_parameter->setInt("rtc.video.set_remote_default_video_stream_type", streamType) : -ERR_NOT_INITIALIZED;
}
-
+
int setRecordingAudioFrameParameters(int sampleRate, int channel, RAW_AUDIO_FRAME_OP_MODE_TYPE mode, int samplesPerCall) {
return setObject("che.audio.set_capture_raw_audio_format", "{\"sampleRate\":%d,\"channelCnt\":%d,\"mode\":%d,\"samplesPerCall\":%d}", sampleRate, channel, mode, samplesPerCall);
}
-
+
int setPlaybackAudioFrameParameters(int sampleRate, int channel, RAW_AUDIO_FRAME_OP_MODE_TYPE mode, int samplesPerCall) {
return setObject("che.audio.set_render_raw_audio_format", "{\"sampleRate\":%d,\"channelCnt\":%d,\"mode\":%d,\"samplesPerCall\":%d}", sampleRate, channel, mode, samplesPerCall);
}
-
+
int setMixedAudioFrameParameters(int sampleRate, int samplesPerCall) {
return setObject("che.audio.set_mixed_raw_audio_format", "{\"sampleRate\":%d,\"samplesPerCall\":%d}", sampleRate, samplesPerCall);
}
-
+
int enableWebSdkInteroperability(bool enabled) {//enable interoperability with zero-plugin web sdk
return setParameters("{\"rtc.video.web_h264_interop_enable\":%s,\"che.video.web_h264_interop_enable\":%s}", enabled ? "true" : "false", enabled ? "true" : "false");
}
//only for live broadcast
-
+
int setVideoQualityParameters(bool preferFrameRateOverImageQuality) {
return setParameters("{\"rtc.video.prefer_frame_rate\":%s,\"che.video.prefer_frame_rate\":%s}", preferFrameRateOverImageQuality ? "true" : "false", preferFrameRateOverImageQuality ? "true" : "false");
}
-
+
int setLocalVideoMirrorMode(VIDEO_MIRROR_MODE_TYPE mirrorMode) {
if (!m_parameter) return -ERR_NOT_INITIALIZED;
const char *value;
@@ -7610,18 +8975,18 @@ class RtcEngineParameters
return m_parameter->setString("che.video.localViewMirrorSetting", value);
}
-
+
int setLocalPublishFallbackOption(STREAM_FALLBACK_OPTIONS option) {
return m_parameter ? m_parameter->setInt("rtc.local_publish_fallback_option", option) : -ERR_NOT_INITIALIZED;
}
-
+
int setRemoteSubscribeFallbackOption(STREAM_FALLBACK_OPTIONS option) {
return m_parameter ? m_parameter->setInt("rtc.remote_subscribe_fallback_option", option) : -ERR_NOT_INITIALIZED;
}
#if (defined(__APPLE__) && TARGET_OS_MAC && !TARGET_OS_IPHONE) || defined(_WIN32)
-
+
int enableLoopbackRecording(bool enabled, const char* deviceName = NULL) {
if (!deviceName) {
return setParameters("{\"che.audio.loopback.recording\":%s}", enabled ? "true" : "false");
@@ -7632,7 +8997,7 @@ class RtcEngineParameters
}
#endif
-
+
int setInEarMonitoringVolume(int volume) {
return m_parameter ? m_parameter->setInt("che.audio.headset.monitoring.parameter", volume) : -ERR_NOT_INITIALIZED;
}
@@ -7677,11 +9042,11 @@ class RtcEngineParameters
////////////////////////////////////////////////////////
/** Creates the IRtcEngine object and returns the pointer.
- *
+ *
* @note The Agora RTC Native SDK supports creating only one `IRtcEngine` object for an app for now.
- *
+ *
* @return Pointer to the IRtcEngine object.
- */
+ */
AGORA_API agora::rtc::IRtcEngine* AGORA_CALL createAgoraRtcEngine();
////////////////////////////////////////////////////////
diff --git a/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraService.h b/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraService.h
index c6195878f..299158c31 100644
--- a/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraService.h
+++ b/Android/APIExample/lib-stream-encrypt/src/main/cpp/include/agora/IAgoraService.h
@@ -30,7 +30,7 @@ class IAgoraService
AGORA_CPP_API static void release ();
/** Initializes the engine.
-
+
@param context RtcEngine context.
@return
- 0: Success.
@@ -51,7 +51,7 @@ class IAgoraService
} // namespace agora
/** Gets the SDK version number.
-
+
@param build Build number of the Agora SDK.
@return
- 0: Success.
diff --git a/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/ExternalVideoInputManager.java b/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/ExternalVideoInputManager.java
index c87a02214..f0b1500ad 100644
--- a/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/ExternalVideoInputManager.java
+++ b/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/ExternalVideoInputManager.java
@@ -6,10 +6,13 @@
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
+import android.os.Build;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
+import androidx.annotation.RequiresApi;
+
import io.agora.advancedvideo.externvideosource.localvideo.LocalVideoInput;
import io.agora.advancedvideo.externvideosource.screenshare.ScreenShareInput;
import io.agora.api.component.gles.ProgramTextureOES;
@@ -19,21 +22,25 @@
import io.agora.rtc.mediaio.IVideoSource;
import io.agora.rtc.mediaio.MediaIO;
+import static android.media.MediaRecorder.VideoSource.CAMERA;
import static io.agora.api.component.Constant.ENGINE;
import static io.agora.api.component.Constant.TEXTUREVIEW;
+import static io.agora.rtc.mediaio.MediaIO.BufferType.TEXTURE;
+import static io.agora.rtc.mediaio.MediaIO.PixelFormat.TEXTURE_OES;
-/**{@link IVideoSource}
+/**
+ * {@link IVideoSource}
* The IVideoSource interface defines a set of protocols to implement the custom video source and
- * pass it to the underlying media engine to replace the default video source.
+ * pass it to the underlying media engine to replace the default video source.
* By default, when enabling real-time communications, the Agora SDK enables the default video input
- * device (built-in camera) to start video streaming. The IVideoSource interface defines a set of
- * protocols to create customized video source objects and pass them to the media engine to replace
- * the default camera source so that you can take ownership of the video source and manipulate it.
+ * device (built-in camera) to start video streaming. The IVideoSource interface defines a set of
+ * protocols to create customized video source objects and pass them to the media engine to replace
+ * the default camera source so that you can take ownership of the video source and manipulate it.
* Once you implement this interface, the Agora Media Engine automatically releases its ownership of
- * the current video input device and pass it on to you, so that you can use the same video input
- * device to capture the video stream.*/
-public class ExternalVideoInputManager implements IVideoSource
-{
+ * the current video input device and pass it on to you, so that you can use the same video input
+ * device to capture the video stream.
+ */
+public class ExternalVideoInputManager implements IVideoSource {
private static final String TAG = ExternalVideoInputManager.class.getSimpleName();
public static final int TYPE_LOCAL_VIDEO = 1;
@@ -61,34 +68,28 @@ public class ExternalVideoInputManager implements IVideoSource
private Context context;
- public ExternalVideoInputManager(Context context)
- {
+ public ExternalVideoInputManager(Context context) {
this.context = context;
}
- void start()
- {
+ void start() {
mThread = new ExternalVideoInputThread();
mThread.start();
}
- boolean setExternalVideoInput(int type, Intent intent)
- {
+ boolean setExternalVideoInput(int type, Intent intent) {
// Do not reset current input if the target type is
// the same as the current which is still running.
if (mCurInputType == type && mCurVideoInput != null
- && mCurVideoInput.isRunning())
- {
+ && mCurVideoInput.isRunning()) {
return false;
}
IExternalVideoInput input;
- switch (type)
- {
+ switch (type) {
case TYPE_LOCAL_VIDEO:
input = new LocalVideoInput(intent.getStringExtra(FLAG_VIDEO_PATH));
- if (TEXTUREVIEW != null)
- {
+ if (TEXTUREVIEW != null) {
TEXTUREVIEW.setSurfaceTextureListener((LocalVideoInput) input);
}
break;
@@ -109,76 +110,87 @@ boolean setExternalVideoInput(int type, Intent intent)
return true;
}
- private void setExternalVideoInput(IExternalVideoInput source)
- {
+ private void setExternalVideoInput(IExternalVideoInput source) {
if (mThread != null && mThread.isAlive()) {
mThread.pauseThread();
}
mNewVideoInput = source;
}
- void stop()
- {
+ void stop() {
mThread.setThreadStopped();
}
- /**This callback initializes the video source. You can enable the camera or initialize the video
- * source and then pass one of the following return values to inform the media engine whether
- * the video source is ready.
- * @param consumer The IVideoFrameConsumer object which the media engine passes back. You need
- * to reserve this object and pass the video frame to the media engine through
- * this object once the video source is initialized. See the following contents
- * for the definition of IVideoFrameConsumer.
- * @return
- * true: The external video source is initialized.
- * false: The external video source is not ready or fails to initialize, the media engine stops
- * and reports the error.
- * PS:
- * When initializing the video source, you need to specify a buffer type in the getBufferType
- * method and pass the video source in the specified type to the media engine.*/
+ /**
+ * This callback initializes the video source. You can enable the camera or initialize the video
+ * source and then pass one of the following return values to inform the media engine whether
+ * the video source is ready.
+ *
+ * @param consumer The IVideoFrameConsumer object which the media engine passes back. You need
+ * to reserve this object and pass the video frame to the media engine through
+ * this object once the video source is initialized. See the following contents
+ * for the definition of IVideoFrameConsumer.
+ * @return true: The external video source is initialized.
+ * false: The external video source is not ready or fails to initialize, the media engine stops
+ * and reports the error.
+ * PS:
+ * When initializing the video source, you need to specify a buffer type in the getBufferType
+ * method and pass the video source in the specified type to the media engine.
+ */
@Override
- public boolean onInitialize(IVideoFrameConsumer consumer)
- {
+ public boolean onInitialize(IVideoFrameConsumer consumer) {
mConsumer = consumer;
return true;
}
- /**The SDK triggers this callback when the underlying media engine is ready to start video streaming.
- * You should start the video source to capture the video frame. Once the frame is ready, use
- * IVideoFrameConsumer to consume the video frame.
- * @return
- * true: The external video source is enabled and the SDK calls IVideoFrameConsumer to receive
- * video frames.
- * false: The external video source is not ready or fails to enable, the media engine stops and
- * reports the error.*/
+ /**
+ * The SDK triggers this callback when the underlying media engine is ready to start video streaming.
+ * You should start the video source to capture the video frame. Once the frame is ready, use
+ * IVideoFrameConsumer to consume the video frame.
+ *
+ * @return true: The external video source is enabled and the SDK calls IVideoFrameConsumer to receive
+ * video frames.
+ * false: The external video source is not ready or fails to enable, the media engine stops and
+ * reports the error.
+ */
@Override
- public boolean onStart()
- {
+ public boolean onStart() {
return true;
}
- /**The SDK triggers this callback when the media engine stops streaming. You should then stop
- * capturing and consuming the video frame. After calling this method, the video frames are
- * discarded by the media engine.*/
+ /**
+ * The SDK triggers this callback when the media engine stops streaming. You should then stop
+ * capturing and consuming the video frame. After calling this method, the video frames are
+ * discarded by the media engine.
+ */
@Override
- public void onStop()
- {
+ public void onStop() {
}
- /**The SDK triggers this callback when IVideoFrameConsumer is released by the media engine. You
- * can now release the video source as well as IVideoFrameConsumer.*/
+ /**
+ * The SDK triggers this callback when IVideoFrameConsumer is released by the media engine. You
+ * can now release the video source as well as IVideoFrameConsumer.
+ */
@Override
- public void onDispose()
- {
+ public void onDispose() {
Log.e(TAG, "SwitchExternalVideo-onDispose");
mConsumer = null;
}
@Override
- public int getBufferType()
- {
- return MediaIO.BufferType.TEXTURE.intValue();
+ public int getBufferType() {
+ return TEXTURE.intValue();
+ }
+
+ @Override
+ public int getCaptureType() {
+ return MediaIO.CaptureType.SCREEN.intValue();
+ }
+
+ @Override
+ public int getContentHint() {
+ return MediaIO.ContentHint.NONE.intValue();
}
private class ExternalVideoInputThread extends Thread
@@ -198,8 +210,7 @@ private class ExternalVideoInputThread extends Thread
private volatile boolean mStopped;
private volatile boolean mPaused;
- private void prepare()
- {
+ private void prepare() {
mEglCore = new EglCore();
mEglSurface = mEglCore.createOffscreenSurface(1, 1);
mEglCore.makeCurrent(mEglSurface);
@@ -215,10 +226,10 @@ private void prepare()
ENGINE.setVideoSource(ExternalVideoInputManager.this);
}
- private void release()
- {
- if(ENGINE == null)
- {return;}
+ private void release() {
+ if (ENGINE == null) {
+ return;
+ }
/**release external video source*/
ENGINE.setVideoSource(null);
mSurface.release();
@@ -230,35 +241,30 @@ private void release()
mEglCore.release();
}
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
- public void run()
- {
+ public void run() {
prepare();
- while (!mStopped)
- {
- if (mCurVideoInput != mNewVideoInput)
- {
+ while (!mStopped) {
+ if (mCurVideoInput != mNewVideoInput) {
Log.i(TAG, "New video input selected");
// The current video input is running, but we are now
// introducing a new video input type.
// The new video input type may be null, meaning
// that we are not using any video.
- if (mCurVideoInput != null)
- {
+ if (mCurVideoInput != null) {
mCurVideoInput.onVideoStopped(mThreadContext);
Log.i(TAG, "recycle stopped input");
}
mCurVideoInput = mNewVideoInput;
- if (mCurVideoInput != null)
- {
+ if (mCurVideoInput != null) {
mCurVideoInput.onVideoInitialized(mSurface);
Log.i(TAG, "initialize new input");
}
- if (mCurVideoInput == null)
- {
+ if (mCurVideoInput == null) {
continue;
}
@@ -267,15 +273,12 @@ public void run()
mVideoHeight = size.getHeight();
mSurfaceTexture.setDefaultBufferSize(mVideoWidth, mVideoHeight);
- if (mPaused)
- {
+ if (mPaused) {
// If current thread is in pause state, it must be paused
// because of switching external video sources.
mPaused = false;
}
- }
- else if (mCurVideoInput != null && !mCurVideoInput.isRunning())
- {
+ } else if (mCurVideoInput != null && !mCurVideoInput.isRunning()) {
// Current video source has been stopped by other
// mechanisms (video playing has completed, etc).
// A callback method is invoked to do some collect
@@ -289,32 +292,28 @@ else if (mCurVideoInput != null && !mCurVideoInput.isRunning())
mNewVideoInput = null;
}
- if (mPaused || mCurVideoInput == null)
- {
+ if (mPaused || mCurVideoInput == null) {
waitForTime(DEFAULT_WAIT_TIME);
continue;
}
- try
- {
+ try {
mSurfaceTexture.updateTexImage();
mSurfaceTexture.getTransformMatrix(mTransform);
}
- catch (Exception e)
- {
+ catch (Exception e) {
e.printStackTrace();
}
- if (mCurVideoInput != null)
- {
+ if (mCurVideoInput != null) {
mCurVideoInput.onFrameAvailable(mThreadContext, mTextureId, mTransform);
}
mEglCore.makeCurrent(mEglSurface);
GLES20.glViewport(0, 0, mVideoWidth, mVideoHeight);
- if (mConsumer != null)
- {
+ if (mConsumer != null) {
+ Log.e(TAG, "publish stream with ->width:" + mVideoWidth + ",height:" + mVideoHeight);
/**Receives the video frame in texture,and push it out
* @param textureId ID of the texture
* @param format Pixel format of the video frame
@@ -324,7 +323,7 @@ else if (mCurVideoInput != null && !mCurVideoInput.isRunning())
* @param timestamp Timestamp of the video frame. For each video frame, you need to set a timestamp
* @param matrix Matrix of the texture. The float value is between 0 and 1, such as 0.1, 0.2, and so on*/
mConsumer.consumeTextureFrame(mTextureId,
- MediaIO.PixelFormat.TEXTURE_OES.intValue(),
+ TEXTURE_OES.intValue(),
mVideoWidth, mVideoHeight, 0,
System.currentTimeMillis(), mTransform);
}
@@ -335,8 +334,7 @@ else if (mCurVideoInput != null && !mCurVideoInput.isRunning())
waitForNextFrame();
}
- if (mCurVideoInput != null)
- {
+ if (mCurVideoInput != null) {
// The manager will cause the current
// video source to be stopped.
mCurVideoInput.onVideoStopped(mThreadContext);
@@ -344,32 +342,26 @@ else if (mCurVideoInput != null && !mCurVideoInput.isRunning())
release();
}
- void pauseThread()
- {
+ void pauseThread() {
mPaused = true;
}
- void setThreadStopped()
- {
+ void setThreadStopped() {
mStopped = true;
}
- private void waitForNextFrame()
- {
+ private void waitForNextFrame() {
int wait = mCurVideoInput != null
? mCurVideoInput.timeToWait()
: DEFAULT_WAIT_TIME;
waitForTime(wait);
}
- private void waitForTime(int time)
- {
- try
- {
+ private void waitForTime(int time) {
+ try {
Thread.sleep(time);
}
- catch (InterruptedException e)
- {
+ catch (InterruptedException e) {
e.printStackTrace();
}
}
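
Aside: the Javadoc blocks above spell out the IVideoSource contract (onInitialize stores the IVideoFrameConsumer, onStart/onStop bracket streaming, onDispose drops the consumer, and getBufferType/getCaptureType/getContentHint describe the frames the engine should expect). A minimal sketch of that contract, using only the types and signatures visible in this patch, could look like the following; the class name MinimalTextureSource and the pushFrame() helper are illustrative, and the texture id, size, and transform matrix are assumed to be produced by an external capture thread such as ExternalVideoInputThread above.

    import io.agora.rtc.mediaio.IVideoFrameConsumer;
    import io.agora.rtc.mediaio.IVideoSource;
    import io.agora.rtc.mediaio.MediaIO;

    // Sketch only: a texture-based video source. The engine drives the
    // lifecycle callbacks; pushFrame() is a hypothetical hook a capture
    // thread would call once a GL texture is ready.
    public class MinimalTextureSource implements IVideoSource {
        private volatile IVideoFrameConsumer consumer;

        @Override
        public boolean onInitialize(IVideoFrameConsumer consumer) {
            this.consumer = consumer; // keep it; frames are pushed through it later
            return true;              // source is ready
        }

        @Override
        public boolean onStart() { return true; } // start capturing here if needed

        @Override
        public void onStop() { }                  // stop pushing frames

        @Override
        public void onDispose() { consumer = null; } // engine released the consumer

        @Override
        public int getBufferType() { return MediaIO.BufferType.TEXTURE.intValue(); }

        // The two overrides below mirror the values this patch returns; they are
        // only required on SDK versions whose IVideoSource declares them.
        @Override
        public int getCaptureType() { return MediaIO.CaptureType.SCREEN.intValue(); }

        @Override
        public int getContentHint() { return MediaIO.ContentHint.NONE.intValue(); }

        // Hypothetical helper, called by the owner's capture/render thread.
        public void pushFrame(int textureId, int width, int height, float[] transform) {
            IVideoFrameConsumer c = consumer;
            if (c != null) {
                c.consumeTextureFrame(textureId,
                        MediaIO.PixelFormat.TEXTURE_OES.intValue(),
                        width, height, 0 /* rotation */,
                        System.currentTimeMillis(), transform);
            }
        }
    }

It would be registered the same way the thread above does it: ENGINE.setVideoSource(new MinimalTextureSource()).
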
diff --git a/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/IExternalVideoInput.java b/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/IExternalVideoInput.java
index de95de279..572bbdfa7 100644
--- a/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/IExternalVideoInput.java
+++ b/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/IExternalVideoInput.java
@@ -7,6 +7,7 @@ public interface IExternalVideoInput {
/**
* Called when the external video manager is
* initializing this video input
+ *
* @param target The drawing target of the video input
*/
void onVideoInitialized(Surface target);
@@ -14,22 +15,25 @@ public interface IExternalVideoInput {
/**
* Called when the external video manager wants
* to stop this video input
+ *
* @param context The context of the GL thread
*/
void onVideoStopped(GLThreadContext context);
boolean isRunning();
+
/**
* Called when a complete video frame data is prepared to be
* processed. This is usually used to draw local preview,
* as well as other frame processing procedure before
* being transmitted to remote users.
- * @param context The context of the GL thread
+ *
+ * @param context The context of the GL thread
* @param textureId texture id
* @param transform the transformation matrix of the texture
*/
- void onFrameAvailable(GLThreadContext context, int textureId, float[] transform);
+ void onFrameAvailable(GLThreadContext context, int textureId, float[] transform);
/**
* @return the size of the frames
@@ -39,6 +43,7 @@ public interface IExternalVideoInput {
/**
* Determines the time to wait for the next possible frame due
* to the presentation time of frames of different video types.
+ *
* @return time to wait
*/
int timeToWait();
diff --git a/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/screenshare/ScreenShareInput.java b/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/screenshare/ScreenShareInput.java
index 36c270cb9..8de2d6032 100644
--- a/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/screenshare/ScreenShareInput.java
+++ b/Android/APIExample/lib-switch-external-video/src/main/java/io/agora/advancedvideo/externvideosource/screenshare/ScreenShareInput.java
@@ -3,17 +3,29 @@
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
+import android.graphics.Bitmap;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
+import android.media.Image;
+import android.media.ImageReader;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
+import android.net.Uri;
import android.os.Build;
+import android.os.Environment;
+import android.os.Looper;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
+import android.widget.Toast;
import androidx.annotation.RequiresApi;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
import io.agora.advancedvideo.externvideosource.GLThreadContext;
import io.agora.advancedvideo.externvideosource.IExternalVideoInput;
@@ -52,12 +64,108 @@ public void onVideoInitialized(Surface target) {
return;
}
+
+
+
+// mWidth = mSurfaceWidth;
+// mHeight = mSurfaceHeight;
+// mImageReader = ImageReader.newInstance(mSurfaceWidth, mSurfaceHeight, 0x01, 2);
+// mVirtualDisplay = mMediaProjection.createVirtualDisplay(
+// VIRTUAL_DISPLAY_NAME, mSurfaceWidth, mSurfaceHeight, mScreenDpi,
+// DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, mImageReader.getSurface(),
+// null, null);
+// Looper.prepare();
+// mImageReader.setOnImageAvailableListener(new ImageAvailableListener(), null);
+// Looper.loop();
+
+
+
+
mVirtualDisplay = mMediaProjection.createVirtualDisplay(
VIRTUAL_DISPLAY_NAME, mSurfaceWidth, mSurfaceHeight, mScreenDpi,
DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, target,
null, null);
}
+
+ private ImageReader mImageReader;
+ private static int IMAGES_PRODUCED;
+ private static final String SCREENCAP_NAME = "screencap";
+ private int mWidth;
+ private int mHeight;
+ private class ImageAvailableListener implements ImageReader.OnImageAvailableListener {
+ @Override
+ public void onImageAvailable(ImageReader reader) {
+ try (Image image = reader.acquireLatestImage()) {
+ if (image != null) {
+ String name = String.valueOf(System.currentTimeMillis());
+ IMAGES_PRODUCED++;
+ Log.e("captured image: ", String.valueOf(IMAGES_PRODUCED));
+
+ if (IMAGES_PRODUCED % 10 == 0) {
+ saveJpeg(image, name);
+ }
+ image.close();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ private void saveJpeg(Image image, String name) {
+ Image.Plane[] planes = image.getPlanes();
+ ByteBuffer buffer = planes[0].getBuffer();
+ int pixelStride = planes[0].getPixelStride();
+ int rowStride = planes[0].getRowStride();
+ int rowPadding = rowStride - pixelStride * mWidth;
+
+ Bitmap bitmap = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(buffer);
+ //bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
+ saveBitmap2file(bitmap, mContext.getApplicationContext(), name);
+ }
+ private static final String SD_PATH = Environment.getExternalStorageDirectory().getPath() + "/MediaProjection/";
+ private static void saveBitmap2file(Bitmap bmp, Context context, String num) {
+ String savePath;
+ String fileName = num + ".JPEG";
+ if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
+ savePath = SD_PATH;
+ } else {
+ Toast.makeText(context, "淇濆瓨澶辫触锛", Toast.LENGTH_SHORT).show();
+ return;
+ }
+ File filePic = new File(savePath + fileName);
+ try {
+ if (!filePic.exists()) {
+ filePic.getParentFile().mkdirs();
+ filePic.createNewFile();
+ }
+ FileOutputStream fos = new FileOutputStream(filePic);
+ bmp.compress(Bitmap.CompressFormat.JPEG, 100, fos);
+ fos.flush();
+ fos.close();
+ Toast.makeText(context, "淇濆瓨鎴愬姛,浣嶇疆:" + filePic.getAbsolutePath(), Toast.LENGTH_SHORT).show();
+ }
+ catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ // Then insert the file into the system media gallery
+// try {
+// MediaStore.Images.Media.insertImage(context.getContentResolver(), filePic.getAbsolutePath(), fileName, null);
+// } catch (FileNotFoundException e) {
+// e.printStackTrace();
+// }
+ // Finally, notify the gallery to rescan the saved file
+ context.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.parse("file://" + savePath + fileName)));
+
+ }
+
+
+
+
+
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Override
public void onVideoStopped(GLThreadContext context) {
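
Aside: the commented-out ImageReader branch above pumps its callback with Looper.prepare()/Looper.loop() on the capture thread, which would block that thread. A sketch of the more conventional wiring, which hands the callback to a dedicated HandlerThread, is shown below. It assumes the same mMediaProjection, mSurfaceWidth, mSurfaceHeight, mScreenDpi, and VIRTUAL_DISPLAY_NAME members and the ImageAvailableListener class defined in this file, and would stand in for the commented block inside onVideoInitialized().

    import android.graphics.PixelFormat;
    import android.hardware.display.DisplayManager;
    import android.media.ImageReader;
    import android.os.Handler;
    import android.os.HandlerThread;

    // Sketch only: deliver ImageReader callbacks on their own handler thread
    // instead of looping on the capture thread.
    HandlerThread imageThread = new HandlerThread("screen-capture-images");
    imageThread.start();
    Handler imageHandler = new Handler(imageThread.getLooper());

    ImageReader imageReader = ImageReader.newInstance(
            mSurfaceWidth, mSurfaceHeight, PixelFormat.RGBA_8888, 2);
    imageReader.setOnImageAvailableListener(new ImageAvailableListener(), imageHandler);

    mMediaProjection.createVirtualDisplay(
            VIRTUAL_DISPLAY_NAME, mSurfaceWidth, mSurfaceHeight, mScreenDpi,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, imageReader.getSurface(),
            null /* callback */, null /* handler */);

When the virtual display is released, the ImageReader should be closed and the handler thread stopped with imageThread.quitSafely().
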
diff --git a/Android/APIExample/settings.gradle b/Android/APIExample/settings.gradle
index 1e6db9b19..28c7d0400 100644
--- a/Android/APIExample/settings.gradle
+++ b/Android/APIExample/settings.gradle
@@ -3,3 +3,4 @@ include ':app', ':lib-raw-data', ':lib-switch-external-video'
include ':lib-stream-encrypt'
include ':lib-component'
include ':lib-push-externalvideo'
+include ':lib-screensharing'
\ No newline at end of file
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 740956f82..cef45f82e 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -12,13 +12,22 @@ pool:
vmImage: 'macos-latest'
jobs:
-- template: cicd/build-template/build-ios.yml
+- template: ./iOS/cicd/build-template/build-ios.yml
parameters:
displayName: 'APIExampleiOS'
workingDirectory: 'iOS'
project: 'APIExample'
scheme: 'APIExample'
+- template: ./macOS/cicd/build-template/build-mac.yml
+ parameters:
+ displayName: 'APIExampleMacOS'
+ workingDirectory: 'macOS'
+ project: 'APIExample'
+ scheme: 'APIExample'
+ bundleid: 'io.agora.api.example.APIExample'
+ username: 'qianze.zhang@hotmail.com'
+ ascprovider: 'GM72UGLGZW'
- template: ./Android/build-template/build-android.yml
parameters:
diff --git a/iOS/.gitignore b/iOS/.gitignore
index 579929264..e9520cadf 100644
--- a/iOS/.gitignore
+++ b/iOS/.gitignore
@@ -3,7 +3,6 @@
*.DS_Store
*.xcscmblueprint
*.framework
-*.a
*.xcworkspacedata
xcshareddata
diff --git a/iOS/APIExample.xcodeproj/project.pbxproj b/iOS/APIExample.xcodeproj/project.pbxproj
index 003343193..7a813d59d 100644
--- a/iOS/APIExample.xcodeproj/project.pbxproj
+++ b/iOS/APIExample.xcodeproj/project.pbxproj
@@ -8,68 +8,246 @@
/* Begin PBXBuildFile section */
0318857924CD667A00C699EB /* SettingsViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0318857824CD667A00C699EB /* SettingsViewController.swift */; };
+ 0339BE64251DCA3B007D4FDD /* GlobalSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0339BE63251DCA3B007D4FDD /* GlobalSettings.swift */; };
+ 0339BE6D251DEAFC007D4FDD /* PrecallTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0339BE6B251DEAFC007D4FDD /* PrecallTest.swift */; };
+ 0339BE72251EF075007D4FDD /* MediaPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0339BE70251EF074007D4FDD /* MediaPlayer.swift */; };
+ 0339BE84251EF728007D4FDD /* AgoraRtcChannelPublishHelper.mm in Sources */ = {isa = PBXBuildFile; fileRef = 0339BE75251EF728007D4FDD /* AgoraRtcChannelPublishHelper.mm */; };
+ 0339BE85251EF728007D4FDD /* AgoraMediaPlayerEx.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0339BE76251EF728007D4FDD /* AgoraMediaPlayerEx.cpp */; };
+ 0339BE86251EF728007D4FDD /* AudioCircularBuffer.cc in Sources */ = {isa = PBXBuildFile; fileRef = 0339BE7C251EF728007D4FDD /* AudioCircularBuffer.cc */; };
+ 0339BE89251EF728007D4FDD /* AudioFrameObserver.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0339BE83251EF728007D4FDD /* AudioFrameObserver.cpp */; };
+ 0339BE9625203293007D4FDD /* ScreenShare.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0339BE9425203293007D4FDD /* ScreenShare.swift */; };
+ 0339BE9D25205B7F007D4FDD /* ReplayKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0339BE9C25205B7F007D4FDD /* ReplayKit.framework */; };
+ 0339BEA025205B7F007D4FDD /* SampleHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0339BE9F25205B7F007D4FDD /* SampleHandler.swift */; };
+ 0339BEB325205B80007D4FDD /* Agora-ScreenShare-Extension.appex in Embed App Extensions */ = {isa = PBXBuildFile; fileRef = 0339BE9B25205B7F007D4FDD /* Agora-ScreenShare-Extension.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; };
+ 0339BEC225205D1A007D4FDD /* libios_resampler.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 0339BEBD25205D1A007D4FDD /* libios_resampler.a */; };
+ 0339BEC325205D1A007D4FDD /* AgoraUploader.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0339BEBE25205D1A007D4FDD /* AgoraUploader.swift */; };
+ 0339BEC425205D1A007D4FDD /* AgoraAudioTube.mm in Sources */ = {isa = PBXBuildFile; fileRef = 0339BEC025205D1A007D4FDD /* AgoraAudioTube.mm */; };
+ 0339BEC525206635007D4FDD /* KeyCenter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03D13C0024488F1E00B599B3 /* KeyCenter.swift */; };
+ 0339BEC625207EA7007D4FDD /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 03BCEC5724494F3A00ED7177 /* Accelerate.framework */; };
+ 0339BEC72520A612007D4FDD /* GlobalSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0339BE63251DCA3B007D4FDD /* GlobalSettings.swift */; };
+ 0339BECC25210A93007D4FDD /* SuperResolution.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0339BECA25210A93007D4FDD /* SuperResolution.swift */; };
0339D6D224E91B80008739CD /* QuickSwitchChannelVCItem.xib in Resources */ = {isa = PBXBuildFile; fileRef = 0339D6D124E91B80008739CD /* QuickSwitchChannelVCItem.xib */; };
0339D6D424E91BAA008739CD /* QuickSwitchChannelVCItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0339D6D324E91BAA008739CD /* QuickSwitchChannelVCItem.swift */; };
0339D6D624E91CEB008739CD /* QuickSwitchChannel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0339D6D524E91CEB008739CD /* QuickSwitchChannel.swift */; };
- 036C42A924D27AB000A59000 /* CustomVideoSourceMediaIO.swift in Sources */ = {isa = PBXBuildFile; fileRef = 036C42A824D27AB000A59000 /* CustomVideoSourceMediaIO.swift */; };
+ 033A9EE5252D5C6900BC26E1 /* VideoMetadata.swift in Sources */ = {isa = PBXBuildFile; fileRef = 033A9EE4252D5C6900BC26E1 /* VideoMetadata.swift */; };
+ 033A9EEA252D5F5E00BC26E1 /* JoinMultiChannel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 033A9EE9252D5F5E00BC26E1 /* JoinMultiChannel.swift */; };
+ 033A9EFA252D61E200BC26E1 /* CustomAudioRender.swift in Sources */ = {isa = PBXBuildFile; fileRef = 033A9EEE252D61E200BC26E1 /* CustomAudioRender.swift */; };
+ 033A9EFB252D61E200BC26E1 /* CustomVideoSourcePush.swift in Sources */ = {isa = PBXBuildFile; fileRef = 033A9EF0252D61E200BC26E1 /* CustomVideoSourcePush.swift */; };
+ 033A9EFC252D61E200BC26E1 /* CustomVideoRender.swift in Sources */ = {isa = PBXBuildFile; fileRef = 033A9EF2252D61E200BC26E1 /* CustomVideoRender.swift */; };
+ 033A9EFF252D61E200BC26E1 /* CustomVideoSourceMediaIO.swift in Sources */ = {isa = PBXBuildFile; fileRef = 033A9EF7252D61E200BC26E1 /* CustomVideoSourceMediaIO.swift */; };
+ 033A9F00252D61E200BC26E1 /* CustomAudioSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 033A9EF9252D61E200BC26E1 /* CustomAudioSource.swift */; };
+ 033A9F07252D61FC00BC26E1 /* RawMediaData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 033A9F02252D61FB00BC26E1 /* RawMediaData.swift */; };
+ 033A9F08252D61FC00BC26E1 /* RTMPInjection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 033A9F04252D61FB00BC26E1 /* RTMPInjection.swift */; };
+ 033A9F09252D61FC00BC26E1 /* RTMPStreaming.swift in Sources */ = {isa = PBXBuildFile; fileRef = 033A9F06252D61FB00BC26E1 /* RTMPStreaming.swift */; };
+ 033A9F23252D70E400BC26E1 /* JoinChannelVideo.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F25252D70E400BC26E1 /* JoinChannelVideo.storyboard */; };
+ 033A9F2A252D737900BC26E1 /* Localizable.strings in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F2C252D737900BC26E1 /* Localizable.strings */; };
+ 033A9F30252D860100BC26E1 /* JoinChannelAudio.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F32252D860100BC26E1 /* JoinChannelAudio.storyboard */; };
+ 033A9F35252D896100BC26E1 /* RawMediaData.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F37252D896100BC26E1 /* RawMediaData.storyboard */; };
+ 033A9F3A252D89A600BC26E1 /* RTMPInjection.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F3C252D89A600BC26E1 /* RTMPInjection.storyboard */; };
+ 033A9F3F252D89BC00BC26E1 /* RTMPStreaming.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F41252D89BC00BC26E1 /* RTMPStreaming.storyboard */; };
+ 033A9F48252D89D000BC26E1 /* CustomAudioRender.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F4A252D89D000BC26E1 /* CustomAudioRender.storyboard */; };
+ 033A9F4D252D89DB00BC26E1 /* CustomAudioSource.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F4F252D89DB00BC26E1 /* CustomAudioSource.storyboard */; };
+ 033A9F52252D89E600BC26E1 /* CustomVideoRender.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F54252D89E600BC26E1 /* CustomVideoRender.storyboard */; };
+ 033A9F57252D89F000BC26E1 /* CustomVideoSourceMediaIO.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F59252D89F000BC26E1 /* CustomVideoSourceMediaIO.storyboard */; };
+ 033A9F5C252D89FD00BC26E1 /* CustomVideoSourcePush.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F5E252D89FD00BC26E1 /* CustomVideoSourcePush.storyboard */; };
+ 033A9F61252D8B0A00BC26E1 /* VideoMetadata.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F63252D8B0A00BC26E1 /* VideoMetadata.storyboard */; };
+ 033A9F66252D8B2A00BC26E1 /* VoiceChanger.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F68252D8B2A00BC26E1 /* VoiceChanger.storyboard */; };
+ 033A9F6B252D8B3500BC26E1 /* MediaChannelRelay.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F6D252D8B3500BC26E1 /* MediaChannelRelay.storyboard */; };
+ 033A9F70252D8B3E00BC26E1 /* SuperResolution.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F72252D8B3E00BC26E1 /* SuperResolution.storyboard */; };
+ 033A9F75252D8B4800BC26E1 /* ScreenShare.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F77252D8B4800BC26E1 /* ScreenShare.storyboard */; };
+ 033A9F7A252D8B5000BC26E1 /* MediaPlayer.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F7C252D8B5000BC26E1 /* MediaPlayer.storyboard */; };
+ 033A9F7F252D8B5900BC26E1 /* AudioMixing.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F81252D8B5900BC26E1 /* AudioMixing.storyboard */; };
+ 033A9F84252D8B6400BC26E1 /* StreamEncryption.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F86252D8B6400BC26E1 /* StreamEncryption.storyboard */; };
+ 033A9F89252D8B6C00BC26E1 /* PrecallTest.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F8B252D8B6C00BC26E1 /* PrecallTest.storyboard */; };
+ 033A9F8E252D8FF300BC26E1 /* JoinMultiChannel.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 033A9F45252D89C800BC26E1 /* JoinMultiChannel.storyboard */; };
+ 034C625E2524A06800296ECF /* VoiceChanger.swift in Sources */ = {isa = PBXBuildFile; fileRef = 034C625D2524A06800296ECF /* VoiceChanger.swift */; };
+ 0364C1FC2551AD6D00C6C0AE /* ARKit.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0364C1F82551AD6D00C6C0AE /* ARKit.swift */; };
+ 0364C1FD2551AD6D00C6C0AE /* ARKit.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 0364C1F92551AD6D00C6C0AE /* ARKit.storyboard */; };
+ 0364C2022551B19800C6C0AE /* ARVideoSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0364C2002551B19800C6C0AE /* ARVideoSource.swift */; };
+ 0364C2032551B19800C6C0AE /* ARVideoRenderer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0364C2012551B19800C6C0AE /* ARVideoRenderer.swift */; };
+ 0364C2052551B46100C6C0AE /* AR.scnassets in Resources */ = {isa = PBXBuildFile; fileRef = 0364C2042551B46100C6C0AE /* AR.scnassets */; };
036C42AC24D292A700A59000 /* AgoraCameraSourceMediaIO.swift in Sources */ = {isa = PBXBuildFile; fileRef = 036C42AB24D292A700A59000 /* AgoraCameraSourceMediaIO.swift */; };
- 036C42AE24D2950A00A59000 /* CustomVideoSourcePush.swift in Sources */ = {isa = PBXBuildFile; fileRef = 036C42AD24D2950A00A59000 /* CustomVideoSourcePush.swift */; };
036C42B024D2955D00A59000 /* AgoraCameraSourcePush.swift in Sources */ = {isa = PBXBuildFile; fileRef = 036C42AF24D2955D00A59000 /* AgoraCameraSourcePush.swift */; };
036C42B524D2A3C600A59000 /* AgoraMetalRender.swift in Sources */ = {isa = PBXBuildFile; fileRef = 036C42B324D2A3C600A59000 /* AgoraMetalRender.swift */; };
036C42B624D2A3C600A59000 /* AgoraMetalShader.metal in Sources */ = {isa = PBXBuildFile; fileRef = 036C42B424D2A3C600A59000 /* AgoraMetalShader.metal */; };
- 036C42B824D57F6D00A59000 /* RawMediaData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 036C42B724D57F6D00A59000 /* RawMediaData.swift */; };
036C42BE24D5853200A59000 /* AgoraMediaDataPlugin.mm in Sources */ = {isa = PBXBuildFile; fileRef = 036C42BB24D5853200A59000 /* AgoraMediaDataPlugin.mm */; };
036C42BF24D5853200A59000 /* AgoraMediaRawData.m in Sources */ = {isa = PBXBuildFile; fileRef = 036C42BC24D5853200A59000 /* AgoraMediaRawData.m */; };
- 03824D0D24CA822F00E9C047 /* VoiceChanger.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03824D0C24CA822F00E9C047 /* VoiceChanger.swift */; };
- 03824D0F24CAB61A00E9C047 /* PopMenu.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03824D0E24CAB61A00E9C047 /* PopMenu.swift */; };
+ 036CBA3F2519186300D74FAD /* StreamEncryption.swift in Sources */ = {isa = PBXBuildFile; fileRef = 036CBA3D2519186300D74FAD /* StreamEncryption.swift */; };
+ 036CBA4625198F1A00D74FAD /* AgoraCustomEncryption.mm in Sources */ = {isa = PBXBuildFile; fileRef = 036CBA4425198F1A00D74FAD /* AgoraCustomEncryption.mm */; };
+ 036CBA47251990B400D74FAD /* AgoraCustomEncryption.h in Sources */ = {isa = PBXBuildFile; fileRef = 036CBA4525198F1A00D74FAD /* AgoraCustomEncryption.h */; };
+ 0371D8AE250B4A2C00C0DD61 /* JoinChannelAudio.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0371D8AD250B4A2C00C0DD61 /* JoinChannelAudio.swift */; };
+ 0385767E2521E5A0003C369A /* MediaChannelRelay.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0385767C2521E59F003C369A /* MediaChannelRelay.swift */; };
+ 0385768225224A88003C369A /* JoinChannelVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0385768125224A88003C369A /* JoinChannelVideo.swift */; };
+ 03B12DA8251125A500E55818 /* VideoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03B12DA7251125A500E55818 /* VideoView.swift */; };
+ 03B12DAA251125B700E55818 /* VideoView.xib in Resources */ = {isa = PBXBuildFile; fileRef = 03B12DA9251125B700E55818 /* VideoView.xib */; };
+ 03B12DAC251127DC00E55818 /* VideoViewMetal.xib in Resources */ = {isa = PBXBuildFile; fileRef = 03B12DAB251127DC00E55818 /* VideoViewMetal.xib */; };
03BCEC50244938C500ED7177 /* BaseViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03BCEC4F244938C500ED7177 /* BaseViewController.swift */; };
03BCEC762449EB5000ED7177 /* LogViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03BCEC752449EB4F00ED7177 /* LogViewController.swift */; };
+ 03BEED08251C35E7005E78F4 /* AudioMixing.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03BEED06251C35E7005E78F4 /* AudioMixing.swift */; };
+ 03BEED0B251C4446005E78F4 /* audiomixing.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 03BEED0A251C4446005E78F4 /* audiomixing.mp3 */; };
+ 03BEED0D251CAB9C005E78F4 /* audioeffect.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 03BEED0C251CAB9C005E78F4 /* audioeffect.mp3 */; };
03D13BD02448758900B599B3 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03D13BCF2448758900B599B3 /* AppDelegate.swift */; };
03D13BD42448758900B599B3 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03D13BD32448758900B599B3 /* ViewController.swift */; };
03D13BD72448758900B599B3 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 03D13BD52448758900B599B3 /* Main.storyboard */; };
03D13BD92448758B00B599B3 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 03D13BD82448758B00B599B3 /* Assets.xcassets */; };
03D13BDC2448758B00B599B3 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 03D13BDA2448758B00B599B3 /* LaunchScreen.storyboard */; };
03D13C0124488F1F00B599B3 /* KeyCenter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03D13C0024488F1E00B599B3 /* KeyCenter.swift */; };
- 03DF1D7824CFBF4800DF7151 /* CustomAudioSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03DF1D7724CFBF4800DF7151 /* CustomAudioSource.swift */; };
03DF1D9024CFC29700DF7151 /* AudioWriteToFile.m in Sources */ = {isa = PBXBuildFile; fileRef = 03DF1D8624CFC29700DF7151 /* AudioWriteToFile.m */; };
03DF1D9124CFC29700DF7151 /* UIColor+CSRGB.m in Sources */ = {isa = PBXBuildFile; fileRef = 03DF1D8924CFC29700DF7151 /* UIColor+CSRGB.m */; };
03DF1D9224CFC29700DF7151 /* UIView+CSshortFrame.m in Sources */ = {isa = PBXBuildFile; fileRef = 03DF1D8A24CFC29700DF7151 /* UIView+CSshortFrame.m */; };
03DF1D9324CFC29700DF7151 /* ExternalAudio.mm in Sources */ = {isa = PBXBuildFile; fileRef = 03DF1D8B24CFC29700DF7151 /* ExternalAudio.mm */; };
03DF1D9424CFC29700DF7151 /* AudioController.m in Sources */ = {isa = PBXBuildFile; fileRef = 03DF1D8D24CFC29700DF7151 /* AudioController.m */; };
- 03DF1D9624D06AF000DF7151 /* CustomAudioRender.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03DF1D9524D06AEF00DF7151 /* CustomAudioRender.swift */; };
03F8733224C8696600EDB1A3 /* EntryViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03F8733124C8696600EDB1A3 /* EntryViewController.swift */; };
+ 03FB5B3625642E7C00F04ED0 /* LiveStreaming.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 03FB5B3225642E7C00F04ED0 /* LiveStreaming.storyboard */; };
+ 03FB5B3725642E7C00F04ED0 /* LiveStreaming.swift in Sources */ = {isa = PBXBuildFile; fileRef = 03FB5B3425642E7C00F04ED0 /* LiveStreaming.swift */; };
+ 576BB8EE259B00E100323D43 /* CreateDataStream.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 576BB8F0259B00E100323D43 /* CreateDataStream.storyboard */; };
+ 576EA57A25ADC4A1000B3D79 /* VideoChat.swift in Sources */ = {isa = PBXBuildFile; fileRef = 576EA57925ADC4A1000B3D79 /* VideoChat.swift */; };
+ 576EA58525AED471000B3D79 /* VideoChat.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 576EA58725AED471000B3D79 /* VideoChat.storyboard */; };
+ 578AA65C259A05B200D7CAD9 /* CreateDataStream.swift in Sources */ = {isa = PBXBuildFile; fileRef = 578AA65B259A05B200D7CAD9 /* CreateDataStream.swift */; };
+ 57B7FC83259C313200407BE1 /* RawAudioData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 57B7FC82259C313200407BE1 /* RawAudioData.swift */; };
+ 57B7FC89259C599100407BE1 /* RawAudioData.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 57B7FC8B259C599100407BE1 /* RawAudioData.storyboard */; };
+ 7F76DCA92571794C00E8B7BC /* SettingsCells.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7F76DCA82571794C00E8B7BC /* SettingsCells.swift */; };
+ 7FDE65A2257E5DCA002AC81F /* UITypeAlias.swift in Sources */ = {isa = PBXBuildFile; fileRef = A7BD765F247CC6920062A6B3 /* UITypeAlias.swift */; };
8407E0942472320800AC5DE8 /* (null) in Sources */ = {isa = PBXBuildFile; };
- A75A56DB24A0603100D0089E /* JoinChannelVideo.swift in Sources */ = {isa = PBXBuildFile; fileRef = A75A56D424A0603000D0089E /* JoinChannelVideo.swift */; };
- A75A56DC24A0603100D0089E /* JoinChannelAudio.swift in Sources */ = {isa = PBXBuildFile; fileRef = A75A56D524A0603000D0089E /* JoinChannelAudio.swift */; };
- A75A56DD24A0603100D0089E /* RTMPStreaming.swift in Sources */ = {isa = PBXBuildFile; fileRef = A75A56D824A0603000D0089E /* RTMPStreaming.swift */; };
- A75A56DE24A0603100D0089E /* VideoMetadata.swift in Sources */ = {isa = PBXBuildFile; fileRef = A75A56D924A0603000D0089E /* VideoMetadata.swift */; };
- A75A56DF24A0603100D0089E /* RTMPInjection.swift in Sources */ = {isa = PBXBuildFile; fileRef = A75A56DA24A0603000D0089E /* RTMPInjection.swift */; };
A7847F922458062900469187 /* StatisticsInfo.swift in Sources */ = {isa = PBXBuildFile; fileRef = A7847F912458062900469187 /* StatisticsInfo.swift */; };
A7847F942458089E00469187 /* AgoraExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A7847F932458089E00469187 /* AgoraExtension.swift */; };
A7BD7660247CC6920062A6B3 /* UITypeAlias.swift in Sources */ = {isa = PBXBuildFile; fileRef = A7BD765F247CC6920062A6B3 /* UITypeAlias.swift */; };
A7CA48C424553CF700507435 /* Popover.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A7CA48C224553CF600507435 /* Popover.storyboard */; };
- A7CA48C624553D3500507435 /* VideoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A7CA48C524553D3500507435 /* VideoView.swift */; };
+ CBCDE23FB64E60D6A79F3723 /* Pods_Agora_ScreenShare_Extension.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 09E72C5D1AABD812866E41A6 /* Pods_Agora_ScreenShare_Extension.framework */; };
D4046B5D3DE984062E3F6D92 /* Pods_APIExample.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 07A781F5D5D3783CEC7C8EFA /* Pods_APIExample.framework */; };
/* End PBXBuildFile section */
+/* Begin PBXContainerItemProxy section */
+ 0339BEB125205B80007D4FDD /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = 03D13BC42448758900B599B3 /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = 0339BE9A25205B7F007D4FDD;
+ remoteInfo = "Agora-ScreenShare-Extension";
+ };
+/* End PBXContainerItemProxy section */
+
+/* Begin PBXCopyFilesBuildPhase section */
+ 0339BEBA25205B80007D4FDD /* Embed App Extensions */ = {
+ isa = PBXCopyFilesBuildPhase;
+ buildActionMask = 2147483647;
+ dstPath = "";
+ dstSubfolderSpec = 13;
+ files = (
+ 0339BEB325205B80007D4FDD /* Agora-ScreenShare-Extension.appex in Embed App Extensions */,
+ );
+ name = "Embed App Extensions";
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXCopyFilesBuildPhase section */
+
/* Begin PBXFileReference section */
0318857824CD667A00C699EB /* SettingsViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsViewController.swift; sourceTree = ""; };
+ 0339BE63251DCA3B007D4FDD /* GlobalSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GlobalSettings.swift; sourceTree = ""; };
+ 0339BE6B251DEAFC007D4FDD /* PrecallTest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PrecallTest.swift; sourceTree = ""; };
+ 0339BE70251EF074007D4FDD /* MediaPlayer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MediaPlayer.swift; sourceTree = ""; };
+ 0339BE74251EF728007D4FDD /* AgoraRtcChannelPublishHelper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AgoraRtcChannelPublishHelper.h; sourceTree = ""; };
+ 0339BE75251EF728007D4FDD /* AgoraRtcChannelPublishHelper.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AgoraRtcChannelPublishHelper.mm; sourceTree = ""; };
+ 0339BE76251EF728007D4FDD /* AgoraMediaPlayerEx.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = AgoraMediaPlayerEx.cpp; sourceTree = ""; };
+ 0339BE77251EF728007D4FDD /* AgoraMediaPlayerEx.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AgoraMediaPlayerEx.h; sourceTree = ""; };
+ 0339BE79251EF728007D4FDD /* scoped_ptr.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = scoped_ptr.h; sourceTree = ""; };
+ 0339BE7A251EF728007D4FDD /* AudioCircularBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioCircularBuffer.h; sourceTree = ""; };
+ 0339BE7B251EF728007D4FDD /* template_util.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = template_util.h; sourceTree = ""; };
+ 0339BE7C251EF728007D4FDD /* AudioCircularBuffer.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = AudioCircularBuffer.cc; sourceTree = ""; };
+ 0339BE82251EF728007D4FDD /* AudioFrameObserver.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioFrameObserver.h; sourceTree = ""; };
+ 0339BE83251EF728007D4FDD /* AudioFrameObserver.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = AudioFrameObserver.cpp; sourceTree = ""; };
+ 0339BE9425203293007D4FDD /* ScreenShare.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ScreenShare.swift; sourceTree = ""; };
+ 0339BE9B25205B7F007D4FDD /* Agora-ScreenShare-Extension.appex */ = {isa = PBXFileReference; explicitFileType = "wrapper.app-extension"; includeInIndex = 0; path = "Agora-ScreenShare-Extension.appex"; sourceTree = BUILT_PRODUCTS_DIR; };
+ 0339BE9C25205B7F007D4FDD /* ReplayKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ReplayKit.framework; path = System/Library/Frameworks/ReplayKit.framework; sourceTree = SDKROOT; };
+ 0339BE9F25205B7F007D4FDD /* SampleHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SampleHandler.swift; sourceTree = ""; };
+ 0339BEA125205B7F007D4FDD /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
+ 0339BEA825205B7F007D4FDD /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
+ 0339BEBB25205D1A007D4FDD /* Agora-ScreenShare-Extension-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Agora-ScreenShare-Extension-Bridging-Header.h"; sourceTree = ""; };
+ 0339BEBD25205D1A007D4FDD /* libios_resampler.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = libios_resampler.a; sourceTree = ""; };
+ 0339BEBE25205D1A007D4FDD /* AgoraUploader.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AgoraUploader.swift; sourceTree = ""; };
+ 0339BEBF25205D1A007D4FDD /* AgoraAudioTube.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AgoraAudioTube.h; sourceTree = ""; };
+ 0339BEC025205D1A007D4FDD /* AgoraAudioTube.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AgoraAudioTube.mm; sourceTree = ""; };
+ 0339BEC125205D1A007D4FDD /* external_resampler.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = external_resampler.h; sourceTree = ""; };
+ 0339BECA25210A93007D4FDD /* SuperResolution.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SuperResolution.swift; sourceTree = ""; };
0339D6D124E91B80008739CD /* QuickSwitchChannelVCItem.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = QuickSwitchChannelVCItem.xib; sourceTree = ""; };
0339D6D324E91BAA008739CD /* QuickSwitchChannelVCItem.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = QuickSwitchChannelVCItem.swift; sourceTree = ""; };
0339D6D524E91CEB008739CD /* QuickSwitchChannel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = QuickSwitchChannel.swift; sourceTree = ""; };
- 036C42A824D27AB000A59000 /* CustomVideoSourceMediaIO.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomVideoSourceMediaIO.swift; sourceTree = ""; };
+ 033A9EE4252D5C6900BC26E1 /* VideoMetadata.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoMetadata.swift; sourceTree = ""; };
+ 033A9EE9252D5F5E00BC26E1 /* JoinMultiChannel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JoinMultiChannel.swift; sourceTree = ""; };
+ 033A9EEE252D61E200BC26E1 /* CustomAudioRender.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomAudioRender.swift; sourceTree = ""; };
+ 033A9EF0252D61E200BC26E1 /* CustomVideoSourcePush.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomVideoSourcePush.swift; sourceTree = ""; };
+ 033A9EF2252D61E200BC26E1 /* CustomVideoRender.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomVideoRender.swift; sourceTree = ""; };
+ 033A9EF7252D61E200BC26E1 /* CustomVideoSourceMediaIO.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomVideoSourceMediaIO.swift; sourceTree = "<group>"; };
+ 033A9EF9252D61E200BC26E1 /* CustomAudioSource.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomAudioSource.swift; sourceTree = "<group>"; };
+ 033A9F02252D61FB00BC26E1 /* RawMediaData.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RawMediaData.swift; sourceTree = "<group>"; };
+ 033A9F04252D61FB00BC26E1 /* RTMPInjection.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RTMPInjection.swift; sourceTree = "<group>"; };
+ 033A9F06252D61FB00BC26E1 /* RTMPStreaming.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RTMPStreaming.swift; sourceTree = "<group>"; };
+ 033A9F22252D70C400BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/Main.strings"; sourceTree = "<group>"; };
+ 033A9F24252D70E400BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/JoinChannelVideo.storyboard; sourceTree = "<group>"; };
+ 033A9F27252D70E900BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/JoinChannelVideo.strings"; sourceTree = "<group>"; };
+ 033A9F2B252D737900BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/Localizable.strings"; sourceTree = "<group>"; };
+ 033A9F31252D860100BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/JoinChannelAudio.storyboard; sourceTree = "<group>"; };
+ 033A9F34252D860900BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/JoinChannelAudio.strings"; sourceTree = "<group>"; };
+ 033A9F36252D896100BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/RawMediaData.storyboard; sourceTree = "<group>"; };
+ 033A9F39252D896A00BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/RawMediaData.strings"; sourceTree = "<group>"; };
+ 033A9F3B252D89A600BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/RTMPInjection.storyboard; sourceTree = "<group>"; };
+ 033A9F3E252D89AC00BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/RTMPInjection.strings"; sourceTree = "<group>"; };
+ 033A9F40252D89BC00BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/RTMPStreaming.storyboard; sourceTree = "<group>"; };
+ 033A9F43252D89C200BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/RTMPStreaming.strings"; sourceTree = "<group>"; };
+ 033A9F44252D89C800BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/JoinMultiChannel.storyboard; sourceTree = "<group>"; };
+ 033A9F47252D89CB00BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/JoinMultiChannel.strings"; sourceTree = "<group>"; };
+ 033A9F49252D89D000BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/CustomAudioRender.storyboard; sourceTree = "<group>"; };
+ 033A9F4C252D89D400BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/CustomAudioRender.strings"; sourceTree = "<group>"; };
+ 033A9F4E252D89DB00BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/CustomAudioSource.storyboard; sourceTree = "<group>"; };
+ 033A9F51252D89E000BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/CustomAudioSource.strings"; sourceTree = "<group>"; };
+ 033A9F53252D89E600BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/CustomVideoRender.storyboard; sourceTree = "<group>"; };
+ 033A9F56252D89EA00BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/CustomVideoRender.strings"; sourceTree = "<group>"; };
+ 033A9F58252D89F000BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/CustomVideoSourceMediaIO.storyboard; sourceTree = "<group>"; };
+ 033A9F5B252D89F400BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/CustomVideoSourceMediaIO.strings"; sourceTree = "<group>"; };
+ 033A9F5D252D89FD00BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/CustomVideoSourcePush.storyboard; sourceTree = "<group>"; };
+ 033A9F60252D8A0100BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/CustomVideoSourcePush.strings"; sourceTree = "<group>"; };
+ 033A9F62252D8B0A00BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/VideoMetadata.storyboard; sourceTree = "<group>"; };
+ 033A9F65252D8B0E00BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/VideoMetadata.strings"; sourceTree = "<group>"; };
+ 033A9F67252D8B2A00BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/VoiceChanger.storyboard; sourceTree = "<group>"; };
+ 033A9F6A252D8B2F00BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/VoiceChanger.strings"; sourceTree = "<group>"; };
+ 033A9F6C252D8B3500BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/MediaChannelRelay.storyboard; sourceTree = "<group>"; };
+ 033A9F6F252D8B3900BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/MediaChannelRelay.strings"; sourceTree = "<group>"; };
+ 033A9F71252D8B3E00BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/SuperResolution.storyboard; sourceTree = "<group>"; };
+ 033A9F74252D8B4300BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/SuperResolution.strings"; sourceTree = "<group>"; };
+ 033A9F76252D8B4800BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/ScreenShare.storyboard; sourceTree = "<group>"; };
+ 033A9F79252D8B4B00BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/ScreenShare.strings"; sourceTree = "<group>"; };
+ 033A9F7B252D8B5000BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/MediaPlayer.storyboard; sourceTree = "<group>"; };
+ 033A9F7E252D8B5400BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/MediaPlayer.strings"; sourceTree = "<group>"; };
+ 033A9F80252D8B5900BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/AudioMixing.storyboard; sourceTree = "<group>"; };
+ 033A9F83252D8B5C00BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/AudioMixing.strings"; sourceTree = "<group>"; };
+ 033A9F85252D8B6400BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/StreamEncryption.storyboard; sourceTree = "<group>"; };
+ 033A9F88252D8B6700BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/StreamEncryption.strings"; sourceTree = "<group>"; };
+ 033A9F8A252D8B6C00BC26E1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/PrecallTest.storyboard; sourceTree = "<group>"; };
+ 033A9F8D252D8B7000BC26E1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/PrecallTest.strings"; sourceTree = "<group>"; };
+ 03414B502551C98E00AB114D /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/ARKit.strings"; sourceTree = "<group>"; };
+ 034C625D2524A06800296ECF /* VoiceChanger.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VoiceChanger.swift; sourceTree = "<group>"; };
+ 0364C1F82551AD6D00C6C0AE /* ARKit.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ARKit.swift; sourceTree = "<group>"; };
+ 0364C1FA2551AD6D00C6C0AE /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/ARKit.storyboard; sourceTree = "<group>"; };
+ 0364C2002551B19800C6C0AE /* ARVideoSource.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ARVideoSource.swift; sourceTree = "<group>"; };
+ 0364C2012551B19800C6C0AE /* ARVideoRenderer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ARVideoRenderer.swift; sourceTree = "<group>"; };
+ 0364C2042551B46100C6C0AE /* AR.scnassets */ = {isa = PBXFileReference; lastKnownFileType = wrapper.scnassets; path = AR.scnassets; sourceTree = "<group>"; };
036C42AB24D292A700A59000 /* AgoraCameraSourceMediaIO.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AgoraCameraSourceMediaIO.swift; sourceTree = "<group>"; };
- 036C42AD24D2950A00A59000 /* CustomVideoSourcePush.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomVideoSourcePush.swift; sourceTree = "<group>"; };
036C42AF24D2955D00A59000 /* AgoraCameraSourcePush.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AgoraCameraSourcePush.swift; sourceTree = "<group>"; };
036C42B324D2A3C600A59000 /* AgoraMetalRender.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AgoraMetalRender.swift; sourceTree = "<group>"; };
036C42B424D2A3C600A59000 /* AgoraMetalShader.metal */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.metal; path = AgoraMetalShader.metal; sourceTree = "<group>"; };
- 036C42B724D57F6D00A59000 /* RawMediaData.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RawMediaData.swift; sourceTree = "<group>"; };
036C42BA24D5853200A59000 /* AgoraMediaRawData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AgoraMediaRawData.h; sourceTree = "<group>"; };
036C42BB24D5853200A59000 /* AgoraMediaDataPlugin.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AgoraMediaDataPlugin.mm; sourceTree = "<group>"; };
036C42BC24D5853200A59000 /* AgoraMediaRawData.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AgoraMediaRawData.m; sourceTree = "<group>"; };
036C42BD24D5853200A59000 /* AgoraMediaDataPlugin.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AgoraMediaDataPlugin.h; sourceTree = "<group>"; };
- 03824D0C24CA822F00E9C047 /* VoiceChanger.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VoiceChanger.swift; sourceTree = "<group>"; };
- 03824D0E24CAB61A00E9C047 /* PopMenu.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PopMenu.swift; sourceTree = "<group>"; };
+ 036CBA3D2519186300D74FAD /* StreamEncryption.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = StreamEncryption.swift; sourceTree = "<group>"; };
+ 036CBA4425198F1A00D74FAD /* AgoraCustomEncryption.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AgoraCustomEncryption.mm; sourceTree = "<group>"; };
+ 036CBA4525198F1A00D74FAD /* AgoraCustomEncryption.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AgoraCustomEncryption.h; sourceTree = "<group>"; };
+ 0371D8AD250B4A2C00C0DD61 /* JoinChannelAudio.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JoinChannelAudio.swift; sourceTree = "<group>"; };
+ 0385767C2521E59F003C369A /* MediaChannelRelay.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MediaChannelRelay.swift; sourceTree = "<group>"; };
+ 0385768125224A88003C369A /* JoinChannelVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JoinChannelVideo.swift; sourceTree = "<group>"; };
+ 03B12DA7251125A500E55818 /* VideoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoView.swift; sourceTree = "<group>"; };
+ 03B12DA9251125B700E55818 /* VideoView.xib */ = {isa = PBXFileReference; lastKnownFileType = file.xib; path = VideoView.xib; sourceTree = "<group>"; };
+ 03B12DAB251127DC00E55818 /* VideoViewMetal.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = VideoViewMetal.xib; sourceTree = "<group>"; };
03BCEC4F244938C500ED7177 /* BaseViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BaseViewController.swift; sourceTree = "<group>"; };
03BCEC5724494F3A00ED7177 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
03BCEC5924494F4600ED7177 /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = System/Library/Frameworks/AudioToolbox.framework; sourceTree = SDKROOT; };
@@ -83,6 +261,9 @@
03BCEC6924494F8E00ED7177 /* libc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.tbd"; path = "usr/lib/libc++.tbd"; sourceTree = SDKROOT; };
03BCEC6A24494F9700ED7177 /* libresolv.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = libresolv.tbd; path = usr/lib/libresolv.tbd; sourceTree = SDKROOT; };
03BCEC752449EB4F00ED7177 /* LogViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LogViewController.swift; sourceTree = "<group>"; };
+ 03BEED06251C35E7005E78F4 /* AudioMixing.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AudioMixing.swift; sourceTree = "<group>"; };
+ 03BEED0A251C4446005E78F4 /* audiomixing.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = audiomixing.mp3; sourceTree = "<group>"; };
+ 03BEED0C251CAB9C005E78F4 /* audioeffect.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = audioeffect.mp3; sourceTree = "<group>"; };
03D13BCC2448758900B599B3 /* APIExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = APIExample.app; sourceTree = BUILT_PRODUCTS_DIR; };
03D13BCF2448758900B599B3 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
03D13BD32448758900B599B3 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
@@ -92,7 +273,6 @@
03D13BDD2448758B00B599B3 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
03D13C0024488F1E00B599B3 /* KeyCenter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = KeyCenter.swift; sourceTree = "<group>"; };
03DF1D7324CFBBBA00DF7151 /* APIExample-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "APIExample-Bridging-Header.h"; sourceTree = "<group>"; };
- 03DF1D7724CFBF4800DF7151 /* CustomAudioSource.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CustomAudioSource.swift; sourceTree = "<group>"; };
03DF1D8524CFC29700DF7151 /* AudioOptions.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioOptions.h; sourceTree = "<group>"; };
03DF1D8624CFC29700DF7151 /* AudioWriteToFile.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioWriteToFile.m; sourceTree = "<group>"; };
03DF1D8724CFC29700DF7151 /* ExternalAudio.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ExternalAudio.h; sourceTree = "<group>"; };
@@ -104,27 +284,58 @@
03DF1D8D24CFC29700DF7151 /* AudioController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AudioController.m; sourceTree = "<group>"; };
03DF1D8E24CFC29700DF7151 /* UIView+CSshortFrame.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIView+CSshortFrame.h"; sourceTree = "<group>"; };
03DF1D8F24CFC29700DF7151 /* UIColor+CSRGB.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIColor+CSRGB.h"; sourceTree = "<group>"; };
- 03DF1D9524D06AEF00DF7151 /* CustomAudioRender.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CustomAudioRender.swift; sourceTree = "<group>"; };
03F8733124C8696600EDB1A3 /* EntryViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EntryViewController.swift; sourceTree = "<group>"; };
+ 03FB5B3325642E7C00F04ED0 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LiveStreaming.storyboard; sourceTree = "<group>"; };
+ 03FB5B3425642E7C00F04ED0 /* LiveStreaming.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LiveStreaming.swift; sourceTree = "<group>"; };
+ 03FB5B3A256435A600F04ED0 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/LiveStreaming.strings"; sourceTree = "<group>"; };
07A781F5D5D3783CEC7C8EFA /* Pods_APIExample.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_APIExample.framework; sourceTree = BUILT_PRODUCTS_DIR; };
+ 09E72C5D1AABD812866E41A6 /* Pods_Agora_ScreenShare_Extension.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Agora_ScreenShare_Extension.framework; sourceTree = BUILT_PRODUCTS_DIR; };
3C49960D6F11D44FA9A62337 /* Pods-APIExample-Mac.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-APIExample-Mac.debug.xcconfig"; path = "Target Support Files/Pods-APIExample-Mac/Pods-APIExample-Mac.debug.xcconfig"; sourceTree = "<group>"; };
3EA7D4B4D7C9540659392B7F /* Pods-APIExample.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-APIExample.debug.xcconfig"; path = "Target Support Files/Pods-APIExample/Pods-APIExample.debug.xcconfig"; sourceTree = "<group>"; };
+ 5708D0B2259C905D00BE0C41 /* Agoraffmpeg.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Agoraffmpeg.framework; path = APIExample/Agoraffmpeg.framework; sourceTree = "<group>"; };
+ 5708D0B3259C905D00BE0C41 /* AgoraCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AgoraCore.framework; path = APIExample/AgoraCore.framework; sourceTree = "<group>"; };
+ 5708D0B4259C905D00BE0C41 /* AgoraSuperResolution.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AgoraSuperResolution.framework; path = APIExample/AgoraSuperResolution.framework; sourceTree = "<group>"; };
+ 5708D0B5259C905D00BE0C41 /* AgoraSoundTouch.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AgoraSoundTouch.framework; path = APIExample/AgoraSoundTouch.framework; sourceTree = "<group>"; };
+ 5708D0B6259C905D00BE0C41 /* Agorafdkaac.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Agorafdkaac.framework; path = APIExample/Agorafdkaac.framework; sourceTree = "<group>"; };
+ 5708D0B7259C905D00BE0C41 /* AgoraRtcKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AgoraRtcKit.framework; path = APIExample/AgoraRtcKit.framework; sourceTree = "<group>"; };
+ 5708D0B8259C905D00BE0C41 /* AgoraAIDenoise.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AgoraAIDenoise.framework; path = APIExample/AgoraAIDenoise.framework; sourceTree = "<group>"; };
+ 576BB8EF259B00E100323D43 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/CreateDataStream.storyboard; sourceTree = "<group>"; };
+ 576BB8F3259B00E300323D43 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/CreateDataStream.strings"; sourceTree = "<group>"; };
+ 576EA57925ADC4A1000B3D79 /* VideoChat.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoChat.swift; sourceTree = "<group>"; };
+ 576EA58625AED471000B3D79 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/VideoChat.storyboard; sourceTree = "<group>"; };
+ 576EA59F25AEE8BC000B3D79 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/VideoChat.strings"; sourceTree = "<group>"; };
+ 578AA65B259A05B200D7CAD9 /* CreateDataStream.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CreateDataStream.swift; sourceTree = "<group>"; };
+ 57B7FC82259C313200407BE1 /* RawAudioData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RawAudioData.swift; sourceTree = "<group>"; };
+ 57B7FC8A259C599100407BE1 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/RawAudioData.storyboard; sourceTree = "<group>"; };
+ 57B7FC8E259C599700407BE1 /* zh-Hans */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = "zh-Hans"; path = "zh-Hans.lproj/RawAudioData.strings"; sourceTree = "<group>"; };
+ 7F76DCA82571794C00E8B7BC /* SettingsCells.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SettingsCells.swift; sourceTree = "<group>"; };
+ 7FBE1D502576A904005A8619 /* pvc_jnqd.bundle */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.plug-in"; name = pvc_jnqd.bundle; path = Pods/AgoraRtcEngine_iOS/AgoraRtcKit.framework/AgoraResources/pvc_jnqd.bundle; sourceTree = SOURCE_ROOT; };
+ 7FBE1D512576A904005A8619 /* srmetal.metallib */ = {isa = PBXFileReference; lastKnownFileType = "archive.metal-library"; name = srmetal.metallib; path = Pods/AgoraRtcEngine_iOS/AgoraRtcKit.framework/AgoraResources/srmetal.metallib; sourceTree = SOURCE_ROOT; };
+ 7FBE1D522576A904005A8619 /* pvc_kernels.metallib */ = {isa = PBXFileReference; lastKnownFileType = "archive.metal-library"; name = pvc_kernels.metallib; path = Pods/AgoraRtcEngine_iOS/AgoraRtcKit.framework/AgoraResources/pvc_kernels.metallib; sourceTree = SOURCE_ROOT; };
+ 7FBE1D532576A904005A8619 /* model.bundle */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.plug-in"; name = model.bundle; path = Pods/AgoraRtcEngine_iOS/AgoraRtcKit.framework/AgoraResources/model.bundle; sourceTree = SOURCE_ROOT; };
846AE4340F81DCC00B6F9543 /* Pods-APIExample.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-APIExample.release.xcconfig"; path = "Target Support Files/Pods-APIExample/Pods-APIExample.release.xcconfig"; sourceTree = "<group>"; };
+ 92EACE913B50B28F1588FE03 /* Pods-Agora-ScreenShare-Extension.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Agora-ScreenShare-Extension.release.xcconfig"; path = "Target Support Files/Pods-Agora-ScreenShare-Extension/Pods-Agora-ScreenShare-Extension.release.xcconfig"; sourceTree = "<group>"; };
92FF830485692225436E2D77 /* Pods-APIExample-Mac.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-APIExample-Mac.release.xcconfig"; path = "Target Support Files/Pods-APIExample-Mac/Pods-APIExample-Mac.release.xcconfig"; sourceTree = "<group>"; };
960FD7C836F90E68E6776106 /* Pods_APIExample_Mac.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_APIExample_Mac.framework; sourceTree = BUILT_PRODUCTS_DIR; };
- A75A56D424A0603000D0089E /* JoinChannelVideo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JoinChannelVideo.swift; sourceTree = "<group>"; };
- A75A56D524A0603000D0089E /* JoinChannelAudio.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = JoinChannelAudio.swift; sourceTree = "<group>"; };
- A75A56D824A0603000D0089E /* RTMPStreaming.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RTMPStreaming.swift; sourceTree = "<group>"; };
- A75A56D924A0603000D0089E /* VideoMetadata.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoMetadata.swift; sourceTree = "<group>"; };
- A75A56DA24A0603000D0089E /* RTMPInjection.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = RTMPInjection.swift; sourceTree = "<group>"; };
A7847F912458062900469187 /* StatisticsInfo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = StatisticsInfo.swift; sourceTree = "<group>"; };
A7847F932458089E00469187 /* AgoraExtension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AgoraExtension.swift; sourceTree = "<group>"; };
A7BD765F247CC6920062A6B3 /* UITypeAlias.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UITypeAlias.swift; sourceTree = "<group>"; };
A7CA48C324553CF600507435 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Popover.storyboard; sourceTree = "<group>"; };
- A7CA48C524553D3500507435 /* VideoView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoView.swift; sourceTree = "<group>"; };
+ FAAC2AEE355D103B9E8527B5 /* Pods-Agora-ScreenShare-Extension.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Agora-ScreenShare-Extension.debug.xcconfig"; path = "Target Support Files/Pods-Agora-ScreenShare-Extension/Pods-Agora-ScreenShare-Extension.debug.xcconfig"; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
+ 0339BE9825205B7F007D4FDD /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 0339BE9D25205B7F007D4FDD /* ReplayKit.framework in Frameworks */,
+ 0339BEC625207EA7007D4FDD /* Accelerate.framework in Frameworks */,
+ 0339BEC225205D1A007D4FDD /* libios_resampler.a in Frameworks */,
+ CBCDE23FB64E60D6A79F3723 /* Pods_Agora_ScreenShare_Extension.framework in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
03D13BC92448758900B599B3 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
@@ -140,10 +351,95 @@
isa = PBXGroup;
children = (
0318857824CD667A00C699EB /* SettingsViewController.swift */,
+ 7F76DCA82571794C00E8B7BC /* SettingsCells.swift */,
);
path = Settings;
sourceTree = "