From 583740e866610a9189d6fe7e56f3cd4195cb383f Mon Sep 17 00:00:00 2001
From: Martin <13046765170@163.com>
Date: Thu, 18 Apr 2024 09:47:38 +0800
Subject: [PATCH] add [Agora SDK upgrade - integrate beauty]
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
FaceUnity/build.gradle | 16 +-
.../com/yunbao/faceunity/FaceManager.java | 17 +
.../yunbao/faceunity/utils/FURenderer.java | 4 +-
.../faceunity/utils/FaceUnityConfig.java | 2 +-
Share/build.gradle | 2 +-
app/build.gradle | 4 +-
.../java/com/shayu/phonelive/AppContext.java | 1 +
build.gradle | 8 +-
common/build.gradle | 3 +-
config.gradle | 4 +-
gradle.properties | 6 +-
lib_faceunity/.gitignore | 4 +
lib_faceunity/build.gradle | 79 +
lib_faceunity/consumer-rules.pro | 0
lib_faceunity/proguard-rules.pro | 21 +
lib_faceunity/src/main/AndroidManifest.xml | 5 +
.../beautyapi/faceunity/FaceUnityBeautyAPI.kt | 179 +++
.../faceunity/FaceUnityBeautyAPIImpl.kt | 818 ++++++++++
.../faceunity/agora}/SWAuManager.java | 3 +-
.../beautyapi/faceunity/agora}/SWManager.java | 88 +-
.../faceunity/utils/FuDeviceUtils.java | 607 ++++++++
.../beautyapi/faceunity/utils/LogUtils.kt | 57 +
.../beautyapi/faceunity/utils/StatsHelper.kt | 80 +
.../faceunity/utils/egl/EGLContextHelper.java | 210 +++
.../faceunity/utils/egl/GLCopyHelper.java | 84 +
.../faceunity/utils/egl/GLFrameBuffer.java | 204 +++
.../utils/egl/GLTextureBufferQueue.kt | 180 +++
.../faceunity/utils/egl/GLUtils.java | 279 ++++
.../utils/egl/TextureProcessHelper.kt | 214 +++
live/build.gradle | 2 +-
.../live/activity/LiveRyAnchorActivity.java | 9 +-
.../LiveFaceUnityDialogNewFragment.java | 87 ++
.../presenter/LiveRyLinkMicPkPresenter.java | 228 +--
.../yunbao/live/socket/SocketRyClient.java | 2 +
.../live/views/LiveNewReadyRyViewHolder.java | 54 +-
.../live/views/LivePlayRyViewHolder.java | 89 +-
.../live/views/LivePlaySwViewHolder.java | 1374 -----------------
.../live/views/LivePushRyViewHolder.java | 194 +--
main/build.gradle | 2 +-
pluginsForAnchor/build.gradle | 2 +-
settings.gradle | 1 +
41 files changed, 3303 insertions(+), 1920 deletions(-)
create mode 100644 lib_faceunity/.gitignore
create mode 100644 lib_faceunity/build.gradle
create mode 100644 lib_faceunity/consumer-rules.pro
create mode 100644 lib_faceunity/proguard-rules.pro
create mode 100644 lib_faceunity/src/main/AndroidManifest.xml
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt
rename {common/src/main/java/com/yunbao/common/manager => lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/agora}/SWAuManager.java (99%)
rename {common/src/main/java/com/yunbao/common/manager => lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/agora}/SWManager.java (77%)
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/FuDeviceUtils.java
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/LogUtils.kt
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/StatsHelper.kt
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/EGLContextHelper.java
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLCopyHelper.java
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLTextureBufferQueue.kt
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLUtils.java
create mode 100644 lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt
create mode 100644 live/src/main/java/com/yunbao/live/dialog/LiveFaceUnityDialogNewFragment.java
delete mode 100644 live/src/main/java/com/yunbao/live/views/LivePlaySwViewHolder.java
diff --git a/FaceUnity/build.gradle b/FaceUnity/build.gradle
index 4d0686f41..e69ef51ea 100644
--- a/FaceUnity/build.gradle
+++ b/FaceUnity/build.gradle
@@ -1,7 +1,7 @@
apply plugin: 'com.android.library'
apply plugin: 'img-optimizer'
apply plugin: 'kotlin-android'
-apply plugin: 'kotlin-android-extensions'
+apply plugin: 'kotlin-parcelize'
android {
@@ -58,15 +58,15 @@ repositories {
}
}
dependencies {
- implementation fileTree(dir: 'libs', include: ['*.jar'])
- implementation rootProject.ext.dependencies["appcompat-androidx"]
- implementation rootProject.ext.dependencies["recyclerview-androidx"]
- implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
+ api fileTree(dir: 'libs', include: ['*.jar'])
+ api rootProject.ext.dependencies["appcompat-androidx"]
+ api rootProject.ext.dependencies["recyclerview-androidx"]
+ api "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
//common
- implementation project(path: ':common')
+ api project(path: ':common')
- implementation 'com.faceunity:core:8.3.1'
- implementation 'com.faceunity:model:8.3.1'
+ api 'com.faceunity:core:8.7.0'
+ api 'com.faceunity:model:8.7.0'
//implementation 'com.faceunity:nama:8.3.1' // base library (standard edition)
diff --git a/FaceUnity/src/main/java/com/yunbao/faceunity/FaceManager.java b/FaceUnity/src/main/java/com/yunbao/faceunity/FaceManager.java
index 9cbef67ef..0c3c09d6e 100644
--- a/FaceUnity/src/main/java/com/yunbao/faceunity/FaceManager.java
+++ b/FaceUnity/src/main/java/com/yunbao/faceunity/FaceManager.java
@@ -96,11 +96,17 @@ public class FaceManager implements SensorEventListener {
faceUnityView.setIFaceUnityInter(new FaceUnityView.IFaceUnityInter() {
@Override
public void onPause() {
+                        if (onMirrorChanged != null) {
+                            onMirrorChanged.onChange(false);
+                        }
pauseFace = true;
}
@Override
public void onStart() {
+                        if (onMirrorChanged != null) {
+                            onMirrorChanged.onChange(true);
+                        }
pauseFace = false;
}
});
@@ -295,7 +301,18 @@ public class FaceManager implements SensorEventListener {
}
+ OnMirrorChanged onMirrorChanged;
+
+ public void setOnMirrorChanged(OnMirrorChanged onMirrorChanged) {
+ this.onMirrorChanged = onMirrorChanged;
+ }
+
public interface FaceStatusChanged {
void onFaceChanged(int num);
}
+
+
+ public interface OnMirrorChanged{
+        void onChange(boolean flag);
+ }
}
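The new OnMirrorChanged hook surfaces the beauty panel's pause/resume state to callers: onChange(false) fires from the panel's onPause, onChange(true) from onStart. A minimal Kotlin sketch of wiring it up; the `faceManager` instance and the logging are illustrative assumptions, not part of this patch:

```kotlin
import android.util.Log
import com.yunbao.faceunity.FaceManager

// Sketch only: `faceManager` is assumed to be obtained from the hosting view holder.
fun observeBeautyPanel(faceManager: FaceManager) {
    faceManager.setOnMirrorChanged { flag ->
        // flag == false is delivered from the panel's onPause, flag == true from onStart
        Log.d("FaceManager", "beauty panel active = $flag")
    }
}
```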
diff --git a/FaceUnity/src/main/java/com/yunbao/faceunity/utils/FURenderer.java b/FaceUnity/src/main/java/com/yunbao/faceunity/utils/FURenderer.java
index 19ff7db23..12fd910f8 100644
--- a/FaceUnity/src/main/java/com/yunbao/faceunity/utils/FURenderer.java
+++ b/FaceUnity/src/main/java/com/yunbao/faceunity/utils/FURenderer.java
@@ -52,10 +52,10 @@ public class FURenderer extends IFURenderer {
/* Effects FURenderKit */
- private FURenderKit mFURenderKit;
+ public FURenderKit mFURenderKit;
/* AI props */
- public static String BUNDLE_AI_FACE = "model" + File.separator + "ai_face_processor_lite.bundle";
+ public static String BUNDLE_AI_FACE = "model" + File.separator + "ai_face_processor.bundle";
public static String BUNDLE_AI_HUMAN = "model" + File.separator + "ai_human_processor.bundle";
/* GL thread ID */
diff --git a/FaceUnity/src/main/java/com/yunbao/faceunity/utils/FaceUnityConfig.java b/FaceUnity/src/main/java/com/yunbao/faceunity/utils/FaceUnityConfig.java
index 7ca84f925..b6a2d51de 100644
--- a/FaceUnity/src/main/java/com/yunbao/faceunity/utils/FaceUnityConfig.java
+++ b/FaceUnity/src/main/java/com/yunbao/faceunity/utils/FaceUnityConfig.java
@@ -15,7 +15,7 @@ public class FaceUnityConfig {
/************************** Algorithm models ******************************/
// Face recognition
- public static String BUNDLE_AI_FACE = "model" + File.separator + "ai_face_processor_lite.bundle";
+ public static String BUNDLE_AI_FACE = "model" + File.separator + "ai_face_processor.bundle";
// Gesture
public static String BUNDLE_AI_HAND = "model" + File.separator + "ai_hand_processor.bundle";
diff --git a/Share/build.gradle b/Share/build.gradle
index 039fa8b89..e3acfbeca 100644
--- a/Share/build.gradle
+++ b/Share/build.gradle
@@ -1,7 +1,7 @@
apply plugin: 'com.android.library'
apply plugin: 'img-optimizer'
apply plugin: 'kotlin-android'
-apply plugin: 'kotlin-android-extensions'
+apply plugin: 'kotlin-parcelize'
android {
diff --git a/app/build.gradle b/app/build.gradle
index 4a5890d4b..ca93012ac 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -131,7 +131,7 @@ android {
variant.mergeAssetsProvider.configure {
doLast {
delete(fileTree(dir: outputDir, includes: ['model/ai_bgseg_green.bundle',
- 'model/ai_face_processor.bundle',
+ //'model/ai_face_processor.bundle',
//'model/ai_face_processor_lite.bundle',
'model/ai_hairseg.bundle',
'model/ai_hand_processor.bundle',
@@ -158,7 +158,7 @@ android {
]))
println "isPluginModel = " + rootProject.ext.manifestPlaceholders.isPluginModel
if (rootProject.ext.manifestPlaceholders.isPluginModel) {
- delete(fileTree(dir: outputDir, includes: ['model/ai_face_processor_lite.bundle',
+ delete(fileTree(dir: outputDir, includes: ['model/ai_face_processor.bundle',
'graphics/face_beautification.bundle']))
} else {
println "不删除bundle"
diff --git a/app/src/main/java/com/shayu/phonelive/AppContext.java b/app/src/main/java/com/shayu/phonelive/AppContext.java
index d9ccd9be5..737efced5 100644
--- a/app/src/main/java/com/shayu/phonelive/AppContext.java
+++ b/app/src/main/java/com/shayu/phonelive/AppContext.java
@@ -274,6 +274,7 @@ public class AppContext extends CommonAppContext {
});
configSPApp();
+
//Initialize the beauty SDK
// FaceManager.initFaceUnity(this);
}
diff --git a/build.gradle b/build.gradle
index 7212cbfef..fbf0b6204 100644
--- a/build.gradle
+++ b/build.gradle
@@ -52,4 +52,10 @@ allprojects {
task clean(type: Delete) {
delete rootProject.buildDir
}
-
+ext {
+ IS_PUBLISH_LOCAL=true
+ LIB_VERSION="1.0.6"
+// AGORA_RTC_SDK="io.agora.rtc:agora-special-full:4.1.1.28"
+// AGORA_RTC_SDK= "${rootProject.rootDir.absolutePath}/sdk"
+ AGORA_RTC_SDK="io.agora.rtc:full-sdk:4.2.6"
+}
diff --git a/common/build.gradle b/common/build.gradle
index b69c7b668..def1c55da 100644
--- a/common/build.gradle
+++ b/common/build.gradle
@@ -1,5 +1,6 @@
apply plugin: 'com.android.library'
apply plugin: 'img-optimizer'
+apply plugin: 'kotlin-android'
android {
@@ -226,6 +227,6 @@ dependencies {
//轮播 一屏显示多个
api 'com.github.xiaohaibin:XBanner:androidx_v1.2.6'
//Agora SDK
- api 'io.agora.rtc:agora-special-full:4.1.1.28'
+ //api 'io.agora.rtc:agora-special-full:4.2.6.245'
}
diff --git a/config.gradle b/config.gradle
index 72fe88cbe..bead9f425 100644
--- a/config.gradle
+++ b/config.gradle
@@ -9,9 +9,9 @@ ext {
]
manifestPlaceholders = [
//Production
- serverHost : "https://napi.yaoulive.com",
+ //serverHost : "https://napi.yaoulive.com",
// Test
-// serverHost : "https://ceshi.yaoulive.com",
+ serverHost : "https://ceshi.yaoulive.com",
//Baidu speech recognition
diff --git a/gradle.properties b/gradle.properties
index 15d390c86..38b11a081 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -23,8 +23,6 @@ android.enableJetifier=true
systemProp.http.proxyHost=127.0.0.1
systemProp.https.proxyHost=127.0.0.1
-systemProp.https.proxyPort=7890
-systemProp.http.proxyPort=7890
-#systemProp.https.proxyPort=10809
-#systemProp.http.proxyPort=10809
+systemProp.https.proxyPort=10809
+systemProp.http.proxyPort=10809
#android.enableR8.fullMode=true
\ No newline at end of file
diff --git a/lib_faceunity/.gitignore b/lib_faceunity/.gitignore
new file mode 100644
index 000000000..bcc2eb0f5
--- /dev/null
+++ b/lib_faceunity/.gitignore
@@ -0,0 +1,4 @@
+/build
+/src/main/assets/makeup
+/src/main/assets/sticker
+authpack.java
\ No newline at end of file
diff --git a/lib_faceunity/build.gradle b/lib_faceunity/build.gradle
new file mode 100644
index 000000000..cfa97aa94
--- /dev/null
+++ b/lib_faceunity/build.gradle
@@ -0,0 +1,79 @@
+apply plugin: 'com.android.library'
+apply plugin: 'maven-publish'
+apply plugin: 'kotlin-android'
+
+android {
+ compileSdkVersion 31
+ buildToolsVersion "30.0.3"
+
+ defaultConfig {
+ minSdkVersion 21
+ targetSdkVersion 31
+
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+ consumerProguardFiles "consumer-rules.pro"
+ }
+
+ buildTypes {
+ release {
+ minifyEnabled false
+ proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
+ }
+ }
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+ kotlinOptions {
+ jvmTarget = '1.8'
+ }
+}
+
+dependencies {
+
+ testImplementation 'junit:junit:4.13.2'
+ androidTestImplementation 'androidx.test.ext:junit:1.1.3'
+ androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
+ if (new File("$AGORA_RTC_SDK").exists()) {
+ api fileTree(dir: "${AGORA_RTC_SDK}", include: ['*.jar', '*.aar'])
+ } else {
+ api "$AGORA_RTC_SDK"
+ }
+ api project(path: ':FaceUnity')
+}
+
+// Because the components are created only during the afterEvaluate phase, you must
+// configure your publications using the afterEvaluate() lifecycle method.
+afterEvaluate {
+ publishing {
+ publications {
+ // Creates a Maven publication called "release".
+ release(MavenPublication) {
+ // Applies the component for the release build variant.
+ from components.release
+
+ // You can then customize attributes of the publication as shown below.
+ groupId = 'com.github.AgoraIO-Community.BeautyAPI'
+ artifactId = 'FaceUnity'
+ version = "$LIB_VERSION"
+ }
+ // Creates a Maven publication called “debug”.
+ debug(MavenPublication) {
+ // Applies the component for the debug build variant.
+ from components.debug
+
+ groupId = 'com.github.AgoraIO-Community.BeautyAPI'
+ artifactId = 'FaceUnity'
+ version = "$LIB_VERSION"
+ }
+ }
+ if(IS_PUBLISH_LOCAL){
+ repositories {
+ maven {
+ url = "file://${rootProject.projectDir.path}/maven"
+ println("maven publish to ${url}")
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/lib_faceunity/consumer-rules.pro b/lib_faceunity/consumer-rules.pro
new file mode 100644
index 000000000..e69de29bb
diff --git a/lib_faceunity/proguard-rules.pro b/lib_faceunity/proguard-rules.pro
new file mode 100644
index 000000000..481bb4348
--- /dev/null
+++ b/lib_faceunity/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
\ No newline at end of file
diff --git a/lib_faceunity/src/main/AndroidManifest.xml b/lib_faceunity/src/main/AndroidManifest.xml
new file mode 100644
index 000000000..0ec7a4108
--- /dev/null
+++ b/lib_faceunity/src/main/AndroidManifest.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="io.agora.beautyapi.faceunity">
+
+</manifest>
\ No newline at end of file
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt
new file mode 100644
index 000000000..1058ea229
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt
@@ -0,0 +1,179 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2023 Agora Community
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package io.agora.beautyapi.faceunity
+
+import android.content.Context
+import android.view.View
+import com.faceunity.core.faceunity.FURenderKit
+import io.agora.base.VideoFrame
+import io.agora.rtc2.Constants
+import io.agora.rtc2.RtcEngine
+
+const val VERSION = "1.0.6"
+
+enum class CaptureMode{
+    Agora, // Process frames through Agora's internal raw-data interface
+    Custom // Custom mode: the caller must feed raw video frames to the BeautyAPI via onFrame
+}
+
+interface IEventCallback{
+
+ /**
+     * Stats callback, invoked once after each processed frame
+     *
+     * @param stats beauty processing statistics
+ */
+ fun onBeautyStats(stats: BeautyStats)
+}
+
+data class BeautyStats(
+    val minCostMs:Long, // Minimum cost within the stats window
+    val maxCostMs: Long, // Maximum cost within the stats window
+    val averageCostMs: Long // Average cost within the stats window
+)
+
+enum class MirrorMode {
+
+    // Definition of a normal, non-mirrored picture: with the front camera, the captured frame and the on-screen preview are left/right reversed; with the back camera they match.
+
+    MIRROR_LOCAL_REMOTE, // Mirror both local and remote. Front-camera default; stickers look correct on both sides.
+    MIRROR_LOCAL_ONLY, // Mirror local only; remote is not mirrored. Remote stickers look correct, local stickers are mirrored. Used for call scenes and e-commerce live streaming (keeps the signboard text behind the streamer readable); since local and remote are opposites in this mode, text stickers are inevitably reversed on one side.
+    MIRROR_REMOTE_ONLY, // Mirror remote only; local is not mirrored. Remote stickers look correct, local stickers are mirrored.
+    MIRROR_NONE // No mirroring on either side. Back-camera default; stickers look correct on both sides.
+}
+
+data class CameraConfig(
+    val frontMirror: MirrorMode = MirrorMode.MIRROR_LOCAL_REMOTE, // Front-camera default: mirror both local and remote
+    val backMirror: MirrorMode = MirrorMode.MIRROR_NONE // Back-camera default: no mirroring on either side
+)
+
+data class Config(
+    val context: Context, // Android Context
+    val rtcEngine: RtcEngine, // Agora RTC engine
+    val fuRenderKit: FURenderKit, // FaceUnity beauty SDK handle
+    val eventCallback: IEventCallback? = null, // event callback
+    val captureMode: CaptureMode = CaptureMode.Agora, // capture/processing mode
+    val statsDuration: Long = 1000, // stats window duration (ms)
+    val statsEnable: Boolean = false, // whether to enable stats
+    val cameraConfig: CameraConfig = CameraConfig() // camera mirror configuration
+)
+
+enum class ErrorCode(val value: Int) {
+    ERROR_OK(0), // Everything is fine
+    ERROR_HAS_NOT_INITIALIZED(101), // Another API was called before initialize, or initialize failed
+    ERROR_HAS_INITIALIZED(102), // initialize was called again after it had already succeeded
+    ERROR_HAS_RELEASED(103), // An API was called after release had been invoked
+    ERROR_PROCESS_NOT_CUSTOM(104), // onFrame was used to push external frames while not in Custom capture mode
+    ERROR_VIEW_TYPE_ERROR(105), // Returned by setupLocalVideo when the view type is wrong
+    ERROR_FRAME_SKIPPED(106), // Returned by onFrame when the frame is skipped
+}
+
+enum class BeautyPreset {
+    CUSTOM, // Do not apply the recommended beauty parameters
+    DEFAULT // Recommended defaults
+}
+
+fun createFaceUnityBeautyAPI(): FaceUnityBeautyAPI = FaceUnityBeautyAPIImpl()
+
+interface FaceUnityBeautyAPI {
+
+    /**
+     * Initialize the API
+     *
+     * @param config configuration parameters
+     * @return see ErrorCode
+     */
+    fun initialize(config: Config): Int
+
+    /**
+     * Enable/disable beauty processing
+     *
+     * @param enable true: enable; false: disable
+     * @return see ErrorCode
+     */
+    fun enable(enable: Boolean): Int
+
+    /**
+     * Local view rendering; mirroring is handled internally
+     *
+     * @param view a SurfaceView or TextureView
+     * @param renderMode render scaling mode
+     * @return see ErrorCode
+     */
+    fun setupLocalVideo(view: View, renderMode: Int = Constants.RENDER_MODE_HIDDEN): Int
+
+    /**
+     * Pass in raw video frames from outside when CaptureMode == Custom
+     *
+     * @param videoFrame the raw video frame
+     * @return see ErrorCode
+     */
+    fun onFrame(videoFrame: VideoFrame): Int
+
+    /**
+     * The recommended default beauty parameters provided by Agora
+     *
+     * @return see ErrorCode
+     */
+    fun setBeautyPreset(preset: BeautyPreset = BeautyPreset.DEFAULT): Int
+
+    /**
+     * Update the camera configuration
+     */
+    fun updateCameraConfig(config: CameraConfig): Int
+
+    /**
+     * Whether the front camera is in use
+     * Note: the value is only accurate while beauty processing is running; otherwise it stays true
+     */
+    fun isFrontCamera(): Boolean
+
+    /**
+     * Get the mirror state
+     *
+     * @return mirror state; true: mirrored, false: not mirrored
+     */
+    fun getMirrorApplied(): Boolean
+
+    /**
+     * Run an action on the processing thread
+     *
+     * @param run the action to run
+     */
+    fun runOnProcessThread(run: ()->Unit)
+
+    /**
+     * Private parameter configuration, for internal (non-public) API calls; mostly used for testing
+     */
+    fun setParameters(key: String, value: String)
+
+    /**
+     * Release resources; once released, this instance can no longer be used
+     *
+     * @return see ErrorCode
+     */
+ fun release(): Int
+
+}
\ No newline at end of file
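Read together, the declarations above imply a simple lifecycle: create the API, initialize it with a Config, enable processing, bind the local preview, apply a preset, and release when the session ends. A minimal sketch in the default CaptureMode.Agora mode; the `context`, `rtcEngine` and `previewView` values are assumed to come from the host app:

```kotlin
import android.content.Context
import android.view.SurfaceView
import com.faceunity.core.faceunity.FURenderKit
import io.agora.beautyapi.faceunity.*
import io.agora.rtc2.RtcEngine

// Sketch only: the caller owns the RtcEngine and preview SurfaceView.
fun startBeauty(context: Context, rtcEngine: RtcEngine, previewView: SurfaceView): FaceUnityBeautyAPI {
    val api = createFaceUnityBeautyAPI()
    api.initialize(
        Config(
            context = context,
            rtcEngine = rtcEngine,
            fuRenderKit = FURenderKit.getInstance(), // FaceUnity SDK handle
            captureMode = CaptureMode.Agora          // frames are intercepted via the registered video frame observer
        )
    )
    api.enable(true)                          // turn beauty processing on
    api.setupLocalVideo(previewView)          // local preview; mirroring is handled internally
    api.setBeautyPreset(BeautyPreset.DEFAULT) // recommended default parameters
    return api                                // call api.release() when the session ends
}
```

In CaptureMode.Custom the host instead pushes raw frames through onFrame(videoFrame); any call made before initialize or after release returns the corresponding ErrorCode.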
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt
new file mode 100644
index 000000000..5c17a503d
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt
@@ -0,0 +1,818 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2023 Agora Community
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package io.agora.beautyapi.faceunity
+
+import android.graphics.Matrix
+import android.opengl.GLES11Ext
+import android.opengl.GLES20
+import android.view.SurfaceView
+import android.view.TextureView
+import android.view.View
+import com.faceunity.core.entity.FUBundleData
+import com.faceunity.core.entity.FURenderInputData
+import com.faceunity.core.enumeration.CameraFacingEnum
+import com.faceunity.core.enumeration.FUInputBufferEnum
+import com.faceunity.core.enumeration.FUInputTextureEnum
+import com.faceunity.core.enumeration.FUTransformMatrixEnum
+import com.faceunity.core.faceunity.FUAIKit
+import com.faceunity.core.faceunity.FURenderKit
+import com.faceunity.core.model.facebeauty.FaceBeauty
+import com.faceunity.core.model.facebeauty.FaceBeautyFilterEnum
+import io.agora.base.TextureBufferHelper
+import io.agora.base.VideoFrame
+import io.agora.base.VideoFrame.I420Buffer
+import io.agora.base.VideoFrame.SourceType
+import io.agora.base.VideoFrame.TextureBuffer
+import io.agora.base.internal.video.EglBase
+import io.agora.base.internal.video.YuvHelper
+import io.agora.beautyapi.faceunity.utils.FuDeviceUtils
+import io.agora.beautyapi.faceunity.utils.LogUtils
+import io.agora.beautyapi.faceunity.utils.StatsHelper
+import io.agora.beautyapi.faceunity.utils.egl.GLFrameBuffer
+import io.agora.beautyapi.faceunity.utils.egl.TextureProcessHelper
+import io.agora.rtc2.Constants
+import io.agora.rtc2.gl.EglBaseProvider
+import io.agora.rtc2.video.IVideoFrameObserver
+import io.agora.rtc2.video.VideoCanvas
+import java.io.File
+import java.nio.ByteBuffer
+import java.util.Collections
+import java.util.concurrent.Callable
+
+class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver {
+ private val TAG = "FaceUnityBeautyAPIImpl"
+ private val reportId = "scenarioAPI"
+ private val reportCategory = "beauty_android_$VERSION"
+    private var beautyMode = 0 // 0: switch automatically by buffer type, 1: always use OES texture, 2: always use I420, 3: single-texture mode
+    private var enableTextureAsync = true // Whether to enable texture + async cache processing; cannot be toggled live during preview. On phones with strong GPUs it reduces beauty processing cost; on mid-range devices the gain is small.
+
+ private var textureBufferHelper: TextureBufferHelper? = null
+ private var wrapTextureBufferHelper: TextureBufferHelper? = null
+ private var byteBuffer: ByteBuffer? = null
+ private var byteArray: ByteArray? = null
+ private var config: Config? = null
+ private var enable: Boolean = false
+ private var enableChange: Boolean = false
+ private var isReleased: Boolean = false
+ private var captureMirror = false
+ private var renderMirror = false
+ private val identityMatrix = Matrix()
+ private var mTextureProcessHelper: TextureProcessHelper? = null
+ private var statsHelper: StatsHelper? = null
+ private var skipFrame = 0
+ private enum class ProcessSourceType{
+ UNKNOWN,
+ TEXTURE_OES_ASYNC,
+ TEXTURE_2D_ASYNC,
+ TEXTURE_OES,
+ TEXTURE_2D,
+ I420
+ }
+ private var currProcessSourceType = ProcessSourceType.UNKNOWN
+ private var deviceLevel = FuDeviceUtils.DEVICEINFO_UNKNOWN
+ private var isFrontCamera = true
+ private var cameraConfig = CameraConfig()
+ private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN
+ private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>())
+ private val transformGLFrameBuffer = GLFrameBuffer()
+
+ override fun initialize(config: Config): Int {
+ if (this.config != null) {
+ LogUtils.e(TAG, "initialize >> The beauty api has been initialized!")
+ return ErrorCode.ERROR_HAS_INITIALIZED.value
+ }
+ this.config = config
+ this.cameraConfig = config.cameraConfig
+ if (config.captureMode == CaptureMode.Agora) {
+ config.rtcEngine.registerVideoFrameObserver(this)
+ }
+ statsHelper = StatsHelper(config.statsDuration){
+ this.config?.eventCallback?.onBeautyStats(it)
+ }
+ LogUtils.i(TAG, "initialize >> config = $config")
+ LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${FURenderKit.getInstance().getVersion()}")
+
+ // config face beauty
+ if (deviceLevel == FuDeviceUtils.DEVICEINFO_UNKNOWN) {
+ deviceLevel = FuDeviceUtils.judgeDeviceLevel(config.context)
+ FUAIKit.getInstance().faceProcessorSetFaceLandmarkQuality(deviceLevel)
+ if (deviceLevel > FuDeviceUtils.DEVICE_LEVEL_MID) {
+ FUAIKit.getInstance().fuFaceProcessorSetDetectSmallFace(true)
+ }
+ }
+ LogUtils.i(TAG, "initialize >> FuDeviceUtils deviceLevel=$deviceLevel")
+ config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "config=$config, deviceLevel=$deviceLevel", 0)
+ return ErrorCode.ERROR_OK.value
+ }
+
+ override fun enable(enable: Boolean): Int {
+ LogUtils.i(TAG, "enable >> enable = $enable")
+ if (config == null) {
+ LogUtils.e(TAG, "enable >> The beauty api has not been initialized!")
+ return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
+ }
+ if (isReleased) {
+ LogUtils.e(TAG, "enable >> The beauty api has been released!")
+ return ErrorCode.ERROR_HAS_RELEASED.value
+ }
+ if(config?.captureMode == CaptureMode.Custom){
+ skipFrame = 2
+ LogUtils.i(TAG, "enable >> skipFrame = $skipFrame")
+ }
+ config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "enable=$enable", 0)
+
+ if(this.enable != enable){
+ this.enable = enable
+ enableChange = true
+ LogUtils.i(TAG, "enable >> enableChange")
+ }
+ return ErrorCode.ERROR_OK.value
+ }
+
+ override fun setupLocalVideo(view: View, renderMode: Int): Int {
+ val rtcEngine = config?.rtcEngine
+ if(rtcEngine == null){
+ LogUtils.e(TAG, "setupLocalVideo >> The beauty api has not been initialized!")
+ return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
+ }
+ LogUtils.i(TAG, "setupLocalVideo >> view=$view, renderMode=$renderMode")
+ localVideoRenderMode = renderMode
+        rtcEngine.sendCustomReportMessage(reportId, reportCategory, "setupLocalVideo", "view=$view, renderMode=$renderMode", 0)
+ if (view is TextureView || view is SurfaceView) {
+ val canvas = VideoCanvas(view, renderMode, 0)
+ canvas.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED
+ rtcEngine.setupLocalVideo(canvas)
+ return ErrorCode.ERROR_OK.value
+ }
+ return ErrorCode.ERROR_VIEW_TYPE_ERROR.value
+ }
+
+ override fun onFrame(videoFrame: VideoFrame): Int {
+ val conf = config
+ if(conf == null){
+ LogUtils.e(TAG, "onFrame >> The beauty api has not been initialized!")
+ return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
+ }
+ if (isReleased) {
+ LogUtils.e(TAG, "onFrame >> The beauty api has been released!")
+ return ErrorCode.ERROR_HAS_RELEASED.value
+ }
+ if (conf.captureMode != CaptureMode.Custom) {
+ LogUtils.e(TAG, "onFrame >> The capture mode is not Custom!")
+ return ErrorCode.ERROR_PROCESS_NOT_CUSTOM.value
+ }
+ if (processBeauty(videoFrame)) {
+ return ErrorCode.ERROR_OK.value
+ }
+ LogUtils.i(TAG, "onFrame >> Skip Frame.")
+ return ErrorCode.ERROR_FRAME_SKIPPED.value
+ }
+
+ override fun updateCameraConfig(config: CameraConfig): Int {
+ LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config")
+ cameraConfig = CameraConfig(config.frontMirror, config.backMirror)
+ this.config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "updateCameraConfig", "config=$config", 0)
+
+ return ErrorCode.ERROR_OK.value
+ }
+
+ override fun runOnProcessThread(run: () -> Unit) {
+ if (config == null) {
+ LogUtils.e(TAG, "runOnProcessThread >> The beauty api has not been initialized!")
+ return
+ }
+ if (isReleased) {
+ LogUtils.e(TAG, "runOnProcessThread >> The beauty api has been released!")
+ return
+ }
+ if (textureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) {
+ run.invoke()
+ } else if (textureBufferHelper != null) {
+ textureBufferHelper?.handler?.post(run)
+ } else {
+ pendingProcessRunList.add(run)
+ }
+ }
+
+ override fun isFrontCamera() = isFrontCamera
+
+ override fun setParameters(key: String, value: String) {
+ when(key){
+ "beauty_mode" -> beautyMode = value.toInt()
+ "enableTextureAsync" -> enableTextureAsync = value.toBoolean()
+ }
+ }
+
+ override fun setBeautyPreset(preset: BeautyPreset): Int {
+ val conf = config
+ if(conf == null){
+ LogUtils.e(TAG, "setBeautyPreset >> The beauty api has not been initialized!")
+ return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
+ }
+ if (isReleased) {
+ LogUtils.e(TAG, "setBeautyPreset >> The beauty api has been released!")
+ return ErrorCode.ERROR_HAS_RELEASED.value
+ }
+
+ LogUtils.i(TAG, "setBeautyPreset >> preset = $preset")
+        config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "setBeautyPreset", "preset=$preset", 0)
+
+ val recommendFaceBeauty = FaceBeauty(FUBundleData("graphics" + File.separator + "face_beautification.bundle"))
+ if (preset == BeautyPreset.DEFAULT) {
+ recommendFaceBeauty.filterName = FaceBeautyFilterEnum.FENNEN_1
+ recommendFaceBeauty.filterIntensity = 0.7
+            // Teeth whitening
+            recommendFaceBeauty.toothIntensity = 0.3
+            // Eye brightening
+            recommendFaceBeauty.eyeBrightIntensity = 0.3
+            // Eye enlarging
+            recommendFaceBeauty.eyeEnlargingIntensity = 0.5
+            // Rosiness
+            recommendFaceBeauty.redIntensity = 0.5 * 2
+            // Skin whitening
+            recommendFaceBeauty.colorIntensity = 0.75 * 2
+            // Skin smoothing
+            recommendFaceBeauty.blurIntensity = 0.75 * 6
+ if (deviceLevel > FuDeviceUtils.DEVICE_LEVEL_MID) {
+ val score = FUAIKit.getInstance().getFaceProcessorGetConfidenceScore(0)
+ if (score > 0.95) {
+ recommendFaceBeauty.blurType = 3
+ recommendFaceBeauty.enableBlurUseMask = true
+ } else {
+ recommendFaceBeauty.blurType = 2
+ recommendFaceBeauty.enableBlurUseMask = false
+ }
+ } else {
+ recommendFaceBeauty.blurType = 2
+ recommendFaceBeauty.enableBlurUseMask = false
+ }
+            // Mouth shape
+            recommendFaceBeauty.mouthIntensity = 0.3
+            // Nose thinning
+            recommendFaceBeauty.noseIntensity = 0.1
+            // Forehead
+            recommendFaceBeauty.forHeadIntensity = 0.3
+            // Chin
+            recommendFaceBeauty.chinIntensity = 0.0
+            // Cheek thinning
+            recommendFaceBeauty.cheekThinningIntensity = 0.3
+            // Cheek narrowing
+            recommendFaceBeauty.cheekNarrowIntensity = 0.0
+            // Small face
+            recommendFaceBeauty.cheekSmallIntensity = 0.0
+            // V face
+            recommendFaceBeauty.cheekVIntensity = 0.0
+ }
+ conf.fuRenderKit.faceBeauty = recommendFaceBeauty
+ return ErrorCode.ERROR_OK.value
+ }
+
+ override fun release(): Int {
+ val conf = config
+ val fuRenderer = conf?.fuRenderKit
+ if(fuRenderer == null){
+ LogUtils.e(TAG, "release >> The beauty api has not been initialized!")
+ return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
+ }
+ if (isReleased) {
+            LogUtils.e(TAG, "release >> The beauty api has been released!")
+ return ErrorCode.ERROR_HAS_RELEASED.value
+ }
+ LogUtils.i(TAG, "release")
+ if (conf.captureMode == CaptureMode.Agora) {
+ conf.rtcEngine.registerVideoFrameObserver(null)
+ }
+ conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0)
+
+ isReleased = true
+ textureBufferHelper?.let {
+ textureBufferHelper = null
+ it.handler.removeCallbacksAndMessages(null)
+ it.invoke {
+ fuRenderer.release()
+ mTextureProcessHelper?.release()
+ mTextureProcessHelper = null
+ transformGLFrameBuffer.release()
+ null
+ }
+ // it.handler.looper.quit()
+ it.dispose()
+ }
+ wrapTextureBufferHelper?.let {
+ wrapTextureBufferHelper = null
+ it.dispose()
+ }
+ statsHelper?.reset()
+ statsHelper = null
+ pendingProcessRunList.clear()
+ return ErrorCode.ERROR_OK.value
+ }
+
+ private fun processBeauty(videoFrame: VideoFrame): Boolean {
+ if (isReleased) {
+ LogUtils.e(TAG, "processBeauty >> The beauty api has been released!")
+ return false
+ }
+
+ val cMirror =
+ if (isFrontCamera) {
+ when (cameraConfig.frontMirror) {
+ MirrorMode.MIRROR_LOCAL_REMOTE -> true
+ MirrorMode.MIRROR_LOCAL_ONLY -> false
+ MirrorMode.MIRROR_REMOTE_ONLY -> true
+ MirrorMode.MIRROR_NONE -> false
+ }
+ } else {
+ when (cameraConfig.backMirror) {
+ MirrorMode.MIRROR_LOCAL_REMOTE -> true
+ MirrorMode.MIRROR_LOCAL_ONLY -> false
+ MirrorMode.MIRROR_REMOTE_ONLY -> true
+ MirrorMode.MIRROR_NONE -> false
+ }
+ }
+ val rMirror =
+ if (isFrontCamera) {
+ when (cameraConfig.frontMirror) {
+ MirrorMode.MIRROR_LOCAL_REMOTE -> false
+ MirrorMode.MIRROR_LOCAL_ONLY -> true
+ MirrorMode.MIRROR_REMOTE_ONLY -> true
+ MirrorMode.MIRROR_NONE -> false
+ }
+ } else {
+ when (cameraConfig.backMirror) {
+ MirrorMode.MIRROR_LOCAL_REMOTE -> false
+ MirrorMode.MIRROR_LOCAL_ONLY -> true
+ MirrorMode.MIRROR_REMOTE_ONLY -> true
+ MirrorMode.MIRROR_NONE -> false
+ }
+ }
+ if (captureMirror != cMirror || renderMirror != rMirror) {
+ LogUtils.w(TAG, "processBeauty >> enable=$enable, captureMirror=$captureMirror->$cMirror, renderMirror=$renderMirror->$rMirror")
+ captureMirror = cMirror
+ if(renderMirror != rMirror){
+ renderMirror = rMirror
+ config?.rtcEngine?.setLocalRenderMode(
+ localVideoRenderMode,
+ if(renderMirror) Constants.VIDEO_MIRROR_MODE_ENABLED else Constants.VIDEO_MIRROR_MODE_DISABLED
+ )
+ }
+ textureBufferHelper?.invoke {
+ mTextureProcessHelper?.reset()
+ }
+ skipFrame = 2
+ return false
+ }
+
+ val oldIsFrontCamera = isFrontCamera
+ isFrontCamera = videoFrame.sourceType == SourceType.kFrontCamera
+ if(oldIsFrontCamera != isFrontCamera){
+ LogUtils.w(TAG, "processBeauty >> oldIsFrontCamera=$oldIsFrontCamera, isFrontCamera=$isFrontCamera")
+ return false
+ }
+
+ if(enableChange){
+ enableChange = false
+ textureBufferHelper?.invoke {
+ mTextureProcessHelper?.reset()
+ }
+ return false
+ }
+
+ if(!enable){
+ return true
+ }
+
+ if (textureBufferHelper == null) {
+ textureBufferHelper = TextureBufferHelper.create(
+ "FURender",
+ EglBaseProvider.instance().rootEglBase.eglBaseContext
+ )
+ textureBufferHelper?.invoke {
+ synchronized(pendingProcessRunList){
+ val iterator = pendingProcessRunList.iterator()
+ while (iterator.hasNext()){
+ iterator.next().invoke()
+ iterator.remove()
+ }
+ }
+ }
+ LogUtils.i(TAG, "processBeauty >> create texture buffer, beautyMode=$beautyMode")
+ }
+ if (wrapTextureBufferHelper == null) {
+ wrapTextureBufferHelper = TextureBufferHelper.create(
+ "FURenderWrap",
+ EglBaseProvider.instance().rootEglBase.eglBaseContext
+ )
+ LogUtils.i(TAG, "processBeauty >> create texture buffer wrap, beautyMode=$beautyMode")
+ }
+ val startTime = System.currentTimeMillis()
+ val processTexId = when (beautyMode) {
+ 2 -> processBeautySingleBuffer(videoFrame)
+ 3 -> {
+ if (enableTextureAsync) {
+ processBeautySingleTextureAsync(videoFrame)
+ } else {
+ processBeautySingleTexture(videoFrame)
+ }
+ }
+ else -> processBeautyAuto(videoFrame)
+ }
+
+ if(config?.statsEnable == true){
+ val costTime = System.currentTimeMillis() - startTime
+ statsHelper?.once(costTime)
+ }
+
+ if (processTexId <= 0) {
+ LogUtils.w(TAG, "processBeauty >> processTexId <= 0")
+ return false
+ }
+
+ if(skipFrame > 0){
+ skipFrame --
+ LogUtils.w(TAG, "processBeauty >> skipFrame=$skipFrame")
+ return false
+ }
+
+ val processBuffer: TextureBuffer = wrapTextureBufferHelper?.wrapTextureBuffer(
+ videoFrame.rotatedWidth,
+ videoFrame.rotatedHeight,
+ TextureBuffer.Type.RGB,
+ processTexId,
+ identityMatrix
+ ) ?: return false
+ videoFrame.replaceBuffer(processBuffer, 0, videoFrame.timestampNs)
+ return true
+ }
+
+ private fun processBeautyAuto(videoFrame: VideoFrame): Int {
+ val buffer = videoFrame.buffer
+ return if (buffer is TextureBuffer) {
+ if (enableTextureAsync) {
+ processBeautySingleTextureAsync(videoFrame)
+ } else {
+ processBeautySingleTexture(videoFrame)
+ }
+ } else {
+ processBeautySingleBuffer(videoFrame)
+ }
+ }
+
+ private fun processBeautySingleTextureAsync(videoFrame: VideoFrame): Int {
+ val texBufferHelper = wrapTextureBufferHelper ?: return -1
+ val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1
+
+ when(textureBuffer.type){
+ TextureBuffer.Type.OES -> {
+ if(currProcessSourceType != ProcessSourceType.TEXTURE_OES_ASYNC){
+ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES_ASYNC}")
+ if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
+ skipFrame = 3
+ }
+ currProcessSourceType = ProcessSourceType.TEXTURE_OES_ASYNC
+ return -1
+ }
+ }
+ else -> {
+ if(currProcessSourceType != ProcessSourceType.TEXTURE_2D_ASYNC){
+ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D_ASYNC}")
+ if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
+ skipFrame = 3
+ }
+ currProcessSourceType = ProcessSourceType.TEXTURE_2D_ASYNC
+ skipFrame = 6
+ return -1
+ }
+ }
+ }
+
+ if(mTextureProcessHelper == null) {
+ mTextureProcessHelper = TextureProcessHelper()
+ mTextureProcessHelper?.setFilter { frame ->
+ val fuRenderKit = config?.fuRenderKit ?: return@setFilter -1
+
+ val input = FURenderInputData(frame.width, frame.height)
+ input.texture = FURenderInputData.FUTexture(
+ FUInputTextureEnum.FU_ADM_FLAG_COMMON_TEXTURE,
+ frame.textureId
+ )
+ val isFront = frame.isFrontCamera
+ input.renderConfig.let {
+ if (isFront) {
+ it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
+ it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
+ it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
+ it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
+ it.deviceOrientation = 270
+ } else {
+ it.cameraFacing = CameraFacingEnum.CAMERA_BACK
+ it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
+ it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
+ it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
+ it.deviceOrientation = 270
+ }
+ }
+ if (isReleased) {
+ return@setFilter -1
+ }
+ val ret = textureBufferHelper?.invoke {
+ synchronized(EglBase.lock){
+ return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1
+ }
+ }
+ return@setFilter ret ?: -1
+ }
+ }
+
+ return texBufferHelper.invoke {
+ if(isReleased){
+ return@invoke -1
+ }
+
+ return@invoke mTextureProcessHelper?.process(
+ textureBuffer.textureId,
+ when (textureBuffer.type) {
+ TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES
+ else -> GLES20.GL_TEXTURE_2D
+ },
+ textureBuffer.width,
+ textureBuffer.height,
+ videoFrame.rotation,
+ textureBuffer.transformMatrixArray,
+ isFrontCamera,
+ (isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)
+ )?: -1
+ }
+ }
+
+ private fun processBeautySingleTexture(videoFrame: VideoFrame): Int {
+ val texBufferHelper = wrapTextureBufferHelper ?: return -1
+ val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1
+
+ when(textureBuffer.type){
+ TextureBuffer.Type.OES -> {
+ if(currProcessSourceType != ProcessSourceType.TEXTURE_OES){
+ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES}")
+ if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
+ skipFrame = 3
+ }
+ currProcessSourceType = ProcessSourceType.TEXTURE_OES
+ return -1
+ }
+ }
+ else -> {
+ if(currProcessSourceType != ProcessSourceType.TEXTURE_2D){
+ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D}")
+ if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
+ skipFrame = 3
+ }
+ currProcessSourceType = ProcessSourceType.TEXTURE_2D
+ skipFrame = 6
+ return -1
+ }
+ }
+ }
+
+ val width = videoFrame.rotatedWidth
+ val height = videoFrame.rotatedHeight
+ val isFront = videoFrame.sourceType == SourceType.kFrontCamera
+ val rotation = videoFrame.rotation
+
+ return texBufferHelper.invoke {
+ val fuRenderKit = config?.fuRenderKit ?: return@invoke -1
+
+ transformGLFrameBuffer.setSize(width, height)
+ transformGLFrameBuffer.resetTransform()
+ transformGLFrameBuffer.setTexMatrix(textureBuffer.transformMatrixArray)
+ transformGLFrameBuffer.setRotation(rotation)
+ var flipH = isFront
+ if((isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)){
+ flipH = !flipH
+ }
+ transformGLFrameBuffer.setFlipH(flipH)
+ val transformTexId = transformGLFrameBuffer.process(
+ textureBuffer.textureId, when (textureBuffer.type) {
+ TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES
+ else -> GLES20.GL_TEXTURE_2D
+ }
+ )
+
+ val input = FURenderInputData(width, height)
+ input.texture = FURenderInputData.FUTexture(
+ FUInputTextureEnum.FU_ADM_FLAG_COMMON_TEXTURE,
+ transformTexId
+ )
+ input.renderConfig.let {
+ if (isFront) {
+ it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
+ it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
+ it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
+ it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
+ it.deviceOrientation = 270
+ } else {
+ it.cameraFacing = CameraFacingEnum.CAMERA_BACK
+ it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
+ it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
+ it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
+ it.deviceOrientation = 270
+ }
+ }
+ if (isReleased) {
+ return@invoke -1
+ }
+ synchronized(EglBase.lock){
+ return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1
+ }
+ }
+ }
+
+ private fun processBeautySingleBuffer(videoFrame: VideoFrame): Int {
+ val texBufferHelper = textureBufferHelper ?: return -1
+ if(currProcessSourceType != ProcessSourceType.I420){
+ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.I420}")
+ if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
+ skipFrame = 3
+ }
+ currProcessSourceType = ProcessSourceType.I420
+ return -1
+ }
+ val bufferArray = getNV21Buffer(videoFrame) ?: return -1
+ val buffer = videoFrame.buffer
+ val width = buffer.width
+ val height = buffer.height
+ val isFront = videoFrame.sourceType == SourceType.kFrontCamera
+ val mirror = (isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)
+ val rotation = videoFrame.rotation
+
+ return texBufferHelper.invoke(Callable {
+ if(isReleased){
+ return@Callable -1
+ }
+ val fuRenderKit = config?.fuRenderKit ?: return@Callable -1
+ val input = FURenderInputData(width, height)
+ input.imageBuffer = FURenderInputData.FUImageBuffer(
+ FUInputBufferEnum.FU_FORMAT_NV21_BUFFER,
+ bufferArray
+ )
+ input.renderConfig.let {
+ if (isFront) {
+ it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
+ it.inputBufferMatrix = if(mirror) {
+ when (rotation) {
+ 0 -> FUTransformMatrixEnum.CCROT0
+ 180 -> FUTransformMatrixEnum.CCROT180
+ else -> FUTransformMatrixEnum.CCROT90
+ }
+ } else {
+ when (rotation) {
+ 0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
+ 180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
+ else -> FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL
+ }
+ }
+ it.inputTextureMatrix = if(mirror) {
+ when (rotation) {
+ 0 -> FUTransformMatrixEnum.CCROT0
+ 180 -> FUTransformMatrixEnum.CCROT180
+ else -> FUTransformMatrixEnum.CCROT90
+ }
+ } else {
+ when (rotation) {
+ 0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
+ 180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
+ else -> FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL
+ }
+ }
+ it.deviceOrientation = when(rotation){
+ 0 -> 270
+ 180 -> 90
+ else -> 0
+ }
+ it.outputMatrix = FUTransformMatrixEnum.CCROT0
+ } else {
+ it.cameraFacing = CameraFacingEnum.CAMERA_BACK
+ it.inputBufferMatrix = if(mirror) {
+ when (rotation) {
+ 0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
+ 180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
+ else -> FUTransformMatrixEnum.CCROT90_FLIPVERTICAL
+ }
+ } else {
+ when (rotation) {
+ 0 -> FUTransformMatrixEnum.CCROT0
+ 180 -> FUTransformMatrixEnum.CCROT180
+ else -> FUTransformMatrixEnum.CCROT270
+ }
+ }
+ it.inputTextureMatrix = if(mirror) {
+ when (rotation) {
+ 0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
+ 180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
+ else -> FUTransformMatrixEnum.CCROT90_FLIPVERTICAL
+ }
+ } else {
+ when (rotation) {
+ 0 -> FUTransformMatrixEnum.CCROT0
+ 180 -> FUTransformMatrixEnum.CCROT180
+ else -> FUTransformMatrixEnum.CCROT270
+ }
+ }
+ it.deviceOrientation = when(rotation){
+ 0 -> 270
+ 180 -> 90
+ else -> 0
+ }
+ it.outputMatrix = FUTransformMatrixEnum.CCROT0
+ }
+ }
+
+ mTextureProcessHelper?.let {
+ if(it.size() > 0){
+ it.reset()
+ return@Callable -1
+ }
+ }
+ synchronized(EglBase.lock){
+ return@Callable fuRenderKit.renderWithInput(input).texture?.texId ?: -1
+ }
+ })
+ }
+
+ private fun getNV21Buffer(videoFrame: VideoFrame): ByteArray? {
+ val buffer = videoFrame.buffer
+ val width = buffer.width
+ val height = buffer.height
+ val size = (width * height * 3.0f / 2.0f + 0.5f).toInt()
+ if (byteBuffer == null || byteBuffer?.capacity() != size || byteArray == null || byteArray?.size != size) {
+ byteBuffer?.clear()
+ byteBuffer = ByteBuffer.allocateDirect(size)
+ byteArray = ByteArray(size)
+ return null
+ }
+ val outArray = byteArray ?: return null
+ val outBuffer = byteBuffer ?: return null
+ val i420Buffer = buffer as? I420Buffer ?: buffer.toI420()
+ YuvHelper.I420ToNV12(
+ i420Buffer.dataY, i420Buffer.strideY,
+ i420Buffer.dataV, i420Buffer.strideV,
+ i420Buffer.dataU, i420Buffer.strideU,
+ outBuffer, width, height
+ )
+ outBuffer.position(0)
+ outBuffer.get(outArray)
+ if(buffer !is I420Buffer){
+ i420Buffer.release()
+ }
+ return outArray
+ }
+
+ // IVideoFrameObserver implements
+
+ override fun onCaptureVideoFrame(sourceType: Int, videoFrame: VideoFrame?): Boolean {
+ videoFrame ?: return false
+ return processBeauty(videoFrame)
+ }
+
+ override fun onPreEncodeVideoFrame(sourceType: Int, videoFrame: VideoFrame?) = false
+
+ override fun onMediaPlayerVideoFrame(videoFrame: VideoFrame?, mediaPlayerId: Int) = false
+
+ override fun onRenderVideoFrame(
+ channelId: String?,
+ uid: Int,
+ videoFrame: VideoFrame?
+ ) = false
+
+ override fun getVideoFrameProcessMode() = IVideoFrameObserver.PROCESS_MODE_READ_WRITE
+
+ override fun getVideoFormatPreference() = IVideoFrameObserver.VIDEO_PIXEL_DEFAULT
+
+ override fun getRotationApplied() = false
+
+ override fun getMirrorApplied() = captureMirror && !enable
+
+ override fun getObservedFramePosition() = IVideoFrameObserver.POSITION_POST_CAPTURER
+
+}
\ No newline at end of file
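Because the implementation measures the cost of each processed frame and forwards it through StatsHelper, supplying an IEventCallback and setting statsEnable in the Config is enough to monitor processing time. A hedged sketch of such a callback (the rest of the Config matches the earlier example):

```kotlin
import android.util.Log
import io.agora.beautyapi.faceunity.*

// Sketch only: pass this as `eventCallback` in the Config; statsEnable must be true for it to fire.
val statsCallback = object : IEventCallback {
    override fun onBeautyStats(stats: BeautyStats) {
        // Minimum/maximum/average processing cost (ms) over each statsDuration window
        Log.d("BeautyStats", "min=${stats.minCostMs} max=${stats.maxCostMs} avg=${stats.averageCostMs}")
    }
}
```

The private setParameters keys handled by this implementation are "beauty_mode" (0 auto, 1 OES texture, 2 I420, 3 single texture) and "enableTextureAsync".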
diff --git a/common/src/main/java/com/yunbao/common/manager/SWAuManager.java b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/agora/SWAuManager.java
similarity index 99%
rename from common/src/main/java/com/yunbao/common/manager/SWAuManager.java
rename to lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/agora/SWAuManager.java
index e69707e99..e22394f46 100644
--- a/common/src/main/java/com/yunbao/common/manager/SWAuManager.java
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/agora/SWAuManager.java
@@ -1,10 +1,11 @@
-package com.yunbao.common.manager;
+package io.agora.beautyapi.faceunity.agora;
import android.app.Activity;
import android.content.Context;
import android.view.SurfaceView;
import android.widget.FrameLayout;
+
import com.yunbao.common.CommonAppConfig;
import com.yunbao.common.CommonAppContext;
import com.yunbao.common.manager.base.BaseCacheManager;
diff --git a/common/src/main/java/com/yunbao/common/manager/SWManager.java b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/agora/SWManager.java
similarity index 77%
rename from common/src/main/java/com/yunbao/common/manager/SWManager.java
rename to lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/agora/SWManager.java
index 2d17287ea..6fa380edf 100644
--- a/common/src/main/java/com/yunbao/common/manager/SWManager.java
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/agora/SWManager.java
@@ -1,4 +1,4 @@
-package com.yunbao.common.manager;
+package io.agora.beautyapi.faceunity.agora;
import android.app.Activity;
import android.content.Context;
@@ -10,11 +10,20 @@ import com.yunbao.common.CommonAppContext;
import com.yunbao.common.bean.SwTokenModel;
import com.yunbao.common.http.base.HttpCallback;
import com.yunbao.common.http.live.LiveNetManager;
+import com.yunbao.common.manager.IMLoginManager;
import com.yunbao.common.manager.base.BaseCacheManager;
import com.yunbao.common.utils.L;
import com.yunbao.common.utils.StringUtil;
import com.yunbao.common.utils.ToastUtil;
+import com.yunbao.faceunity.utils.FURenderer;
+import io.agora.beautyapi.faceunity.BeautyPreset;
+import io.agora.beautyapi.faceunity.CameraConfig;
+import io.agora.beautyapi.faceunity.CaptureMode;
+import io.agora.beautyapi.faceunity.Config;
+import io.agora.beautyapi.faceunity.FaceUnityBeautyAPI;
+import io.agora.beautyapi.faceunity.FaceUnityBeautyAPIKt;
+import io.agora.beautyapi.faceunity.MirrorMode;
import io.agora.rtc2.ChannelMediaOptions;
import io.agora.rtc2.Constants;
import io.agora.rtc2.IRtcEngineEventHandler;
@@ -35,8 +44,9 @@ public class SWManager extends BaseCacheManager {
private Activity mContext;
public static SWManager manager;
private RtcEngineEx mRtcEngine;
+ private final FaceUnityBeautyAPI faceUnityBeautyAPI = FaceUnityBeautyAPIKt.createFaceUnityBeautyAPI();
private int uid;
- VideoEncoderConfiguration cfg;
+ private VideoEncoderConfiguration cfg;
private FrameLayout anchorContainer; // anchor view
private FrameLayout pkContainer1; // PK anchor view 1
private FrameLayout pkContainer2; // PK anchor view 2
@@ -111,8 +121,23 @@ public class SWManager extends BaseCacheManager {
// Create a SurfaceView and add it as a child of the FrameLayout
SurfaceView surfaceView = new SurfaceView(mContext);
anchorContainer.addView(surfaceView);
+ Config config = new Config(mContext, mRtcEngine, FURenderer.INSTANCE.mFURenderKit, null, CaptureMode.Agora, 0, false, new CameraConfig(MirrorMode.MIRROR_NONE,MirrorMode.MIRROR_NONE));
+ faceUnityBeautyAPI.initialize(config);
+ faceUnityBeautyAPI.enable(true);
+
+ faceUnityBeautyAPI.setBeautyPreset(BeautyPreset.CUSTOM);
+ //FaceUnityBeautyManage.getInstance().mFURenderKit.setFaceBeauty();
+
// Set up the view
- mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
+ faceUnityBeautyAPI.setupLocalVideo(surfaceView, Constants.RENDER_MODE_HIDDEN);
+ //faceUnityBeautyAPI.updateCameraConfig(new CameraConfig(MirrorMode.MIRROR_NONE,MirrorMode.MIRROR_NONE));
+ //mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
+ }
+
+ public void setEnableBeauty(boolean flag){
+ if(faceUnityBeautyAPI!=null){
+ faceUnityBeautyAPI.enable(flag);
+ }
}
/**
@@ -133,9 +158,51 @@ public class SWManager extends BaseCacheManager {
break;
}
mRtcEngine.setVideoEncoderConfiguration(cfg);
+            // Create a SurfaceView and add it as a child of the FrameLayout
+            SurfaceView surfaceView = new SurfaceView(mContext);
+            anchorContainer.addView(surfaceView);
+            // Set up the view
+ mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
}
}
+ /**
+     * Set the mirror mode
+ */
+ public void setMirrorMode(){
+ if(cfg!=null&&mRtcEngine!=null){
+ L.eSw("setMirrorMode设置镜像"+cfg.mirrorMode);
+ if(cfg.mirrorMode==VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_ENABLED){
+                cfg.mirrorMode = VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_DISABLED; // disable mirroring
+            }else{
+                cfg.mirrorMode = VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_ENABLED; // enable mirroring
+ }
+ mRtcEngine.setVideoEncoderConfiguration(cfg);
+ SurfaceView surfaceView = new SurfaceView(mContext);
+ anchorContainer.addView(surfaceView);
+ mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
+ }
+ }
+
+ /**
+     * Switch camera
+ */
+ public void switchCamera(){
+ if(mRtcEngine!=null){
+ mRtcEngine.switchCamera();
+ }
+ }
+
+ /**
+     * Set the beauty preset
+ */
+ public void setBeautPreset(){
+ if(mRtcEngine!=null){
+
+ }
+ }
+
+
/**
* Create a live room
*/
@@ -231,6 +298,15 @@ public class SWManager extends BaseCacheManager {
mRtcEngine.leaveChannelEx(rtcConnection);
}
+ /**
+     * Leave all channels
+ */
+ public void exitChannelAll(){
+ if(mRtcEngine!=null){
+ mRtcEngine.leaveChannel();
+ }
+ }
+
//Set up the remote anchor's view
private void setupRemoteVideo(int uid) {
SurfaceView surfaceView = new SurfaceView(mContext);
@@ -292,6 +368,12 @@ public class SWManager extends BaseCacheManager {
super.onLeaveChannel(stats);
L.eSw("onLeaveChannel退出頻道");
}
+
+ @Override
+ public void onLocalVideoStateChanged(Constants.VideoSourceType source, int state, int error) {
+ super.onLocalVideoStateChanged(source, state, error);
+ L.eSw("onLocalVideoStateChanged_source"+source+" state_"+state+" error_"+error);
+ }
};
private void refreshToken() {
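The helpers added to SWManager give the UI layer a thin façade over the beauty API and the encoder mirror state. A hedged Kotlin sketch of calling them; it assumes SWManager.manager (the public static instance declared in the class) has already been created by the anchor start-up flow:

```kotlin
import io.agora.beautyapi.faceunity.agora.SWManager

// Sketch only: SWManager.manager is the static instance field shown above.
fun onBeautySwitchChanged(enabled: Boolean) {
    SWManager.manager?.setEnableBeauty(enabled) // forwards to FaceUnityBeautyAPI.enable()
}

fun onMirrorClicked() {
    SWManager.manager?.setMirrorMode()          // toggles the encoder mirror mode and rebinds the local view
}

fun onSwitchCameraClicked() {
    SWManager.manager?.switchCamera()
}
```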
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/FuDeviceUtils.java b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/FuDeviceUtils.java
new file mode 100644
index 000000000..60de92611
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/FuDeviceUtils.java
@@ -0,0 +1,607 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2023 Agora Community
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ *
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package io.agora.beautyapi.faceunity.utils;
+
+import android.annotation.TargetApi;
+import android.app.ActivityManager;
+import android.content.Context;
+import android.os.Build;
+import android.text.TextUtils;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+public class FuDeviceUtils {
+
+ public static final String TAG = "FuDeviceUtils";
+
+ public static final int DEVICE_LEVEL_HIGH = 2;
+ public static final int DEVICE_LEVEL_MID = 1;
+ public static final int DEVICE_LEVEL_LOW = 0;
+
+ /**
+ * The default return value of any method in this class when an
+ * error occurs or when processing fails (Currently set to -1). Use this to check if
+ * the information about the device in question was successfully obtained.
+ */
+ public static final int DEVICEINFO_UNKNOWN = -1;
+
+ private static final FileFilter CPU_FILTER = new FileFilter() {
+ @Override
+ public boolean accept(File pathname) {
+ String path = pathname.getName();
+ //regex is slow, so checking char by char.
+ if (path.startsWith("cpu")) {
+ for (int i = 3; i < path.length(); i++) {
+ if (!Character.isDigit(path.charAt(i))) {
+ return false;
+ }
+ }
+ return true;
+ }
+ return false;
+ }
+ };
+
+
+ /**
+ * Calculates the total RAM of the device through Android API or /proc/meminfo.
+ *
+ * @param c - Context object for current running activity.
+ * @return Total RAM that the device has, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
+ */
+ @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
+ public static long getTotalMemory(Context c) {
+ // memInfo.totalMem not supported in pre-Jelly Bean APIs.
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
+ ActivityManager.MemoryInfo memInfo = new ActivityManager.MemoryInfo();
+ ActivityManager am = (ActivityManager) c.getSystemService(Context.ACTIVITY_SERVICE);
+ am.getMemoryInfo(memInfo);
+ if (memInfo != null) {
+ return memInfo.totalMem;
+ } else {
+ return DEVICEINFO_UNKNOWN;
+ }
+ } else {
+ long totalMem = DEVICEINFO_UNKNOWN;
+ try {
+ FileInputStream stream = new FileInputStream("/proc/meminfo");
+ try {
+ totalMem = parseFileForValue("MemTotal", stream);
+ totalMem *= 1024;
+ } finally {
+ stream.close();
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return totalMem;
+ }
+ }
+
+ /**
+ * Method for reading the clock speed of a CPU core on the device. Will read from either
+ * {@code /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq} or {@code /proc/cpuinfo}.
+ *
+ * @return Clock speed of a core on the device, or -1 in the event of an error.
+ */
+ public static int getCPUMaxFreqKHz() {
+ int maxFreq = DEVICEINFO_UNKNOWN;
+ try {
+ for (int i = 0; i < getNumberOfCPUCores(); i++) {
+ String filename =
+ "/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
+ File cpuInfoMaxFreqFile = new File(filename);
+ if (cpuInfoMaxFreqFile.exists() && cpuInfoMaxFreqFile.canRead()) {
+ byte[] buffer = new byte[128];
+ FileInputStream stream = new FileInputStream(cpuInfoMaxFreqFile);
+ try {
+ stream.read(buffer);
+ int endIndex = 0;
+ //Trim the first number out of the byte buffer.
+                        while (endIndex < buffer.length && Character.isDigit(buffer[endIndex])) {
+ endIndex++;
+ }
+ String str = new String(buffer, 0, endIndex);
+ Integer freqBound = Integer.parseInt(str);
+ if (freqBound > maxFreq) {
+ maxFreq = freqBound;
+ }
+ } catch (NumberFormatException e) {
+ //Fall through and use /proc/cpuinfo.
+ } finally {
+ stream.close();
+ }
+ }
+ }
+ if (maxFreq == DEVICEINFO_UNKNOWN) {
+ FileInputStream stream = new FileInputStream("/proc/cpuinfo");
+ try {
+ int freqBound = parseFileForValue("cpu MHz", stream);
+ freqBound *= 1024; //MHz -> kHz
+ if (freqBound > maxFreq) maxFreq = freqBound;
+ } finally {
+ stream.close();
+ }
+ }
+ } catch (IOException e) {
+ maxFreq = DEVICEINFO_UNKNOWN; //Fall through and return unknown.
+ }
+ return maxFreq;
+ }
+
+ /**
+ * Reads the number of CPU cores from the first available information from
+ * {@code /sys/devices/system/cpu/possible}, {@code /sys/devices/system/cpu/present},
+ * then {@code /sys/devices/system/cpu/}.
+ *
+     * @return Number of CPU cores in the phone, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
+ */
+ public static int getNumberOfCPUCores() {
+ if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.GINGERBREAD_MR1) {
+ // Gingerbread doesn't support giving a single application access to both cores, but a
+ // handful of devices (Atrix 4G and Droid X2 for example) were released with a dual-core
+ // chipset and Gingerbread; that can let an app in the background run without impacting
+ // the foreground application. But for our purposes, it makes them single core.
+ return 1;
+ }
+ int cores;
+ try {
+ cores = getCoresFromFileInfo("/sys/devices/system/cpu/possible");
+ if (cores == DEVICEINFO_UNKNOWN) {
+ cores = getCoresFromFileInfo("/sys/devices/system/cpu/present");
+ }
+ if (cores == DEVICEINFO_UNKNOWN) {
+ cores = new File("/sys/devices/system/cpu/").listFiles(CPU_FILTER).length;
+ }
+ } catch (SecurityException e) {
+ cores = DEVICEINFO_UNKNOWN;
+ } catch (NullPointerException e) {
+ cores = DEVICEINFO_UNKNOWN;
+ }
+ return cores;
+ }
+
+ /**
+ * Tries to read file contents from the file location to determine the number of cores on device.
+ *
+ * @param fileLocation The location of the file with CPU information
+     * @return Number of CPU cores in the phone, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
+ */
+ private static int getCoresFromFileInfo(String fileLocation) {
+ InputStream is = null;
+ try {
+ is = new FileInputStream(fileLocation);
+ BufferedReader buf = new BufferedReader(new InputStreamReader(is));
+ String fileContents = buf.readLine();
+ buf.close();
+ return getCoresFromFileString(fileContents);
+ } catch (IOException e) {
+ return DEVICEINFO_UNKNOWN;
+ } finally {
+ if (is != null) {
+ try {
+ is.close();
+ } catch (IOException e) {
+ // Do nothing.
+ }
+ }
+ }
+ }
+
+ /**
+ * Converts from a CPU core information format to number of cores.
+ *
+ * @param str The CPU core information string, in the format of "0-N"
+ * @return The number of cores represented by this string
+ */
+ private static int getCoresFromFileString(String str) {
+ if (str == null || !str.matches("0-[\\d]+$")) {
+ return DEVICEINFO_UNKNOWN;
+ }
+ return Integer.valueOf(str.substring(2)) + 1;
+ }
+
+ /**
+ * Helper method for reading values from system files, using a minimised buffer.
+ *
+ * @param textToMatch - Text in the system files to read for.
+ * @param stream - FileInputStream of the system file being read from.
+     * @return A numerical value following textToMatch in the specified system file.
+ * -1 in the event of a failure.
+ */
+ private static int parseFileForValue(String textToMatch, FileInputStream stream) {
+ byte[] buffer = new byte[1024];
+ try {
+ int length = stream.read(buffer);
+ for (int i = 0; i < length; i++) {
+ if (buffer[i] == '\n' || i == 0) {
+ if (buffer[i] == '\n') i++;
+ for (int j = i; j < length; j++) {
+ int textIndex = j - i;
+ //Text doesn't match query at some point.
+ if (buffer[j] != textToMatch.charAt(textIndex)) {
+ break;
+ }
+ //Text matches query here.
+ if (textIndex == textToMatch.length() - 1) {
+ return extractValue(buffer, j);
+ }
+ }
+ }
+ }
+ } catch (IOException e) {
+ //Ignore any exceptions and fall through to return unknown value.
+ } catch (NumberFormatException e) {
+ }
+ return DEVICEINFO_UNKNOWN;
+ }
+
+ /**
+ * Helper method used by {@link #parseFileForValue(String, FileInputStream) parseFileForValue}. Parses
+ * the next available number after the match in the file being read and returns it as an integer.
+ *
+ * @param index - The index in the buffer array to begin looking.
+ * @return The next number on that line in the buffer, returned as an int. Returns
+ * DEVICEINFO_UNKNOWN = -1 in the event that no more numbers exist on the same line.
+ */
+ private static int extractValue(byte[] buffer, int index) {
+ while (index < buffer.length && buffer[index] != '\n') {
+ if (Character.isDigit(buffer[index])) {
+ int start = index;
+ index++;
+ while (index < buffer.length && Character.isDigit(buffer[index])) {
+ index++;
+ }
+ String str = new String(buffer, 0, start, index - start);
+ return Integer.parseInt(str);
+ }
+ index++;
+ }
+ return DEVICEINFO_UNKNOWN;
+ }
+
+ /**
+     * Gets the currently available memory (RAM) in bytes.
+     *
+     * @param context Context used to obtain the ActivityManager.
+     * @return Available memory in bytes.
+ */
+ public static long getAvailMemory(Context context) {
+ ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
+ ActivityManager.MemoryInfo mi = new ActivityManager.MemoryInfo();
+ am.getMemoryInfo(mi);
+ return mi.availMem;
+ }
+
+ /**
+     * Gets the device brand.
+     *
+     * @return The value of {@link Build#BRAND}.
+ */
+ public static String getBrand() {
+ return Build.BRAND;
+ }
+
+ /**
+     * Gets the device model.
+     *
+     * @return The value of {@link Build#MODEL}.
+ */
+ public static String getModel() {
+ return Build.MODEL;
+ }
+
+ /**
+     * Gets the hardware information (CPU model).
+     *
+     * @return The CPU model parsed from /proc/cpuinfo, or {@link Build#HARDWARE} as a fallback.
+ */
+ public static String getHardWare() {
+ try {
+ FileReader fr = new FileReader("/proc/cpuinfo");
+ BufferedReader br = new BufferedReader(fr);
+ String text;
+ String last = "";
+ while ((text = br.readLine()) != null) {
+ last = text;
+ }
+            //On most devices the CPU model appears on the last line of /proc/cpuinfo.
+ if (last.contains("Hardware")) {
+ String[] hardWare = last.split(":\\s+", 2);
+ return hardWare[1];
+ }
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return Build.HARDWARE;
+ }
+
+
+ /**
+ * Level judgement based on current memory and CPU.
+ *
+ * @param context - Context object.
+     * @return One of DEVICE_LEVEL_HIGH, DEVICE_LEVEL_MID or DEVICE_LEVEL_LOW.
+ */
+ public static int judgeDeviceLevel(Context context) {
+ int level;
+        //Some devices do not follow the generic rules below, so check the known-model lists first.
+ int specialDevice = judgeDeviceLevelInDeviceName();
+ if (specialDevice >= 0) return specialDevice;
+
+ int ramLevel = judgeMemory(context);
+ int cpuLevel = judgeCPU();
+ if (ramLevel == 0 || ramLevel == 1 || cpuLevel == 0) {
+ level = DEVICE_LEVEL_LOW;
+ } else {
+ if (cpuLevel > 1) {
+ level = DEVICE_LEVEL_HIGH;
+ } else {
+ level = DEVICE_LEVEL_MID;
+ }
+ }
+ LogUtils.d(TAG,"DeviceLevel: " + level);
+ return level;
+ }
+
+ /**
+     * Looks the device model up in the known high/mid/low-end model lists.
+     *
+     * @return The matched device level, or -1 if the model is not a known special model.
+ */
+ private static int judgeDeviceLevelInDeviceName() {
+ String currentDeviceName = getDeviceName();
+ for (String deviceName:upscaleDevice) {
+ if (deviceName.equals(currentDeviceName)) {
+ return DEVICE_LEVEL_HIGH;
+ }
+ }
+
+ for (String deviceName:middleDevice) {
+ if (deviceName.equals(currentDeviceName)) {
+ return DEVICE_LEVEL_MID;
+ }
+ }
+
+ for (String deviceName:lowDevice) {
+ if (deviceName.equals(currentDeviceName)) {
+ return DEVICE_LEVEL_LOW;
+ }
+ }
+ return -1;
+ }
+
+ public static final String[] upscaleDevice = {"vivo X6S A","MHA-AL00","VKY-AL00","V1838A"};
+ public static final String[] lowDevice = {};
+ public static final String[] middleDevice = {"OPPO R11s","PAR-AL00","MI 8 Lite","ONEPLUS A6000","PRO 6","PRO 7 Plus"};
+
+ /**
+     * Rates the total RAM on a 0-4 scale.
+     *
+     * @return 0 for 2GB or less, up to 4 for more than 6GB.
+ */
+ private static int judgeMemory(Context context) {
+ long ramMB = getTotalMemory(context) / (1024 * 1024);
+ int level = -1;
+        if (ramMB <= 2000) { //2GB or less: lowest tier
+            level = 0;
+        } else if (ramMB <= 3000) { //2-3GB
+            level = 1;
+        } else if (ramMB <= 4000) { //4GB tier: mainstream mid-range devices (2018)
+            level = 2;
+        } else if (ramMB <= 6000) { //6GB tier: high-end devices
+            level = 3;
+        } else { //above 6GB: flagship devices
+ level = 4;
+ }
+ return level;
+ }
+
+ /**
+     * Rates the CPU level based on its maximum frequency and the vendor/model name.
+     *
+     * @return A level from 0 (low-end) to 3 (high-end).
+ */
+ private static int judgeCPU() {
+ int level = 0;
+ String cpuName = getHardWare();
+ int freqMHz = getCPUMaxFreqKHz() / 1024;
+
+        //Vendor-specific whitelists for chips that do not follow the generic rules below.
+        //If the CPU model name is available, use a vendor-specific judgement strategy.
+        if (!TextUtils.isEmpty(cpuName)) {
+            if (cpuName.contains("qcom") || cpuName.contains("Qualcomm")) { //Qualcomm Snapdragon
+                return judgeQualcommCPU(cpuName, freqMHz);
+            } else if (cpuName.contains("hi") || cpuName.contains("kirin")) { //HiSilicon Kirin
+                return judgeSkinCPU(cpuName, freqMHz);
+            } else if (cpuName.contains("MT")) {//MediaTek
+                return judgeMTCPU(cpuName, freqMHz);
+ }
+ }
+
+        //Generic frequency-based rules when the CPU model cannot be determined.
+        if (freqMHz <= 1600) { //around 1.5GHz: low-end
+            level = 0;
+        } else if (freqMHz <= 1950) { //around 2GHz: low-to-mid range
+            level = 1;
+        } else if (freqMHz <= 2500) { //2.2-2.3GHz: mid-to-high range
+            level = 2;
+        } else { //high-end
+ level = 3;
+ }
+ return level;
+ }
+
+ /**
+     * Rates MediaTek chips.
+     *
+     * @return A level from 0 (low-end) to 3 (high-end).
+ */
+ private static int judgeMTCPU(String cpuName, int freqMHz) {
+        //Everything before the Helio P60 (MT6771V/C) is treated as low-end.
+ int level = 0;
+ int mtCPUVersion = getMTCPUVersion(cpuName);
+ if (mtCPUVersion == -1) {
+            //The version could not be parsed; apply a stricter frequency rule to pick out high-end devices.
+            if (freqMHz <= 1600) { //around 1.5GHz: low-end
+                level = 0;
+            } else if (freqMHz <= 2200) { //around 2GHz: low-to-mid range
+                level = 1;
+            } else if (freqMHz <= 2700) { //2.2-2.3GHz: mid-to-high range
+                level = 2;
+            } else { //high-end
+ level = 3;
+ }
+ } else if (mtCPUVersion < 6771) {
+            //These are all low-to-mid range devices.
+            if (freqMHz <= 1600) { //around 1.5GHz: low-end
+                level = 0;
+            } else { //around 2GHz: mid-range
+                level = 1;
+            }
+        } else {
+            if (freqMHz <= 1600) { //around 1.5GHz: low-end
+                level = 0;
+            } else if (freqMHz <= 1900) { //around 2GHz: low-to-mid range
+                level = 1;
+            } else if (freqMHz <= 2500) { //2.2-2.3GHz: mid-to-high range
+                level = 2;
+            } else { //high-end
+ level = 3;
+ }
+ }
+
+ return level;
+ }
+
+ /**
+     * Extracts the numeric version from a MediaTek CPU model name.
+     *
+     * @param cpuName The CPU model name, e.g. "MT6771V/C".
+     * @return The four-digit version number, or -1 if it cannot be parsed.
+ */
+ private static int getMTCPUVersion(String cpuName) {
+        //Take the four digits that follow the "MT" prefix.
+ int cpuVersion = -1;
+ if (cpuName.length() > 5) {
+ String cpuVersionStr = cpuName.substring(2, 6);
+ try {
+ cpuVersion = Integer.valueOf(cpuVersionStr);
+ } catch (NumberFormatException exception) {
+ exception.printStackTrace();
+ }
+ }
+
+ return cpuVersion;
+ }
+
+ /**
+     * Rates Qualcomm Snapdragon chips.
+     *
+     * @return A level from 0 (low-end) to 3 (high-end).
+ */
+ private static int judgeQualcommCPU(String cpuName, int freqMHz) {
+ int level = 0;
+        //"MSM" chips (e.g. MSM8937) are older parts.
+        //Newer 7/8-series parts use names such as SDM710.
+        if (cpuName.contains("MSM")) {
+            //Older chips
+            if (freqMHz <= 1600) { //around 1.5GHz: low-end
+                level = 0;
+            } else { //around 2GHz: low-to-mid range
+                level = 1;
+            }
+        } else {
+            //Newer chips
+            if (freqMHz <= 1600) { //around 1.5GHz: low-end
+                level = 0;
+            } else if (freqMHz <= 2000) { //around 2GHz: low-to-mid range
+                level = 1;
+            } else if (freqMHz <= 2500) { //2.2-2.3GHz: mid-to-high range
+                level = 2;
+            } else { //high-end
+ level = 3;
+ }
+ }
+
+ return level;
+ }
+
+ /**
+     * Rates HiSilicon Kirin chips.
+     *
+     * @param freqMHz Maximum CPU core frequency in MHz.
+     * @return A level from 0 (low-end) to 3 (high-end).
+ */
+ private static int judgeSkinCPU(String cpuName, int freqMHz) {
+        //Judged by model name (kirin710 and later) plus the maximum core frequency.
+        int level = 0;
+        if (cpuName.startsWith("hi")) {
+            //"hi"-prefixed HiSilicon chips are low-to-mid range.
+            if (freqMHz <= 1600) { //around 1.5GHz: low-end
+                level = 0;
+            } else if (freqMHz <= 2000) { //around 2GHz: low-to-mid range
+                level = 1;
+            }
+        } else {
+            //Kirin-branded HiSilicon chips.
+            if (freqMHz <= 1600) { //around 1.5GHz: low-end
+                level = 0;
+            } else if (freqMHz <= 2000) { //around 2GHz: low-to-mid range
+                level = 1;
+            } else if (freqMHz <= 2500) { //2.2-2.3GHz: mid-to-high range
+                level = 2;
+            } else { //high-end
+ level = 3;
+ }
+ }
+
+ return level;
+ }
+
+ public static final String Nexus_6P = "Nexus 6P";
+
+ /**
+     * Gets the device name.
+     *
+     * @return The value of {@link Build#MODEL}, or an empty string if it is unavailable.
+ */
+ public static String getDeviceName() {
+ String deviceName = "";
+ if (Build.MODEL != null) deviceName = Build.MODEL;
+ LogUtils.e(TAG,"deviceName: " + deviceName);
+ return deviceName;
+ }
+}
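
As a usage reference for the device grading above, here is a minimal Kotlin sketch that maps the detected level to a render quality tier; the BeautyQuality enum and chooseBeautyQuality function are illustrative assumptions rather than part of this module.

import android.content.Context
import io.agora.beautyapi.faceunity.utils.FuDeviceUtils

// Hypothetical tier enum used only for this sketch.
enum class BeautyQuality { LOW, MEDIUM, HIGH }

// Map the detected device level to a beauty quality tier.
fun chooseBeautyQuality(context: Context): BeautyQuality =
    when (FuDeviceUtils.judgeDeviceLevel(context)) {
        FuDeviceUtils.DEVICE_LEVEL_HIGH -> BeautyQuality.HIGH
        FuDeviceUtils.DEVICE_LEVEL_MID -> BeautyQuality.MEDIUM
        else -> BeautyQuality.LOW
    }
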
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/LogUtils.kt b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/LogUtils.kt
new file mode 100644
index 000000000..4c1a5252d
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/LogUtils.kt
@@ -0,0 +1,57 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2023 Agora Community
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package io.agora.beautyapi.faceunity.utils
+
+import io.agora.base.internal.Logging
+
+object LogUtils {
+ private const val beautyType = "FaceUnity"
+
+
+ @JvmStatic
+ fun i(tag: String, content: String, vararg args: Any) {
+        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
+ Logging.log(Logging.Severity.LS_INFO, tag, consoleMessage)
+ }
+
+ @JvmStatic
+ fun d(tag: String, content: String, vararg args: Any) {
+        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
+ Logging.d(tag, consoleMessage)
+ }
+
+ @JvmStatic
+ fun w(tag: String, content: String, vararg args: Any){
+        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
+ Logging.w(tag, consoleMessage)
+ }
+
+ @JvmStatic
+ fun e(tag: String, content: String, vararg args: Any){
+        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
+ Logging.e(tag, consoleMessage)
+ }
+
+}
\ No newline at end of file
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/StatsHelper.kt b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/StatsHelper.kt
new file mode 100644
index 000000000..cb4cf1292
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/StatsHelper.kt
@@ -0,0 +1,80 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2023 Agora Community
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package io.agora.beautyapi.faceunity.utils
+
+import android.os.Handler
+import android.os.Looper
+import io.agora.beautyapi.faceunity.BeautyStats
+import kotlin.math.max
+import kotlin.math.min
+
+class StatsHelper(
+ private val statsDuration: Long,
+ private val onStatsChanged: (BeautyStats) -> Unit
+) {
+
+ private val mMainHandler = Handler(Looper.getMainLooper())
+ private var mStartTime = 0L
+    private var mCostList = mutableListOf<Long>()
+ private var mCostMax = 0L
+ private var mCostMin = Long.MAX_VALUE
+
+ fun once(cost: Long) {
+ val curr = System.currentTimeMillis()
+ if (mStartTime == 0L) {
+ mStartTime = curr
+ } else if (curr - mStartTime >= statsDuration) {
+ mStartTime = curr
+ var total = 0L
+ mCostList.forEach {
+ total += it
+ }
+ val average = total / mCostList.size
+ val costMin = mCostMin
+ val costMax = mCostMax
+ mMainHandler.post {
+ onStatsChanged.invoke(BeautyStats(costMin, costMax, average))
+ }
+
+ mCostList.clear()
+ mCostMax = 0L
+ mCostMin = Long.MAX_VALUE
+ }
+
+ mCostList.add(cost)
+ mCostMax = max(mCostMax, cost)
+ mCostMin = min(mCostMin, cost)
+ }
+
+ fun reset() {
+ mMainHandler.removeCallbacksAndMessages(null)
+ mStartTime = 0
+ mCostList.clear()
+ mCostMax = 0L
+ mCostMin = Long.MAX_VALUE
+ }
+
+
+}
\ No newline at end of file
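
A minimal sketch of how StatsHelper can be driven, assuming a 1000 ms reporting window and a plain logcat sink; the onFrameProcessed hook is hypothetical.

import android.util.Log
import io.agora.beautyapi.faceunity.utils.StatsHelper

// Assumed wiring: report min/max/average per-frame cost once per second.
val statsHelper = StatsHelper(statsDuration = 1000L) { stats ->
    Log.d("BeautyStats", stats.toString())
}

// Call once per processed frame with the measured cost in milliseconds.
fun onFrameProcessed(costMs: Long) {
    statsHelper.once(costMs)
}
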
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/EGLContextHelper.java b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/EGLContextHelper.java
new file mode 100644
index 000000000..97b3c7a53
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/EGLContextHelper.java
@@ -0,0 +1,210 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2023 Agora Community
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package io.agora.beautyapi.faceunity.utils.egl;
+
+import static android.opengl.EGL14.EGL_CONTEXT_CLIENT_VERSION;
+
+import android.opengl.GLDebugHelper;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+import io.agora.beautyapi.faceunity.utils.LogUtils;
+
+public class EGLContextHelper {
+ private static final String DEBUG_TAG = "EGLContextManager";
+ private final int mRedSize = 8;
+ private final int mGreenSize = 8;
+ private final int mBlueSize = 8;
+ private final int mAlphaSize = 0;
+ private final int mDepthSize = 16;
+ private final int mStencilSize = 0;
+ private final int mRenderType = 4;
+ public EGLContextHelper(){}
+
+ public void initEGL(EGLContext shareContext) throws Exception {
+ mEGL = (EGL10) GLDebugHelper.wrap(EGLContext.getEGL(),
+ GLDebugHelper.CONFIG_CHECK_GL_ERROR
+ | GLDebugHelper.CONFIG_CHECK_THREAD, null);
+
+ if (mEGL == null) {
+ throw new Exception("Couldn't get EGL");
+ }
+
+ mGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+
+ if (mGLDisplay == null) {
+ throw new Exception("Couldn't get display for GL");
+ }
+
+ int[] curGLVersion = new int[2];
+ mEGL.eglInitialize(mGLDisplay, curGLVersion);
+
+ LogUtils.i(DEBUG_TAG, "GL version = " + curGLVersion[0] + "."
+ + curGLVersion[1]);
+
+ int[] num_config = new int[1];
+ if(!mEGL.eglChooseConfig(mGLDisplay, mConfigSpec, null, 1,
+ num_config)){
+ throw new IllegalArgumentException("eglChooseConfig failed");
+ }
+ int numConfigs = num_config[0];
+ if (numConfigs <= 0) {
+ throw new IllegalArgumentException(
+ "No configs match configSpec");
+ }
+
+ EGLConfig[] configs = new EGLConfig[numConfigs];
+ if (!mEGL.eglChooseConfig(mGLDisplay, mConfigSpec, configs, numConfigs,
+ num_config)) {
+ throw new IllegalArgumentException("eglChooseConfig#2 failed");
+ }
+ mGLConfig = chooseConfig(mEGL, mGLDisplay, configs);
+ if (mGLConfig == null) {
+ mGLConfig = configs[0];
+ }
+
+ int[] surfaceAttribs = {
+ EGL10.EGL_WIDTH, 1,
+ EGL10.EGL_HEIGHT, 1,
+ EGL10.EGL_NONE
+ };
+ mGLSurface = mEGL.eglCreatePbufferSurface(mGLDisplay, mGLConfig, surfaceAttribs);
+
+ if (mGLSurface == null) {
+ throw new Exception("Couldn't create new surface");
+ }
+
+ int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
+ mGLContext = mEGL.eglCreateContext(mGLDisplay, mGLConfig,
+ shareContext, attrib_list);
+
+ if (mGLContext == null) {
+ throw new Exception("Couldn't create new context");
+ }
+
+
+// if (!mEGL.eglMakeCurrent(mGLDisplay, mGLSurface, mGLSurface, mGLContext)) {
+// throw new Exception("Failed to eglMakeCurrent");
+// }
+
+ }
+
+ public EGLContext getEGLContext() {
+ return mGLContext;
+ }
+
+ public EGLDisplay getGLDisplay() {
+ return mGLDisplay;
+ }
+
+ public EGLConfig getGLConfig() {
+ return mGLConfig;
+ }
+
+ public EGLSurface getGLSurface() {
+ return mGLSurface;
+ }
+
+ public EGL10 getEGL() {
+ return mEGL;
+ }
+
+ EGL10 mEGL;
+ EGLDisplay mGLDisplay;
+ EGLConfig mGLConfig;
+ EGLSurface mGLSurface;
+ EGLContext mGLContext;
+
+ int[] mConfigSpec = new int[]{
+ EGL10.EGL_RED_SIZE, mRedSize,
+ EGL10.EGL_GREEN_SIZE, mGreenSize,
+ EGL10.EGL_BLUE_SIZE, mBlueSize,
+ EGL10.EGL_ALPHA_SIZE, mAlphaSize,
+ EGL10.EGL_DEPTH_SIZE, mDepthSize,
+ EGL10.EGL_STENCIL_SIZE, mStencilSize,
+            EGL10.EGL_RENDERABLE_TYPE, mRenderType,//EGL_OPENGL_ES2_BIT: request an OpenGL ES 2.0 config
+ EGL10.EGL_NONE};
+
+ public void release() {
+ mEGL.eglMakeCurrent(mGLDisplay, EGL10.EGL_NO_SURFACE,
+ EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
+ mEGL.eglDestroySurface(mGLDisplay, mGLSurface);
+ mEGL.eglDestroyContext(mGLDisplay, mGLContext);
+ mEGL.eglTerminate(mGLDisplay);
+
+ LogUtils.i(DEBUG_TAG, "GL Cleaned up");
+ }
+
+ public boolean eglMakeCurrent(){
+ if(mGLContext == EGL10.EGL_NO_CONTEXT){
+ return false;
+ }else{
+ return mEGL.eglMakeCurrent(mGLDisplay, mGLSurface, mGLSurface, mGLContext);
+ }
+ }
+
+ public boolean eglMakeNoCurrent(){
+ return mEGL.eglMakeCurrent(mGLDisplay, EGL10.EGL_NO_SURFACE,
+ EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
+ }
+
+ private EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
+ EGLConfig[] configs) {
+ for (EGLConfig config : configs) {
+ int d = findConfigAttrib(egl, display, config,
+ EGL10.EGL_DEPTH_SIZE, 0);
+ int s = findConfigAttrib(egl, display, config,
+ EGL10.EGL_STENCIL_SIZE, 0);
+ if ((d >= mDepthSize) && (s >= mStencilSize)) {
+ int r = findConfigAttrib(egl, display, config,
+ EGL10.EGL_RED_SIZE, 0);
+ int g = findConfigAttrib(egl, display, config,
+ EGL10.EGL_GREEN_SIZE, 0);
+ int b = findConfigAttrib(egl, display, config,
+ EGL10.EGL_BLUE_SIZE, 0);
+ int a = findConfigAttrib(egl, display, config,
+ EGL10.EGL_ALPHA_SIZE, 0);
+ if ((r == mRedSize) && (g == mGreenSize)
+ && (b == mBlueSize) && (a == mAlphaSize)) {
+ return config;
+ }
+ }
+ }
+ return null;
+ }
+
+ private int findConfigAttrib(EGL10 egl, EGLDisplay display,
+ EGLConfig config, int attribute, int defaultValue) {
+ int[] value = new int[1];
+ if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
+ return value[0];
+ }
+ return defaultValue;
+ }
+}
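
A sketch of one way to host the helper on a dedicated worker thread, assuming the caller already has a GL context current when setUpSharedContext() is invoked; the thread and function names are illustrative.

import java.util.concurrent.Executors
import io.agora.beautyapi.faceunity.utils.egl.EGLContextHelper
import io.agora.beautyapi.faceunity.utils.egl.GLUtils

// Assumed wiring: a single worker thread owns the pbuffer-backed context.
val glThread = Executors.newSingleThreadExecutor()
val eglHelper = EGLContextHelper()

fun setUpSharedContext() {
    // Must run on a thread that already has an EGL context current,
    // so the helper's context can share textures with it.
    val shareContext = GLUtils.getCurrGLContext()
    glThread.execute {
        eglHelper.initEGL(shareContext)  // throws if EGL initialization fails
        eglHelper.eglMakeCurrent()
    }
}

fun tearDown() {
    glThread.execute {
        eglHelper.eglMakeNoCurrent()
        eglHelper.release()
    }
    glThread.shutdown()
}
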
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLCopyHelper.java b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLCopyHelper.java
new file mode 100644
index 000000000..b475f39d9
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLCopyHelper.java
@@ -0,0 +1,84 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2023 Agora Community
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package io.agora.beautyapi.faceunity.utils.egl;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.GLES30;
+
+public class GLCopyHelper {
+ private final int bufferCount;
+
+ public GLCopyHelper(){
+ this(1);
+ }
+
+ public GLCopyHelper(int bufferCount){
+ this.bufferCount = bufferCount;
+ }
+
+ private int[] mDstFrameBuffer;
+ private int[] mSrcFrameBuffer;
+
+ public void copy2DTextureToOesTexture(
+ int srcTexture,
+ int dstTexture,
+ int width, int height,
+ int index){
+ if(mDstFrameBuffer == null){
+ mDstFrameBuffer = new int[bufferCount];
+ GLES20.glGenFramebuffers(bufferCount, mDstFrameBuffer, 0);
+ }
+
+ if(mSrcFrameBuffer == null){
+ mSrcFrameBuffer = new int[bufferCount];
+ GLES20.glGenFramebuffers(bufferCount, mSrcFrameBuffer, 0);
+ }
+
+ GLES30.glBindFramebuffer(GLES30.GL_READ_FRAMEBUFFER, mSrcFrameBuffer[index]);
+ GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, srcTexture);
+ GLES30.glFramebufferTexture2D(GLES30.GL_READ_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0, GLES30.GL_TEXTURE_2D, srcTexture, 0);
+ GLES30.glBindFramebuffer(GLES30.GL_DRAW_FRAMEBUFFER, mDstFrameBuffer[index]);
+ GLES30.glFramebufferTexture2D(GLES30.GL_DRAW_FRAMEBUFFER,
+ GLES30.GL_COLOR_ATTACHMENT0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, dstTexture, 0);
+ GLES30.glBlitFramebuffer(0, 0, width, height, 0, 0, width, height, GLES30.GL_COLOR_BUFFER_BIT, GLES30.GL_LINEAR);
+ GLES30.glBindFramebuffer(GLES30.GL_DRAW_FRAMEBUFFER, 0);
+ GLES30.glBindFramebuffer(GLES30.GL_READ_FRAMEBUFFER, 0);
+ GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
+ GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
+
+ public void release(){
+ if(mDstFrameBuffer != null){
+ GLES20.glDeleteFramebuffers(mDstFrameBuffer.length, mDstFrameBuffer, 0);
+ mDstFrameBuffer = null;
+ }
+
+ if(mSrcFrameBuffer != null){
+ GLES20.glDeleteFramebuffers(mSrcFrameBuffer.length, mSrcFrameBuffer, 0);
+ mSrcFrameBuffer = null;
+ }
+ }
+}
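
A usage sketch under the assumption that an OpenGL ES 3.0 context is current on the calling thread (the copy relies on glBlitFramebuffer); the texture ids and function names are placeholders.

import io.agora.beautyapi.faceunity.utils.egl.GLCopyHelper

// Single-buffer helper; index 0 is the only valid slot here.
val copyHelper = GLCopyHelper()

fun copyProcessedFrame(srcTexture2D: Int, dstTextureOes: Int, width: Int, height: Int) {
    copyHelper.copy2DTextureToOesTexture(srcTexture2D, dstTextureOes, width, height, 0)
}

fun onGlContextReleased() {
    copyHelper.release()
}
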
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java
new file mode 100644
index 000000000..e7588a7e6
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java
@@ -0,0 +1,204 @@
+package io.agora.beautyapi.faceunity.utils.egl;
+
+import android.graphics.Matrix;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+
+import io.agora.base.internal.video.EglBase;
+import io.agora.base.internal.video.GlRectDrawer;
+import io.agora.base.internal.video.RendererCommon;
+
+public class GLFrameBuffer {
+
+ private int mFramebufferId = -1;
+ private int mTextureId = -1;
+ private int mWidth, mHeight, mRotation;
+ private boolean isFlipV, isFlipH, isTextureInner, isTextureChanged, isSizeChanged;
+
+ private RendererCommon.GlDrawer drawer;
+
+ private float[] mTexMatrix = GLUtils.IDENTITY_MATRIX;
+
+ public GLFrameBuffer() {
+
+ }
+
+ public boolean setSize(int width, int height) {
+ if (mWidth != width || mHeight != height) {
+ mWidth = width;
+ mHeight = height;
+ isSizeChanged = true;
+ return true;
+ }
+ return false;
+ }
+
+ public void setRotation(int rotation) {
+ if (mRotation != rotation) {
+ mRotation = rotation;
+ }
+ }
+
+ public void setFlipV(boolean flipV) {
+ if (isFlipV != flipV) {
+ isFlipV = flipV;
+ }
+ }
+
+ public void setFlipH(boolean flipH) {
+ if (isFlipH != flipH) {
+ isFlipH = flipH;
+ }
+ }
+
+ public void setTextureId(int textureId){
+ if(mTextureId != textureId){
+ deleteTexture();
+ mTextureId = textureId;
+ isTextureChanged = true;
+ }
+ }
+
+ public int getTextureId(){
+ return mTextureId;
+ }
+
+ public void setTexMatrix(float[] matrix) {
+ if (matrix != null) {
+ mTexMatrix = matrix;
+ } else {
+ mTexMatrix = GLUtils.IDENTITY_MATRIX;
+ }
+ }
+
+ public void resetTransform(){
+ mTexMatrix = GLUtils.IDENTITY_MATRIX;
+ isFlipH = isFlipV = false;
+ mRotation = 0;
+ }
+
+ public int process(int textureId, int textureType) {
+ if (mWidth <= 0 && mHeight <= 0) {
+ throw new RuntimeException("setSize firstly!");
+ }
+
+ if(mTextureId == -1){
+ mTextureId = createTexture(mWidth, mHeight);
+ bindFramebuffer(mTextureId);
+ isTextureInner = true;
+ }else if(isTextureInner && isSizeChanged){
+ GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0);
+ mTextureId = createTexture(mWidth, mHeight);
+ bindFramebuffer(mTextureId);
+ }else if(isTextureChanged){
+ bindFramebuffer(mTextureId);
+ }
+ isTextureChanged = false;
+ isSizeChanged = false;
+
+ if(drawer == null){
+ drawer = new GlRectDrawer();
+ }
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebufferId);
+ GLUtils.checkGlError("glBindFramebuffer");
+
+ Matrix transform = RendererCommon.convertMatrixToAndroidGraphicsMatrix(mTexMatrix);
+ transform.preTranslate(0.5f, 0.5f);
+ transform.preRotate(mRotation, 0.f, 0.f);
+ transform.preScale(
+ isFlipH ? -1.f: 1.f,
+ isFlipV ? -1.f: 1.f
+ );
+ transform.preTranslate(-0.5f, -0.5f);
+ float[] matrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transform);
+
+ synchronized (EglBase.lock){
+ if(textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES){
+ drawer.drawOes(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight);
+ }else{
+ drawer.drawRgb(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight);
+ }
+ }
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE);
+ GLES20.glFinish();
+
+ return mTextureId;
+ }
+
+ public void release(){
+ deleteTexture();
+ deleteFramebuffer();
+
+ if(drawer != null){
+ drawer.release();
+ drawer = null;
+ }
+ }
+
+
+ private void deleteFramebuffer() {
+ if (mFramebufferId != -1) {
+ GLES20.glDeleteFramebuffers(1, new int[]{mFramebufferId}, 0);
+ mFramebufferId = -1;
+ }
+ }
+
+ public int createTexture(int width, int height){
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+ GLUtils.checkGlError("glGenTextures");
+ int textureId = textures[0];
+
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
+ GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
+ GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
+
+ return textureId;
+ }
+
+ public void resizeTexture(int textureId, int width, int height) {
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
+ GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
+ }
+
+ private void deleteTexture() {
+ if (isTextureInner && mTextureId != -1) {
+ GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0);
+ }
+ isTextureInner = false;
+ mTextureId = -1;
+ }
+
+ private void bindFramebuffer(int textureId) {
+ if(mFramebufferId == -1){
+ int[] framebuffers = new int[1];
+ GLES20.glGenFramebuffers(1, framebuffers, 0);
+ GLUtils.checkGlError("glGenFramebuffers");
+ mFramebufferId = framebuffers[0];
+ }
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebufferId);
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
+ GLES20.GL_COLOR_ATTACHMENT0,
+ GLES20.GL_TEXTURE_2D,
+ textureId, 0);
+
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE);
+ }
+
+}
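
A sketch of converting a camera OES texture into a 2D texture with this class, assuming the call happens on a GL thread; oesToTexture2D and its parameters are illustrative.

import android.opengl.GLES11Ext
import io.agora.beautyapi.faceunity.utils.egl.GLFrameBuffer

// Assumed camera-frame parameters; must run with a GL context current.
val frameBuffer = GLFrameBuffer()

fun oesToTexture2D(oesTextureId: Int, texMatrix: FloatArray, width: Int, height: Int, rotation: Int): Int {
    frameBuffer.setSize(width, height)   // must be set before process()
    frameBuffer.resetTransform()
    frameBuffer.setRotation(rotation)
    frameBuffer.setTexMatrix(texMatrix)
    // Draws the OES input into an internally managed 2D texture and returns its id.
    return frameBuffer.process(oesTextureId, GLES11Ext.GL_TEXTURE_EXTERNAL_OES)
}
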
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLTextureBufferQueue.kt b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLTextureBufferQueue.kt
new file mode 100644
index 000000000..c8d193f8f
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLTextureBufferQueue.kt
@@ -0,0 +1,180 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2023 Agora Community
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package io.agora.beautyapi.faceunity.utils.egl
+
+import android.opengl.GLES20
+import android.util.Log
+import android.util.Size
+import java.util.concurrent.ConcurrentLinkedQueue
+
+class GLTextureBufferQueue(
+ private val glFrameBuffer: GLFrameBuffer = GLFrameBuffer(),
+ private val cacheCount: Int = 6,
+ private val loggable: Boolean = false
+) {
+ private val TAG = "GLTextureBufferQueue"
+
+ private var cacheIndex = 0
+    private val cacheTextureOuts = arrayOfNulls<TextureOut>(cacheCount)
+    private val textureIdQueue = ConcurrentLinkedQueue<TextureOut>()
+
+
+ fun enqueue(iN: TextureIn): Int {
+ var size = textureIdQueue.size
+ if (size < cacheCount) {
+ var out = cacheTextureOuts[cacheIndex]
+ val outSize = when (iN.rotation) {
+ 90, 270 -> Size(iN.height, iN.width)
+ else -> Size(iN.width, iN.height)
+ }
+
+ if (out == null) {
+ val textureId = glFrameBuffer.createTexture(outSize.width, outSize.height)
+ out = TextureOut(
+ 0,
+ textureId,
+ GLES20.GL_TEXTURE_2D,
+ outSize.width,
+ outSize.height,
+ iN.isFrontCamera,
+ iN.isMirror,
+ )
+ cacheTextureOuts[cacheIndex] = out
+ } else if (out.width != outSize.width || out.height != outSize.height) {
+ glFrameBuffer.resizeTexture(out.textureId, outSize.width, outSize.height)
+ out = TextureOut(
+ 0,
+ out.textureId,
+ out.textureType,
+ outSize.width,
+ outSize.height,
+ iN.isFrontCamera,
+ iN.isMirror,
+ )
+ cacheTextureOuts[cacheIndex] = out
+ } else if(out.isFrontCamera != iN.isFrontCamera){
+ out = TextureOut(
+ 0,
+ out.textureId,
+ out.textureType,
+ out.width,
+ out.height,
+ iN.isFrontCamera,
+ iN.isMirror,
+ )
+ cacheTextureOuts[cacheIndex] = out
+ }
+
+ glFrameBuffer.textureId = out.textureId
+ glFrameBuffer.setSize(out.width, out.height)
+ glFrameBuffer.resetTransform()
+ glFrameBuffer.setRotation(iN.rotation)
+ if (iN.transform != null) {
+ glFrameBuffer.setTexMatrix(iN.transform)
+ var flipH = iN.isFrontCamera
+ if(iN.isMirror){
+ flipH = !flipH
+ }
+ glFrameBuffer.setFlipH(flipH)
+ } else {
+ var flipH = !iN.isFrontCamera
+ if(iN.isMirror){
+ flipH = !flipH
+ }
+ glFrameBuffer.setFlipH(flipH)
+ }
+ glFrameBuffer.setFlipV(iN.flipV)
+ glFrameBuffer.process(iN.textureId, iN.textureType)
+ out.index = cacheIndex
+ out.tag = iN.tag
+ textureIdQueue.offer(out)
+ if(loggable){
+ Log.d(TAG, "TextureIdQueue enqueue index=$cacheIndex, size=$size")
+ }
+ cacheIndex = (cacheIndex + 1) % cacheCount
+ size++
+
+ } else {
+ if(loggable){
+ Log.e(TAG, "TextureIdQueue is full!!")
+ }
+ }
+
+ return size
+ }
+
+ fun dequeue(remove: Boolean = true): TextureOut? {
+ val size = textureIdQueue.size
+ val poll = if(remove){
+ textureIdQueue.poll()
+ }else{
+ textureIdQueue.peek()
+ }
+ if(loggable){
+ Log.d(TAG, "TextureIdQueue dequeue index=${poll?.index}, size=$size")
+ }
+ return poll
+ }
+
+ fun reset() {
+ cacheIndex = 0
+ textureIdQueue.clear()
+ }
+
+ fun release() {
+ cacheIndex = 0
+ cacheTextureOuts.forEachIndexed { index, textureOut ->
+ if (textureOut != null) {
+ GLES20.glDeleteTextures(1, intArrayOf(textureOut.textureId), 0)
+ cacheTextureOuts[index] = null
+ }
+ }
+ textureIdQueue.clear()
+ glFrameBuffer.release()
+ }
+
+ data class TextureIn(
+ val textureId: Int,
+ val textureType: Int,
+ val width: Int,
+ val height: Int,
+ val rotation: Int,
+ val flipV: Boolean,
+ val isFrontCamera: Boolean,
+ val isMirror: Boolean,
+ val transform: FloatArray?,
+ val tag: Any? = null
+ )
+
+ data class TextureOut(
+ var index: Int = 0,
+ val textureId: Int,
+ val textureType: Int,
+ val width: Int,
+ val height: Int,
+ val isFrontCamera: Boolean,
+ var tag: Any? = null
+ )
+}
\ No newline at end of file
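
A sketch of the intended enqueue/dequeue flow, assuming a capture thread with a GL context current; the frame values and onCameraFrame name are placeholders.

import android.opengl.GLES11Ext
import android.util.Log
import io.agora.beautyapi.faceunity.utils.egl.GLTextureBufferQueue

// enqueue() copies the input into a cached 2D texture,
// dequeue() hands back the oldest copy for further processing.
val bufferQueue = GLTextureBufferQueue(cacheCount = 2)

fun onCameraFrame(oesTextureId: Int, width: Int, height: Int, rotation: Int, transform: FloatArray) {
    bufferQueue.enqueue(
        GLTextureBufferQueue.TextureIn(
            textureId = oesTextureId,
            textureType = GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            width = width,
            height = height,
            rotation = rotation,
            flipV = false,
            isFrontCamera = true,
            isMirror = false,
            transform = transform
        )
    )
    val out = bufferQueue.dequeue() ?: return
    // out.textureId is now a GL_TEXTURE_2D copy sized out.width x out.height.
    Log.d("Capture", "copied to texture ${out.textureId} (${out.width}x${out.height})")
}
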
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLUtils.java b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLUtils.java
new file mode 100644
index 000000000..e56f743ec
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLUtils.java
@@ -0,0 +1,279 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2023 Agora Community
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package io.agora.beautyapi.faceunity.utils.egl;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.IntBuffer;
+import java.util.Objects;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLContext;
+
+import io.agora.beautyapi.faceunity.utils.LogUtils;
+
+public class GLUtils {
+ private static final String TAG = "GLUtils";
+ public static final float[] IDENTITY_MATRIX = new float[16];
+
+ static {
+ Matrix.setIdentityM(IDENTITY_MATRIX, 0);
+ }
+
+ private GLUtils() {
+ }
+
+ public static Bitmap getTexture2DImage(int textureID, int width, int height) {
+ try {
+ int[] oldFboId = new int[1];
+ GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId));
+
+ int[] framebuffers = new int[1];
+ GLES20.glGenFramebuffers(1, framebuffers, 0);
+ int framebufferId = framebuffers[0];
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId);
+
+ int[] renderbuffers = new int[1];
+ GLES20.glGenRenderbuffers(1, renderbuffers, 0);
+ int renderId = renderbuffers[0];
+ GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId);
+ GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);
+
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureID, 0);
+ GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId);
+ if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+ LogUtils.e(TAG, "Framebuffer error");
+ }
+
+ ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
+ rgbaBuf.position(0);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);
+
+ Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(rgbaBuf);
+
+            GLES20.glDeleteRenderbuffers(1, IntBuffer.wrap(renderbuffers));
+            GLES20.glDeleteFramebuffers(1, IntBuffer.wrap(framebuffers));
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]);
+
+ return bitmap;
+ } catch (Exception e) {
+ LogUtils.e(TAG, e.toString());
+ }
+ return null;
+ }
+
+ public static Bitmap getTextureOESImage(int textureID, int width, int height) {
+ try {
+ int[] oldFboId = new int[1];
+ GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId));
+
+ int[] framebuffers = new int[1];
+ GLES20.glGenFramebuffers(1, framebuffers, 0);
+ int framebufferId = framebuffers[0];
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId);
+
+ int[] renderbuffers = new int[1];
+ GLES20.glGenRenderbuffers(1, renderbuffers, 0);
+ int renderId = renderbuffers[0];
+ GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId);
+ GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);
+
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID, 0);
+ GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId);
+ if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+ LogUtils.e(TAG, "Framebuffer error");
+ }
+
+ ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
+ rgbaBuf.position(0);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);
+
+ Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(rgbaBuf);
+
+            GLES20.glDeleteRenderbuffers(1, IntBuffer.wrap(renderbuffers));
+            GLES20.glDeleteFramebuffers(1, IntBuffer.wrap(framebuffers));
+
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]);
+
+ return bitmap;
+ } catch (Exception e) {
+ LogUtils.e(TAG, e.toString());
+ }
+ return null;
+ }
+
+ public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
+ Bitmap bitmap = null;
+ try {
+ YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
+ ByteArrayOutputStream stream = new ByteArrayOutputStream();
+ image.compressToJpeg(new Rect(0, 0, width, height), 80, stream);
+ bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
+ stream.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return bitmap;
+ }
+
+ private static Bitmap readBitmap(int width, int height) {
+ ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
+ rgbaBuf.position(0);
+ GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);
+
+ Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ bitmap.copyPixelsFromBuffer(rgbaBuf);
+ return bitmap;
+ }
+
+ public static float[] createTransformMatrix(int rotation, boolean flipH, boolean flipV) {
+ float[] renderMVPMatrix = new float[16];
+ float[] tmp = new float[16];
+ Matrix.setIdentityM(tmp, 0);
+
+ boolean _flipH = flipH;
+ boolean _flipV = flipV;
+ if (rotation % 180 != 0) {
+ _flipH = flipV;
+ _flipV = flipH;
+ }
+
+ if (_flipH) {
+ Matrix.rotateM(tmp, 0, tmp, 0, 180, 0, 1f, 0);
+ }
+ if (_flipV) {
+ Matrix.rotateM(tmp, 0, tmp, 0, 180, 1f, 0f, 0);
+ }
+
+ float _rotation = rotation;
+ if (_rotation != 0) {
+ if (_flipH != _flipV) {
+ _rotation *= -1;
+ }
+ Matrix.rotateM(tmp, 0, tmp, 0, _rotation, 0, 0, 1);
+ }
+
+ Matrix.setIdentityM(renderMVPMatrix, 0);
+ Matrix.multiplyMM(renderMVPMatrix, 0, tmp, 0, renderMVPMatrix, 0);
+ return renderMVPMatrix;
+ }
+
+ public static EGLContext getCurrGLContext() {
+ EGL10 egl = (EGL10) javax.microedition.khronos.egl.EGLContext.getEGL();
+ if (egl != null && !Objects.equals(egl.eglGetCurrentContext(), EGL10.EGL_NO_CONTEXT)) {
+ return egl.eglGetCurrentContext();
+ }
+ return null;
+ }
+
+ public static void checkGlError(String op) {
+ int error = GLES20.glGetError();
+ if (error != GLES20.GL_NO_ERROR) {
+ String msg = op + ": glError 0x" + Integer.toHexString(error);
+ LogUtils.e(TAG, msg);
+ throw new RuntimeException(msg);
+ }
+ }
+
+ public static int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+ int program = GLES20.glCreateProgram();
+ checkGlError("glCreateProgram");
+ if (program == 0) {
+ LogUtils.e(TAG, "Could not create program");
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ LogUtils.e(TAG, "Could not link program: ");
+ LogUtils.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+ }
+
+ public static int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ checkGlError("glCreateShader type=" + shaderType);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ LogUtils.e(TAG, "Could not compile shader " + shaderType + ":");
+ LogUtils.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+ }
+
+ public static int createTexture(int textureTarget, Bitmap bitmap, int minFilter,
+ int magFilter, int wrapS, int wrapT) {
+ int[] textureHandle = new int[1];
+
+ GLES20.glGenTextures(1, textureHandle, 0);
+ checkGlError("glGenTextures");
+ GLES20.glBindTexture(textureTarget, textureHandle[0]);
+ checkGlError("glBindTexture " + textureHandle[0]);
+ GLES20.glTexParameterf(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, minFilter);
+        GLES20.glTexParameterf(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, magFilter); //linear filtering
+ GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, wrapS);
+ GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_T, wrapT);
+
+ if (bitmap != null) {
+ android.opengl.GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
+ }
+
+ checkGlError("glTexParameter");
+ return textureHandle[0];
+ }
+}
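
A small sketch of the program and texture helpers, assuming trivial pass-through shaders and a GL thread; the shader sources and function names are illustrative.

import android.opengl.GLES20
import io.agora.beautyapi.faceunity.utils.egl.GLUtils

// Assumed pass-through shaders, only to show createProgram()/createTexture().
private const val VERTEX_SHADER = """
attribute vec4 aPosition;
void main() { gl_Position = aPosition; }
"""

private const val FRAGMENT_SHADER = """
precision mediump float;
void main() { gl_FragColor = vec4(1.0); }
"""

// Compiles and links the program, returning 0 on failure.
fun buildProgram(): Int = GLUtils.createProgram(VERTEX_SHADER, FRAGMENT_SHADER)

// Creates a 2D texture object configured with linear filtering and edge clamping.
fun buildBlankTexture(): Int = GLUtils.createTexture(
    GLES20.GL_TEXTURE_2D, null,
    GLES20.GL_LINEAR, GLES20.GL_LINEAR,
    GLES20.GL_CLAMP_TO_EDGE, GLES20.GL_CLAMP_TO_EDGE
)
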
diff --git a/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt
new file mode 100644
index 000000000..1451750b4
--- /dev/null
+++ b/lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt
@@ -0,0 +1,214 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2023 Agora Community
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package io.agora.beautyapi.faceunity.utils.egl
+
+import android.opengl.GLES20
+import io.agora.beautyapi.faceunity.utils.LogUtils
+import java.util.concurrent.Callable
+import java.util.concurrent.ConcurrentLinkedQueue
+import java.util.concurrent.CountDownLatch
+import java.util.concurrent.Executors
+import java.util.concurrent.Future
+import javax.microedition.khronos.egl.EGLContext
+
+class TextureProcessHelper(
+ private val cacheCount: Int = 2
+) {
+ private val TAG = "TextureProcessHelper"
+ private val glTextureBufferQueueIn = GLTextureBufferQueue(cacheCount = cacheCount, loggable = true)
+ private val glTextureBufferQueueOut = GLTextureBufferQueue(cacheCount = cacheCount, loggable = false)
+ private val glFrameBuffer = GLFrameBuffer()
+    private val futureQueue = ConcurrentLinkedQueue<Future<Int>>()
+ private val workerThread = Executors.newSingleThreadExecutor()
+ private val eglContextHelper =
+ EGLContextHelper()
+ private var eglContextBase: EGLContext? = null
+ private var isReleased = false
+ private var filter: ((GLTextureBufferQueue.TextureOut) -> Int)? = null
+ private var isBegin = false
+ private var frameIndex = 0
+
+ fun setFilter(filter: (GLTextureBufferQueue.TextureOut) -> Int) {
+ this.filter = filter
+ }
+
+ fun process(
+ texId: Int, texType: Int,
+ width: Int, height: Int, rotation: Int,
+ transform: FloatArray,
+ isFrontCamera: Boolean,
+ isMirror: Boolean
+ ): Int {
+ if (isReleased) {
+ return -1
+ }
+ val currGLContext = GLUtils.getCurrGLContext() ?: return -1
+
+ if (eglContextBase == null) {
+ eglContextBase = currGLContext
+ executeSync {
+ eglContextHelper.initEGL(eglContextBase)
+ eglContextHelper.eglMakeCurrent()
+ }
+ } else if (eglContextBase != currGLContext) {
+ eglContextBase = currGLContext
+ executeSync {
+ eglContextHelper.release()
+ eglContextHelper.initEGL(eglContextBase)
+ eglContextHelper.eglMakeCurrent()
+ }
+ }
+
+ glTextureBufferQueueIn.enqueue(
+ GLTextureBufferQueue.TextureIn(
+ texId,
+ texType,
+ width,
+ height,
+ rotation,
+ false,
+ isFrontCamera,
+ isMirror,
+ transform,
+ frameIndex
+ )
+ )
+ frameIndex ++
+
+ if (isReleased) {
+ return -1
+ }
+
+ futureQueue.offer(workerThread.submit(Callable {
+ if (isReleased) {
+ return@Callable -2
+ }
+
+ val frame = glTextureBufferQueueIn.dequeue(false) ?: return@Callable -2
+ val filterTexId = filter?.invoke(frame) ?: -1
+ if (filterTexId >= 0) {
+ glTextureBufferQueueOut.enqueue(
+ GLTextureBufferQueue.TextureIn(
+ filterTexId,
+ GLES20.GL_TEXTURE_2D,
+ frame.width,
+ frame.height,
+ 0,
+ false,
+ false,
+ true,
+ null,
+ frame.tag
+ )
+ )
+ } else {
+ glTextureBufferQueueOut.enqueue(
+ GLTextureBufferQueue.TextureIn(
+ frame.textureId,
+ frame.textureType,
+ frame.width,
+ frame.height,
+ 0,
+ false,
+ false,
+ true,
+ null,
+ frame.tag
+ )
+ )
+ }
+ glTextureBufferQueueIn.dequeue(true)
+ return@Callable 0
+ }))
+
+ var ret = 0
+ if (isBegin || futureQueue.size >= cacheCount) {
+ isBegin = true
+ try {
+ val get = futureQueue.poll()?.get() ?: -1
+ if (get == 0) {
+ val dequeue = glTextureBufferQueueOut.dequeue() ?: return -1
+ glFrameBuffer.setSize(dequeue.width, dequeue.height)
+ ret = glFrameBuffer.process(dequeue.textureId, dequeue.textureType)
+ }
+ }catch (e: Exception){
+ LogUtils.e(TAG, "process end with exception: $e")
+ }
+ }
+
+ return ret
+ }
+
+ fun reset(){
+ if(frameIndex == 0){
+ return
+ }
+ isBegin = false
+ frameIndex = 0
+ var future = futureQueue.poll()
+ while (future != null) {
+ future.cancel(true)
+ future = futureQueue.poll()
+ }
+ glTextureBufferQueueIn.reset()
+// glFrameBuffer.release()
+ executeSync {
+ glTextureBufferQueueOut.reset()
+ }
+ }
+
+ fun size() = futureQueue.size
+
+ fun release() {
+ isReleased = true
+ filter = null
+ isBegin = false
+ frameIndex = 0
+ var future = futureQueue.poll()
+ while (future != null) {
+ future.cancel(true)
+ future = futureQueue.poll()
+ }
+ glTextureBufferQueueIn.release()
+ glFrameBuffer.release()
+ executeSync {
+ glTextureBufferQueueOut.release()
+ if (eglContextBase != null) {
+ eglContextHelper.release()
+ eglContextBase = null
+ }
+ }
+ workerThread.shutdown()
+ }
+
+ fun executeSync(run: () -> Unit) {
+ val latch = CountDownLatch(1)
+ workerThread.execute {
+ run.invoke()
+ latch.countDown()
+ }
+ latch.await()
+ }
+}
\ No newline at end of file
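
A sketch of how the helper ties the two queues together, assuming the filter callback performs the beauty rendering on the helper's worker thread; renderBeauty and onCaptureFrame are placeholders for the real FaceUnity calls.

import android.opengl.GLES11Ext
import io.agora.beautyapi.faceunity.utils.egl.GLTextureBufferQueue
import io.agora.beautyapi.faceunity.utils.egl.TextureProcessHelper

// Placeholder for whatever beauty call produces a processed 2D texture on the
// worker thread; returning -1 makes the helper pass the frame through unchanged.
fun renderBeauty(frame: GLTextureBufferQueue.TextureOut): Int = -1

// Assumed wiring on the capture thread.
val processHelper = TextureProcessHelper(cacheCount = 2).apply {
    setFilter { frame -> renderBeauty(frame) }
}

fun onCaptureFrame(oesTextureId: Int, width: Int, height: Int, rotation: Int, transform: FloatArray): Int {
    // Returns the processed 2D texture id once the queue is primed; 0 or -1 otherwise.
    return processHelper.process(
        oesTextureId, GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
        width, height, rotation, transform,
        isFrontCamera = true, isMirror = false
    )
}
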
diff --git a/live/build.gradle b/live/build.gradle
index b9a209670..6f1d3dc42 100644
--- a/live/build.gradle
+++ b/live/build.gradle
@@ -1 +1 @@
-apply plugin: 'com.android.library'
apply plugin: 'img-optimizer'
apply plugin: 'kotlin-android'
android {
compileSdkVersion rootProject.ext.android.compileSdkVersion
buildToolsVersion rootProject.ext.android.buildToolsVersion
aaptOptions.cruncherEnabled = false
aaptOptions.useNewCruncher = false
packagingOptions {
pickFirst "lib/armeabi/libyuvutils.so"
pickFirst "lib/arm64-v8a/libyuvutils.so"
pickFirst "lib/armeabi-v7a/libyuvutils.so"
pickFirst "lib/armeabi/libyuvtools.so"
pickFirst "lib/arm64-v8a/libyuvtools.so"
pickFirst "lib/armeabi-v7a/libyuvtools.so"
exclude "lib/arm64-v8a/libmmcv_api_handgesture.so"
exclude "lib/arm64-v8a/libmmcv_api_express.so"
exclude "lib/arm64-v8a/libMediaEncoder.so"
exclude "lib/arm64-v8a/libarcore_sdk_c.so"
exclude "lib/arm64-v8a/libmediadecoder.so"
exclude "lib/arm64-v8a/libMediaMuxer.so"
exclude "lib/arm64-v8a/libarcore_sdk_jni.so"
exclude "lib/arm64-v8a/libMediaUtils.so"
exclude "lib/arm64-v8a/libcosmosffmpeg.so"
}
defaultConfig {
minSdkVersion rootProject.ext.android.minSdkVersion
targetSdkVersion rootProject.ext.android.targetSdkVersion
versionCode rootProject.ext.android.versionCode
versionName rootProject.ext.android.versionName
manifestPlaceholders = rootProject.ext.manifestPlaceholders
ndk {
abiFilters "armeabi-v7a", "arm64-v8a"
}
javaCompileOptions {
annotationProcessorOptions {
arguments = [AROUTER_MODULE_NAME: project.getName()]
}
}
}
aaptOptions {
cruncherEnabled = false
useNewCruncher = false
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
kotlinOptions {
allWarningsAsErrors = true
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
repositories {
flatDir {
dirs 'libs', '../libs'
}
mavenCentral()
}
dependencies {
implementation 'androidx.constraintlayout:constraintlayout:2.0.0'
implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar'])
implementation (name:'../libs/beautysdk-202202241203',ext:'aar')
implementation (name:'../libs/svgaplayer-release-v1.2.1',ext:'aar')
//socket.io
implementation('io.socket:socket.io-client:1.0.0') {
exclude group: 'org.json', module: 'json'
}
//common
api project(path: ':common')
api project(path:':FaceUnity')//新娱美颜
api project(':Share')//分享
annotationProcessor rootProject.ext.dependencies["arouter-compiler"]
//工具
api rootProject.ext.dependencies["blank-utilcode"]
implementation 'com.eightbitlab:blurview:1.6.6'
implementation 'com.google.code.gson:gson:2.8.6'
implementation "com.getkeepsafe.relinker:relinker:1.4.4"
//ExoPlayer,腾讯的播放器不支持无缝切换
implementation 'com.google.android.exoplayer:exoplayer:2.18.2'
implementation 'com.google.android.exoplayer:exoplayer-core:2.18.2@aar'
}
\ No newline at end of file
+apply plugin: 'com.android.library'
apply plugin: 'img-optimizer'
apply plugin: 'kotlin-android'
android {
compileSdkVersion rootProject.ext.android.compileSdkVersion
buildToolsVersion rootProject.ext.android.buildToolsVersion
aaptOptions.cruncherEnabled = false
aaptOptions.useNewCruncher = false
packagingOptions {
pickFirst "lib/armeabi/libyuvutils.so"
pickFirst "lib/arm64-v8a/libyuvutils.so"
pickFirst "lib/armeabi-v7a/libyuvutils.so"
pickFirst "lib/armeabi/libyuvtools.so"
pickFirst "lib/arm64-v8a/libyuvtools.so"
pickFirst "lib/armeabi-v7a/libyuvtools.so"
exclude "lib/arm64-v8a/libmmcv_api_handgesture.so"
exclude "lib/arm64-v8a/libmmcv_api_express.so"
exclude "lib/arm64-v8a/libMediaEncoder.so"
exclude "lib/arm64-v8a/libarcore_sdk_c.so"
exclude "lib/arm64-v8a/libmediadecoder.so"
exclude "lib/arm64-v8a/libMediaMuxer.so"
exclude "lib/arm64-v8a/libarcore_sdk_jni.so"
exclude "lib/arm64-v8a/libMediaUtils.so"
exclude "lib/arm64-v8a/libcosmosffmpeg.so"
}
defaultConfig {
minSdkVersion rootProject.ext.android.minSdkVersion
targetSdkVersion rootProject.ext.android.targetSdkVersion
versionCode rootProject.ext.android.versionCode
versionName rootProject.ext.android.versionName
manifestPlaceholders = rootProject.ext.manifestPlaceholders
ndk {
abiFilters "armeabi-v7a", "arm64-v8a"
}
javaCompileOptions {
annotationProcessorOptions {
arguments = [AROUTER_MODULE_NAME: project.getName()]
}
}
}
aaptOptions {
cruncherEnabled = false
useNewCruncher = false
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
kotlinOptions {
allWarningsAsErrors = true
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
repositories {
flatDir {
dirs 'libs', '../libs'
}
mavenCentral()
}
dependencies {
implementation 'androidx.constraintlayout:constraintlayout:2.0.0'
implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar'])
implementation (name:'../libs/beautysdk-202202241203',ext:'aar')
implementation (name:'../libs/svgaplayer-release-v1.2.1',ext:'aar')
//socket.io
implementation('io.socket:socket.io-client:1.0.0') {
exclude group: 'org.json', module: 'json'
}
//common
api project(path:':lib_faceunity')//新娱美颜
api project(':Share')//分享
annotationProcessor rootProject.ext.dependencies["arouter-compiler"]
//工具
api rootProject.ext.dependencies["blank-utilcode"]
implementation 'com.eightbitlab:blurview:1.6.6'
implementation 'com.google.code.gson:gson:2.8.6'
implementation "com.getkeepsafe.relinker:relinker:1.4.4"
//ExoPlayer,腾讯的播放器不支持无缝切换
implementation 'com.google.android.exoplayer:exoplayer:2.18.2'
implementation 'com.google.android.exoplayer:exoplayer-core:2.18.2@aar'
}
\ No newline at end of file
diff --git a/live/src/main/java/com/yunbao/live/activity/LiveRyAnchorActivity.java b/live/src/main/java/com/yunbao/live/activity/LiveRyAnchorActivity.java
index 6154ec5ad..a8a7db008 100644
--- a/live/src/main/java/com/yunbao/live/activity/LiveRyAnchorActivity.java
+++ b/live/src/main/java/com/yunbao/live/activity/LiveRyAnchorActivity.java
@@ -272,6 +272,13 @@ public class LiveRyAnchorActivity extends LiveActivity implements LiveFunctionCl
RandomPkManager.getInstance().addOnRandomPkTimer(onRandomPkTimer);
+ manager.setOnMirrorChanged(new FaceManager.OnMirrorChanged() {
+ @Override
+ public void onChange(boolean flag) {
+ mLivePushViewHolder.setEnableBeauty(flag);
+ }
+ });
+
//添加开播前设置控件
mLiveReadyViewHolder = new LiveNewReadyRyViewHolder(mContext, mContainer, mLiveSDK);
mLiveReadyViewHolder.setManager(manager);
@@ -1476,7 +1483,7 @@ public class LiveRyAnchorActivity extends LiveActivity implements LiveFunctionCl
@Override
public void onLinkMicToPk(String uid, String pkhead, String pkname) {
- L.eSw("onLinkMicToPkonLinkMicToPkonLinkMicToPk");
+
}
@Override
diff --git a/live/src/main/java/com/yunbao/live/dialog/LiveFaceUnityDialogNewFragment.java b/live/src/main/java/com/yunbao/live/dialog/LiveFaceUnityDialogNewFragment.java
new file mode 100644
index 000000000..6e3f25227
--- /dev/null
+++ b/live/src/main/java/com/yunbao/live/dialog/LiveFaceUnityDialogNewFragment.java
@@ -0,0 +1,87 @@
+package com.yunbao.live.dialog;
+
+import android.app.ActionBar;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.os.Bundle;
+import android.view.Gravity;
+import android.view.View;
+import android.view.Window;
+import android.view.WindowManager;
+
+import androidx.annotation.Nullable;
+
+import com.yunbao.common.dialog.AbsDialogFragment;
+import com.yunbao.faceunity.FaceManager;
+import com.yunbao.faceunity.ui.FaceUnityView;
+import com.yunbao.live.R;
+
+/**
+ * New FaceUnity beauty SDK; not in use for now, do not delete
+ */
+public class LiveFaceUnityDialogNewFragment extends AbsDialogFragment {
+ private Context mContext;
+ private FaceUnityView faceView;
+ private FaceManager manager;
+ private View mRootView;
+
+ public LiveFaceUnityDialogNewFragment(Context mContext) {
+ this.mContext = mContext;
+ }
+
+ @Override
+ protected int getLayoutId() {
+ return 0;
+ }
+
+ @Override
+ protected View getLayoutView() {
+ faceView = new FaceUnityView(mContext);
+ return faceView;
+ }
+
+ @Override
+ protected int getDialogStyle() {
+ return R.style.dialog4;
+ }
+
+ @Override
+ protected boolean canCancel() {
+ return true;
+ }
+
+ @Override
+ protected void setWindowAttributes(Window window) {
+
+ WindowManager.LayoutParams params = window.getAttributes();
+ params.width = ActionBar.LayoutParams.MATCH_PARENT;
+ params.height = ActionBar.LayoutParams.WRAP_CONTENT;
+ params.gravity = Gravity.BOTTOM;
+ window.setAttributes(params);
+
+ }
+
+ @Override
+ public void onActivityCreated(@Nullable Bundle savedInstanceState) {
+ super.onActivityCreated(savedInstanceState);
+ manager.setFaceUnityView(faceView);
+ manager.loadConfig();
+ }
+
+ @Override
+ public void onDismiss(DialogInterface dialog) {
+ super.onDismiss(dialog);
+ if (mRootView != null) {
+ mRootView.setVisibility(View.VISIBLE);
+ }
+ }
+
+ public void setManager(FaceManager manager) {
+ this.manager = manager;
+ }
+
+
+ public void setDismissShowUi(View mRootView) {
+ this.mRootView=mRootView;
+ }
+}
diff --git a/live/src/main/java/com/yunbao/live/presenter/LiveRyLinkMicPkPresenter.java b/live/src/main/java/com/yunbao/live/presenter/LiveRyLinkMicPkPresenter.java
index 2fd16e034..cd382ddeb 100644
--- a/live/src/main/java/com/yunbao/live/presenter/LiveRyLinkMicPkPresenter.java
+++ b/live/src/main/java/com/yunbao/live/presenter/LiveRyLinkMicPkPresenter.java
@@ -68,6 +68,7 @@ import com.yunbao.live.socket.SocketRyClient;
import com.yunbao.live.socket.SocketRyLinkMicPkUtil;
import com.yunbao.live.socket.SocketSendBean;
import com.yunbao.live.views.LiveLinkMicPkViewHolder;
+import com.yunbao.live.views.LivePlayRyViewHolder;
import com.yunbao.live.views.LivePushRyViewHolder;
import org.greenrobot.eventbus.EventBus;
@@ -685,112 +686,10 @@ public class LiveRyLinkMicPkPresenter implements View.OnClickListener {
EventBus.getDefault().post(new AnchorInfoEvent(false, u.getId(), u.getUserNiceName(), u.getAvatar()));
Log.e("eve", u.getId() + "");
L.eSw("主播接受了主播的PK邀請");
- /**
- * 加入副房间
- * 前提必须已经 通过 {@link RCRTCEngine#joinRoom(String, RCRTCRoomType, IRCRTCResultDataCallback)} 或 {@link RCRTCEngine#joinRoom(String, IRCRTCResultDataCallback)} 加入了主房间
- *
- * @param roomId 房间 ID ,长度 64 个字符,可包含:`A-Z`、`a-z`、`0-9`、`+`、`=`、`-`、`_`
- * @param callBack 加入房间回调
- * @group 房间管理
- */
RandomPkManager.getInstance().setPkStatus(RandomPkManager.PK_STATUS_START);
- /*RCRTCEngine.getInstance().joinOtherRoom(u.getId(), new IRCRTCResultDataCallback() {
- @Override
- public void onSuccess(RCRTCOtherRoom rcrtcOtherRoom) {
- rcrtcOtherRoom.registerOtherRoomEventsListener(otherRoomEventsListener);
- new Handler(Looper.getMainLooper()).post(new Runnable() {
- public void run() {
- //遍历远端用户列表
- for (int i = 0; i < rcrtcOtherRoom.getRemoteUsers().size(); i++) {
- //遍历远端用户发布的资源列表
- for (RCRTCInputStream stream : rcrtcOtherRoom.getRemoteUsers().get(i).getStreams()) {
- Log.e("ry", stream.getMediaType() + "类型");
- if (stream.getMediaType() == RCRTCMediaType.VIDEO) {
- //如果远端用户发布的是视频流,创建显示视图RCRTCVideoView,并添加到布局中显示
- RCRTCVideoView remoteView = new RCRTCVideoView(contexts);
- ((RCRTCVideoInputStream) stream).setVideoView(remoteView);
- //todo 本demo只演示添加1个远端用户的视图
- livePushRyViewHolder.mPreView1.removeAllViews();
- remoteView.setScalingType(SCALE_ASPECT_FILL);
- livePushRyViewHolder.mPreView1.addView(remoteView);
- }
- //如果要订阅所有远端用户的流。保存所有流信息,方便后面统一订阅
- inputStreamList.add(stream);
-
-
- RCRTCMixConfig config = new RCRTCMixConfig();
- RCRTCMixConfig.MediaConfig mediaConfig = new RCRTCMixConfig.MediaConfig();
- config.setMediaConfig(mediaConfig);
-//视频输出配置
- RCRTCMixConfig.MediaConfig.VideoConfig videoConfig = new RCRTCMixConfig.MediaConfig.VideoConfig();
- mediaConfig.setVideoConfig(videoConfig);
-//大流视频的输出参数
- RCRTCMixConfig.MediaConfig.VideoConfig.VideoLayout normal = new RCRTCMixConfig.MediaConfig.VideoConfig.VideoLayout();
- videoConfig.setVideoLayout(normal);
-//推荐宽、高、帧率参数值可以通过默认视频流的配置获取,也可以根据实际需求来自定义设置
-//如不设置宽高值则服务端将使用默认宽高 360 * 640
-//例:发布的视频分辨率为720 * 1280,如果不设置则观众端看到的视频分辨率为 360 * 640,
-//所以如果想让观众端看到的视频分辨率和发布视频分辨率一致,则应从发布的视频流中获取分辨率配置并设置到 mediaConfig 中
- RCRTCVideoStreamConfig defaultVideoConfig = RCRTCEngine.getInstance().getDefaultVideoStream().getVideoConfig();
- int fps = defaultVideoConfig.getVideoFps().getFps();
- int width = 960;
- int height = 720;
- normal.setWidth(width); //视频宽
- normal.setHeight(height); //视频高
- normal.setFps(fps); //视频帧率
-
- //1. 设置自适应合流布局模式
- config.setLayoutMode(RCRTCMixConfig.MixLayoutMode.ADAPTIVE);
- //2. 合流画布设置
- if (rcrtcLiveInfo == null) {
- Log.w("PkDebug", "PK合流失败,rcrtcLiveInfo为空");
- }
- rcrtcLiveInfo.setMixConfig(config, new IRCRTCResultCallback() {
- @Override
- public void onSuccess() {
- Log.e("ry", "混成功13");
- }
-
- @Override
- public void onFailed(RTCErrorCode errorCode) {
- Log.e("ry", "混失败" + errorCode);
-
- }
- });
- }
- }
- //开始订阅资源
- rtcRoom.getLocalUser().subscribeStreams(inputStreamList, new IRCRTCResultCallback() {
- @Override
- public void onSuccess() {
- Log.i("ry", "订阅资源成功");
- }
-
- @Override
- public void onFailed(RTCErrorCode rtcErrorCode) {
- Log.i("ry", "订阅资源失败: " + rtcErrorCode.getReason());
- ToastUtil.show(mContext.getString(R.string.live_pk_link_error));
- }
- });
- }
- });
- }
-
- @Override
- public void onFailed(RTCErrorCode rtcErrorCode) {
- Log.i("ry", "11111加入其他房间失败 :" + rtcErrorCode.getReason());
- }
- });*/
LivePushRyViewHolder.btn_close.setVisibility(View.VISIBLE);
LiveRyAnchorActivity.isDRPK = 1;
ScreenDimenUtil util = ScreenDimenUtil.getInstance();
- int mScreenWdith = util.getScreenWdith();
- /*LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, mScreenWdith * 720 / 960);
- params.weight = 1;
- params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- livePushRyViewHolder.camera.setLayoutParams(params);
- livePushRyViewHolder.mPreView1.setLayoutParams(params);
- livePushRyViewHolder.mPreView1.setVisibility(View.VISIBLE);*/
livePushRyViewHolder.setAnPkRtc(u);//设置对方主播视图
final SocketSendBean msg1 = new SocketSendBean()
@@ -802,6 +701,7 @@ public class LiveRyLinkMicPkPresenter implements View.OnClickListener {
.param("pkhead", u.getAvatar())
.param("pkname", u.getUserNiceName());
msg1.create();
+ LivePushRyViewHolder.btn_close.setVisibility(View.VISIBLE);
/*Conversation.ConversationType conversationType = Conversation.ConversationType.CHATROOM;
TextMessage messageContent = TextMessage.obtain(msg1.mResult.toString());
@@ -839,19 +739,6 @@ public class LiveRyLinkMicPkPresenter implements View.OnClickListener {
LivePushRyViewHolder.btn_close.setVisibility(View.VISIBLE);
LiveRyAnchorActivity.isDRPK = 1;
- /*JSONObject msg1 = new JSONObject();
- msg1.put("uid", CommonAppConfig.getInstance().getUid());
- msg1.put("pkuid", CommonAppConfig.getInstance().getUid());
- msg1.put("pkhead", CommonAppConfig.getInstance().getUserBean().getAvatarThumb());
- msg1.put("pkname", CommonAppConfig.getInstance().getUserBean().getUserNiceName());
- EventBus.getDefault().post(new AnchorInfoEvent(false, bean.getId(), bean.getUserNiceName(), bean.getAvatar()));
-
- if (bean != null && bean.isRandomPk()) {
- msg1.put("random_pk", bean.isRandomPk() ? 1 : 0);
- msg1.put("is_ladders", bean.getRankPkImgUrl());
- }*/
-
-
/*---------------------------------------------------------------- */
final SocketSendBean msg1 = new SocketSendBean()
.param("_method_", SOCKET_LINK_MIC_PK)
@@ -898,116 +785,6 @@ public class LiveRyLinkMicPkPresenter implements View.OnClickListener {
onLinkMicPkStart(mApplyUid, 2);
}
});
-
- /*IMRTCManager.getInstance().responseJoinOtherRoom(mApplyUid, true, msg1.toString(), new IRCRTCResultCallback() {
- @Override
- public void onSuccess() {
- RCRTCEngine.getInstance().joinOtherRoom(mApplyUid, new IRCRTCResultDataCallback() {
- @Override
- public void onSuccess(RCRTCOtherRoom rcrtcOtherRoom) {
- rcrtcOtherRoom.registerOtherRoomEventsListener(otherRoomEventsListener);
- ToastUtil.show(WordUtil.isNewZh() ? "接受成功" : "Success");
- new Handler(Looper.getMainLooper()).post(new Runnable() {
- public void run() {
- for (int i = 0; i < rcrtcOtherRoom.getRemoteUsers().size(); i++) {
- //遍历远端用户发布的资源列表
- for (RCRTCInputStream stream : rcrtcOtherRoom.getRemoteUsers().get(i).getStreams()) {
- if (stream.getMediaType() == RCRTCMediaType.VIDEO) {
- //如果远端用户发布的是视频流,创建显示视图RCRTCVideoView,并添加到布局中显示
- RCRTCVideoView remoteView = new RCRTCVideoView(contexts);
- ((RCRTCVideoInputStream) stream).setVideoView(remoteView);
- //todo 本demo只演示添加1个远端用户的视图
- livePushRyViewHolder.mPreView1.removeAllViews();
- remoteView.setScalingType(SCALE_ASPECT_FILL);
- livePushRyViewHolder.mPreView1.addView(remoteView);
- }
- //如果要订阅所有远端用户的流。保存所有流信息,方便后面统一订阅
- inputStreamList.add(stream);
- }
- }
-
- Log.e("ry", "asa" + inputStreamList.size());
- //开始订阅资源
- rtcRoom.getLocalUser().subscribeStreams(inputStreamList, new IRCRTCResultCallback() {
- @Override
- public void onSuccess() {
- Log.i("ry", "订阅资源成功");
- List streams = new ArrayList<>();
- streams.add(RCRTCEngine.getInstance().getDefaultVideoStream());
- RCRTCMixConfig config = new RCRTCMixConfig();
- RCRTCMixConfig.MediaConfig mediaConfig = new RCRTCMixConfig.MediaConfig();
- config.setMediaConfig(mediaConfig);
-//视频输出配置
- RCRTCMixConfig.MediaConfig.VideoConfig videoConfig = new RCRTCMixConfig.MediaConfig.VideoConfig();
- mediaConfig.setVideoConfig(videoConfig);
-//大流视频的输出参数
- RCRTCMixConfig.MediaConfig.VideoConfig.VideoLayout normal = new RCRTCMixConfig.MediaConfig.VideoConfig.VideoLayout();
- videoConfig.setVideoLayout(normal);
-//推荐宽、高、帧率参数值可以通过默认视频流的配置获取,也可以根据实际需求来自定义设置
-//如不设置宽高值则服务端将使用默认宽高 360 * 640
-//例:发布的视频分辨率为720 * 1280,如果不设置则观众端看到的视频分辨率为 360 * 640,
-//所以如果想让观众端看到的视频分辨率和发布视频分辨率一致,则应从发布的视频流中获取分辨率配置并设置到 mediaConfig 中
- RCRTCVideoStreamConfig defaultVideoConfig = RCRTCEngine.getInstance().getDefaultVideoStream().getVideoConfig();
- int fps = defaultVideoConfig.getVideoFps().getFps();
- int width = 960;
- int height = 720;
- normal.setWidth(width); //视频宽
- normal.setHeight(height); //视频高
- normal.setFps(fps); //视频帧率
-
- //1. 设置自适应合流布局模式
- config.setLayoutMode(RCRTCMixConfig.MixLayoutMode.ADAPTIVE);
- //2. 合流画布设置
- rcrtcLiveInfo.setMixConfig(config, new IRCRTCResultCallback() {
- @Override
- public void onSuccess() {
- Log.e("ry", "混成功14");
- }
-
- @Override
- public void onFailed(RTCErrorCode errorCode) {
- Log.e("ry", "混失败" + errorCode);
-
- }
- });
- }
-
- @Override
- public void onFailed(RTCErrorCode rtcErrorCode) {
- Log.i("ry", "订阅资源失败: " + rtcErrorCode);
- ToastUtil.show(mContext.getString(R.string.live_pk_link_error));
- }
- });
- }
- });
- }
-
- @Override
- public void onFailed(RTCErrorCode rtcErrorCode) {
- Log.e("ry", mApplyUid + "加入其他房间失败 :" + rtcErrorCode);
- Log.i("ry", mApplyUid + "加入其他房间失败 :" + rtcErrorCode);
- }
- });
-
- new Handler(Looper.getMainLooper()).post(new Runnable() {
- public void run() {
- Bus.get().post(new LiveAudienceEvent()
- .setType(LiveAudienceEvent.LiveAudienceType.UN_LEAVELIVE));
- LiveRyAnchorActivity.isDRPK = 1;
- LivePushRyViewHolder.btn_close.setVisibility(View.VISIBLE);
- SocketRyLinkMicPkUtil.linkMicPkAccept(mSocketRyClient, mApplyUid, mApplyUrl, mApplyNmae);
- EventBus.getDefault().post(new LiveAudienceEvent().setType(LiveAudienceEvent.LiveAudienceType.UP_PK_TWO).setObject(mApplyUid));
- onLinkMicPkStart(mApplyUid, 2);
- }
- });
- }
-
- @Override
- public void onFailed(RTCErrorCode errorCode) {
- ToastUtil.show("接受失败");
- }
- });*/
-
}
//与用户连麦
@@ -2321,7 +2098,6 @@ public class LiveRyLinkMicPkPresenter implements View.OnClickListener {
if (mIsAnchor) {
((LiveRyAnchorActivity) mContext).setPkBtnVisible(false);
mPkTimeCount = PK_TIME_MAX;
-
} else {
// mPkTimeCount=mPkTimeFromServer;
mPkTimeCount = PK_TIME_MAX;
diff --git a/live/src/main/java/com/yunbao/live/socket/SocketRyClient.java b/live/src/main/java/com/yunbao/live/socket/SocketRyClient.java
index 75b078f2e..c76564e77 100644
--- a/live/src/main/java/com/yunbao/live/socket/SocketRyClient.java
+++ b/live/src/main/java/com/yunbao/live/socket/SocketRyClient.java
@@ -1260,6 +1260,7 @@ public class SocketRyClient {
mListener.onLinkMicPkApply(u, map.getString("stream"), 1);
break;
case 2://收到对方主播PK回调
+ L.eSw("收到对方主播PK回调");
RandomPkManager.getInstance().setPkStatus(RandomPkManager.PK_STATUS_START);
mListener.onLinkMicToPk(map.getString("uid"), map.getString("pkhead"), map.getString("pkname"));
mListener.onLinkMicPkStart(map.getString("uid"), map.getString("pkhead"), map.getString("pkname"), map.getString("is_ladders"));// mListener.onLinkMicPkStart(map.getString("uid"));
@@ -1274,6 +1275,7 @@ public class SocketRyClient {
mListener.onLinkMicPkRefuse();
break;
case 4://所有人收到PK开始址的回调
+ L.eSw("所有人收到PK开始址的回调");
// RandomPkManager.getInstance().setPkStatus(RandomPkManager.PK_STATUS_START);
EventBus.getDefault().post(new LiveAudienceEvent().setType(LiveAudienceEvent.LiveAudienceType.PK_TWO_START).setObject(map.getString("pkuid")));
mListener.onLinkMicPkStart(map.getString("pkuid"), map.getString("pkhead"), map.getString("pkname"), map.getString("is_ladders"));
diff --git a/live/src/main/java/com/yunbao/live/views/LiveNewReadyRyViewHolder.java b/live/src/main/java/com/yunbao/live/views/LiveNewReadyRyViewHolder.java
index dcca21861..75e3aaa8b 100644
--- a/live/src/main/java/com/yunbao/live/views/LiveNewReadyRyViewHolder.java
+++ b/live/src/main/java/com/yunbao/live/views/LiveNewReadyRyViewHolder.java
@@ -44,7 +44,6 @@ import com.yunbao.common.interfaces.CommonCallback;
import com.yunbao.common.interfaces.ImageResultCallback;
import com.yunbao.common.interfaces.OnItemClickListener;
import com.yunbao.common.manager.IMLoginManager;
-import com.yunbao.common.manager.SWManager;
import com.yunbao.common.utils.Bus;
import com.yunbao.common.utils.DialogUitl;
import com.yunbao.common.utils.L;
@@ -66,6 +65,7 @@ import com.yunbao.live.activity.LiveRyAnchorActivity;
import com.yunbao.live.dialog.LiveAnchorEditCallMeDialog;
import com.yunbao.live.dialog.LiveAnchorSayPopDialog;
import com.yunbao.live.dialog.LiveFaceUnityDialogFragment;
+import com.yunbao.live.dialog.LiveFaceUnityDialogNewFragment;
import com.yunbao.live.dialog.LiveNewRoomClassDialogFragment;
import com.yunbao.live.dialog.LiveNewRoomTypeDialogFragment;
import com.yunbao.live.dialog.LiveTimeDialogFragment;
@@ -77,10 +77,7 @@ import org.greenrobot.eventbus.ThreadMode;
import java.io.File;
import java.util.Locale;
-import cn.rongcloud.rtc.api.RCRTCEngine;
-import cn.rongcloud.rtc.api.stream.RCRTCCameraOutputStream;
-import cn.rongcloud.rtc.api.stream.RCRTCVideoStreamConfig;
-import cn.rongcloud.rtc.base.RCRTCParamsType;
+import io.agora.beautyapi.faceunity.agora.SWManager;
public class LiveNewReadyRyViewHolder extends AbsViewHolder implements View.OnClickListener {
@@ -328,36 +325,6 @@ public class LiveNewReadyRyViewHolder extends AbsViewHolder implements View.OnCl
liveOpenCustomPopup.setSelectClarity(selectClarity);
}
SWManager.get().setDimensions(selectClarity);
-
- /*//設置開播分辨率
- RCRTCParamsType.RCRTCVideoResolution rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_480_848;
- int minRate = 200;
- int maxRate = 900;
- switch (selectClarity) {
- case 0:
- rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_480_848;
- minRate = 200;
- maxRate = 900;
- break;
- case 1:
- rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_720_1280;
- minRate = 250;
- maxRate = 2200;
- break;
- case 2:
- rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_1080_1920;
- minRate = 400;
- maxRate = 4000;
- break;
- }
- RCRTCVideoStreamConfig config =
- RCRTCVideoStreamConfig.Builder.create()
- .setMinRate(minRate)
- .setMaxRate(maxRate)
- .setVideoFps(RCRTCParamsType.RCRTCVideoFps.Fps_15)
- .setVideoResolution(rcrtcVideoResolution)
- .build();
- RCRTCEngine.getInstance().getDefaultVideoStream().setVideoConfig(config);*/
Log.e("切换分辨率", "时间戳" + System.currentTimeMillis());
//重新发布一下流
Bus.get().post(new LivePushRyEvent());
@@ -569,8 +536,7 @@ public class LiveNewReadyRyViewHolder extends AbsViewHolder implements View.OnCl
} else if (i == R.id.btn_locaiton) {
switchLocation();
} else if (i == R.id.btn_horizontally) {
- RCRTCCameraOutputStream cameraStream = RCRTCEngine.getInstance().getDefaultVideoStream();
- cameraStream.setPreviewMirror(!cameraStream.isPreviewMirror());
+ SWManager.get().setMirrorMode();
} else if (i == R.id.btn_robot) {
new XPopup.Builder(mContext)
.asCustom(new LiveRobotSettingCustomPopup(mContext))
@@ -614,6 +580,20 @@ public class LiveNewReadyRyViewHolder extends AbsViewHolder implements View.OnCl
}
}
+ public void setFaceUnityNew(boolean init) {
+ LiveFaceUnityDialogNewFragment fragment = new LiveFaceUnityDialogNewFragment(mContext);
+ fragment.setManager(manager);
+ fragment.setDismissShowUi(mRootView);
+ if (mContext instanceof LiveRyAnchorActivity) {
+ fragment.show(((LiveRyAnchorActivity) mContext).getSupportFragmentManager(), "FaceUnity");
+ mRootView.setVisibility(View.INVISIBLE);
+ if (init) {
+ fragment.dismiss();
+ }
+ }
+ }
+
+
/**
* 打开心愿单窗口
*/
diff --git a/live/src/main/java/com/yunbao/live/views/LivePlayRyViewHolder.java b/live/src/main/java/com/yunbao/live/views/LivePlayRyViewHolder.java
index 204f15b59..df57ab96b 100644
--- a/live/src/main/java/com/yunbao/live/views/LivePlayRyViewHolder.java
+++ b/live/src/main/java/com/yunbao/live/views/LivePlayRyViewHolder.java
@@ -38,7 +38,6 @@ import com.yunbao.common.http.HttpCallback;
import com.yunbao.common.http.HttpClient;
import com.yunbao.common.interfaces.OnItemClickListener;
import com.yunbao.common.manager.IMLoginManager;
-import com.yunbao.common.manager.SWAuManager;
import com.yunbao.common.utils.Bus;
import com.yunbao.common.utils.DialogUitl;
import com.yunbao.common.utils.DpUtil;
@@ -53,7 +52,6 @@ import com.yunbao.live.R;
import com.yunbao.live.activity.LiveActivity;
import com.yunbao.live.activity.LiveAudienceActivity;
import com.yunbao.live.event.LiveAudienceEvent;
-import com.yunbao.live.utils.LiveExoPlayerManager;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
@@ -89,6 +87,7 @@ import cn.rongcloud.rtc.base.RCRTCRoomType;
import cn.rongcloud.rtc.base.RCRTCStreamType;
import cn.rongcloud.rtc.base.RTCErrorCode;
import cn.rongcloud.rtc.core.RendererCommon;
+import io.agora.beautyapi.faceunity.agora.SWAuManager;
public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
@@ -121,7 +120,7 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
static int vHeight;//视频高
private TextView debugView;
- private LiveExoPlayerManager mPlayer;
+ //private LiveExoPlayerManager mPlayer;
private boolean isPk = false;
private boolean userJoinLinkMic = false;//用户是否已加入房间
@@ -174,10 +173,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
params.height = vHeight;
mPkContainer.requestLayout();
- mPlayer = new LiveExoPlayerManager(mContext);
- mPlayer.setMainView(mVideoView);
- mPlayer.setListener(new ExoPlayerListener());
-
debugView = new TextView(mContext);
debugView.setBackgroundColor(Color.WHITE);
}
@@ -185,7 +180,7 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
public void initSwEngine(String liveUid) {
this.liveUid = liveUid;
swAuManager = SWAuManager.get();
- swAuManager.setAudienceContainer(playFrameLayout);
+ swAuManager.setAudienceContainer(ry_view);
swAuManager.initRtcEngine((Activity) mContext);
swAuManager.setupRemoteVideo(Integer.parseInt(liveUid));
swAuManager.joinRoom(CommonAppConfig.getInstance().getUid(), CommonAppConfig.SWToken, SWAuManager.getChannelName(liveUid));
@@ -229,10 +224,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
Log.i(TAG, "setLiveBeanLandscape: " + landscape + " isPk: " + isPk);
this.landscape = landscape;
this.videoLandscape = landscape;
- if (mPlayer != null) {
- mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
- }
-
if (landscape == 2) {
Log.i(TAG, "还原9:16");
RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
@@ -297,13 +288,7 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
*/
@Override
public void resumePlay() {
- if (!mPlayer.isPlaying()) {
- new Handler(Looper.getMainLooper())
- .postDelayed(() -> {
- mPlayer.replay();
- // ToastUtil.show("强制播放" + val);
- }, 100);
- }
+
}
/**
@@ -317,30 +302,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
PLAY_MODEL = playModel;
waitNextUrl = null;
Log.i(TAG, "play" + " url:" + url + " playModel: " + playModel + " landscape: " + landscape + " videoLandscape" + videoLandscape);
- if (playModel != PLAY_MODEL_DEF && !url.contains(videoFps[0] + ".flv")) {
- mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
- if (landscape == VIDEO_VERTICAL && !isPk) {
- url = url.replace(".flv", videoRatioVertical[playModel] + videoFps[0] + ".flv");
- } else if (landscape == VIDEO_HORIZONTAL || isPk) {
- url = url.replace(".flv", videoRatioHorizontal[playModel] + videoFps[0] + ".flv");
- }
- } else if (!url.contains(videoFps[0] + ".flv")) {
- mPlayer.setViewResizeMode(false);
- }
- Log.e("purl121", url);
-
- if (TextUtils.isEmpty(url) || mVideoView == null) {
- return;
- }
-
- if (TextUtils.isEmpty(url) || mVideoView == null) {
- return;
- }
- if (mPlayer.isPlaying()) {
- mPlayer.stop();
- mPlayer.clearUrl();
- }
- mPlayer.startUrl(url);
purl = url;
onPrepared();
}
@@ -351,14 +312,12 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
PLAY_MODEL = playModel;
Log.i(TAG, "switchStream: " + " url:" + url + " playModel: " + playModel + " landscape: " + landscape + " videoLandscape = " + videoLandscape + " ispk = " + isPk + " bean = " + roomModel.getEnterRoomInfo().getIsconnection());
if (playModel != PLAY_MODEL_DEF && !url.contains(videoFps[0] + ".flv")) {
- mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
if (landscape == VIDEO_VERTICAL && !isPk) {
url = url.replace(".flv", videoRatioVertical[playModel] + videoFps[0] + ".flv");
} else if (landscape == VIDEO_HORIZONTAL || isPk) {
url = url.replace(".flv", videoRatioHorizontal[playModel] + videoFps[0] + ".flv");
}
} else if (!url.contains(videoFps[0] + ".flv")) {
- mPlayer.setViewResizeMode(false);
}
Log.e("purl121", url);
@@ -370,7 +329,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
if (TextUtils.isEmpty(url) || mVideoView == null) {
return;
}
- mPlayer.switchUrl(url);
purl = url;
}
@@ -383,7 +341,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
Log.i(TAG, "switchStreamPk: isPk1" + isPk + " tmp = " + !tmpPk + " isPk2 = " + this.isPk);
if (this.isPk && tmpPk) return;
if (isPk && !tmpPk || this.isPk) {
- mPlayer.setViewResizeMode(false);
String url;
if (PLAY_MODEL != -1) {
url = srcUrl.replace(".flv", videoRatioHorizontal[PLAY_MODEL] + videoFps[0] + ".flv");
@@ -393,7 +350,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
if (!tmpPk) {
waitNextUrl = url;
}
- mPlayer.switchUrl(srcUrl);
tmpPk = true;
} else if (!isPk) {
tmpPk = false;
@@ -404,14 +360,12 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
} else {
waitNextUrl = null;
}
- mPlayer.switchUrl(srcUrl);
}
}
@Override
public void clearFrame() {
super.clearFrame();
- mPlayer.clearFrame();
RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
params.height = ViewGroup.LayoutParams.WRAP_CONTENT;
params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
@@ -436,10 +390,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
public void release() {
mEnd = true;
mStarted = false;
- if (mPlayer != null) {
- mPlayer.stop();
- mPlayer.release();
- }
Bus.getOff(this);
EventBus.getDefault().unregister(this);
L.e(TAG, "release------->");
@@ -454,9 +404,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
mCover.setVisibility(View.VISIBLE);
}
}
- if (mPlayer != null) {
- mPlayer.stop();
- }
stopPlay2();
}
@@ -481,10 +428,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
params1.addRule(RelativeLayout.ALIGN_TOP);
ry_view.requestLayout();
isPk = true;
- if (mPlayer.getUrl().contains("848_24.flv") || mPlayer.getUrl().contains("1280_24.flv")) {
- tmpPk = false;
- switchStreamPk(true);
- }
}
/**
@@ -501,7 +444,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
} else {
height = ViewGroup.LayoutParams.WRAP_CONTENT;
}
- mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
if (landscape == 2) {
Log.i(TAG, "onPrepared:还原9:16 land=" + videoLandscape);
RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
@@ -804,20 +746,13 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
} else {
ToastUtil.show("You have successfully exited the voice connection");
}
- if (mPlayer.getNowPlayer() != null) {
- mPlayer.play();
- Log.e("ry", mPlayer.isPlaying() + "purl" + purl);
- if (!mPlayer.isPlaying()) {
- mPlayer.switchUrl(purl);
- }
- ry_view.removeAllViews();
- ry_view.getLayoutParams().height = ViewGroup.LayoutParams.WRAP_CONTENT;
- onPrepared();
- rcrtcRoom = null;
- MicStatusManager.getInstance().clear();
+ ry_view.removeAllViews();
+ ry_view.getLayoutParams().height = ViewGroup.LayoutParams.WRAP_CONTENT;
+ onPrepared();
+ rcrtcRoom = null;
+ MicStatusManager.getInstance().clear();
- resumePlay();
- }
+ resumePlay();
if (onMicCallback != null) {
onMicCallback.onMikUpdate();
@@ -1021,7 +956,7 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
Log.i(TAG, "资源流 type: " + stream.getMediaType());
if (stream.getMediaType() == RCRTCMediaType.VIDEO) {
//暂停播放
- mPlayer.stop();
+
//如果远端用户发布的是视频流,创建显示视图RCRTCVideoView,并添加到布局中显示
//如果远端用户发布的是视频流,创建显示视图RCRTCVideoView,并添加到布局中显示
@@ -1314,7 +1249,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
} else if (videoLandscape == VIDEO_HORIZONTAL) {
url = url.replace(".flv", videoRatioHorizontal[PLAY_MODEL_480] + videoFps[0] + ".flv");
}
- mPlayer.switchUrl(url);
}
private class ExoPlayerListener implements Player.Listener {
@@ -1364,7 +1298,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
loadingListener.onPlayer();
}
if (waitNextUrl != null) {
- mPlayer.switchUrl(waitNextUrl);
waitNextUrl = null;
}
}
diff --git a/live/src/main/java/com/yunbao/live/views/LivePlaySwViewHolder.java b/live/src/main/java/com/yunbao/live/views/LivePlaySwViewHolder.java
deleted file mode 100644
index f421d920f..000000000
--- a/live/src/main/java/com/yunbao/live/views/LivePlaySwViewHolder.java
+++ /dev/null
@@ -1,1374 +0,0 @@
-package com.yunbao.live.views;
-
-import static com.lzy.okgo.utils.HttpUtils.runOnUiThread;
-
-import android.Manifest;
-import android.app.Dialog;
-import android.content.Context;
-import android.graphics.Color;
-import android.media.AudioManager;
-import android.os.Build;
-import android.os.Handler;
-import android.os.Looper;
-import android.text.TextUtils;
-import android.util.Log;
-import android.view.Gravity;
-import android.view.View;
-import android.view.ViewGroup;
-import android.view.ViewParent;
-import android.widget.FrameLayout;
-import android.widget.ImageView;
-import android.widget.RelativeLayout;
-import android.widget.TextView;
-
-import com.google.android.exoplayer2.PlaybackException;
-import com.google.android.exoplayer2.Player;
-import com.google.android.exoplayer2.ui.StyledPlayerView;
-import com.google.android.exoplayer2.video.VideoSize;
-import com.lxj.xpopup.XPopup;
-import com.lxj.xpopup.core.BasePopupView;
-import com.lxj.xpopup.interfaces.XPopupCallback;
-import com.lzf.easyfloat.EasyFloat;
-import com.lzy.okserver.OkDownload;
-import com.yunbao.common.bean.EnterRoomNewModel;
-import com.yunbao.common.dialog.LiveFontSizeSettingDialog;
-import com.yunbao.common.http.HttpCallback;
-import com.yunbao.common.http.HttpClient;
-import com.yunbao.common.interfaces.OnItemClickListener;
-import com.yunbao.common.manager.IMLoginManager;
-import com.yunbao.common.utils.Bus;
-import com.yunbao.common.utils.DialogUitl;
-import com.yunbao.common.utils.DpUtil;
-import com.yunbao.common.utils.L;
-import com.yunbao.common.utils.MicStatusManager;
-import com.yunbao.common.utils.ScreenDimenUtil;
-import com.yunbao.common.utils.SpUtil;
-import com.yunbao.common.utils.ToastUtil;
-import com.yunbao.common.utils.WordUtil;
-import com.yunbao.common.views.LiveClarityCustomPopup;
-import com.yunbao.live.R;
-import com.yunbao.live.activity.LiveActivity;
-import com.yunbao.live.activity.LiveAudienceActivity;
-import com.yunbao.live.event.LiveAudienceEvent;
-import com.yunbao.live.utils.LiveExoPlayerManager;
-
-import org.greenrobot.eventbus.EventBus;
-import org.greenrobot.eventbus.Subscribe;
-import org.greenrobot.eventbus.ThreadMode;
-
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.Locale;
-
-import cn.rongcloud.rtc.api.RCRTCEngine;
-import cn.rongcloud.rtc.api.RCRTCRemoteUser;
-import cn.rongcloud.rtc.api.RCRTCRoom;
-import cn.rongcloud.rtc.api.RCRTCRoomConfig;
-import cn.rongcloud.rtc.api.callback.IRCRTCResultCallback;
-import cn.rongcloud.rtc.api.callback.IRCRTCResultDataCallback;
-import cn.rongcloud.rtc.api.callback.IRCRTCRoomEventsListener;
-import cn.rongcloud.rtc.api.callback.IRCRTCSwitchRoleCallback;
-import cn.rongcloud.rtc.api.callback.IRCRTCSwitchRoleDataCallback;
-import cn.rongcloud.rtc.api.callback.IRCRTCVideoInputFrameListener;
-import cn.rongcloud.rtc.api.stream.RCRTCInputStream;
-import cn.rongcloud.rtc.api.stream.RCRTCLiveInfo;
-import cn.rongcloud.rtc.api.stream.RCRTCOutputStream;
-import cn.rongcloud.rtc.api.stream.RCRTCVideoInputStream;
-import cn.rongcloud.rtc.api.stream.RCRTCVideoStreamConfig;
-import cn.rongcloud.rtc.api.stream.RCRTCVideoView;
-import cn.rongcloud.rtc.base.RCRTCLiveRole;
-import cn.rongcloud.rtc.base.RCRTCMediaType;
-import cn.rongcloud.rtc.base.RCRTCParamsType;
-import cn.rongcloud.rtc.base.RCRTCRemoteVideoFrame;
-import cn.rongcloud.rtc.base.RCRTCRoomType;
-import cn.rongcloud.rtc.base.RCRTCStreamType;
-import cn.rongcloud.rtc.base.RTCErrorCode;
-import cn.rongcloud.rtc.core.RendererCommon;
-
-public class LivePlaySwViewHolder extends LiveRoomPlayViewHolder {
-
- private static final String TAG = "LivePlayViewHolder";
- private ViewGroup mRoot;
- private ViewGroup mSmallContainer;
- private ViewGroup mLeftContainer;
- private ViewGroup mRightContainer;
- private RelativeLayout mPkContainer;
- public StyledPlayerView mVideoView;
-
- private View mLoading, mLoading2;
- private ImageView mCover;
- private boolean mPaused;//是否切后台了
- private boolean mStarted;//是否开始了播放
- private boolean mEnd;//是否结束了播放
- public static ImageView leave;
-
- private boolean mPausedPlay;//是否被动暂停了播放
- public int landscape; //1h 2s
- public Context contexts;
- public FrameLayout ry_view;
-
- private static final int VIDEO_VERTICAL = 2;
- private static final int VIDEO_HORIZONTAL = 1;
- int videoLandscape = -1; // 视频方向,2=竖屏,1=横屏
-
- static int vHeight;//视频高
- private TextView debugView;
- private LiveExoPlayerManager mPlayer;
- private boolean isPk = false;
- private boolean userJoinLinkMic = false;//用户是否已加入房间
-
- //0未申请1申请中2连麦中
- RCRTCRoom rcrtcRoom;
- String purl, srcUrl;
-
- public int getLandscape() {
- return landscape;
- }
-
- public LivePlaySwViewHolder(Context context, ViewGroup parentView, int landscapes) {
- super(context, parentView);
- contexts = context;
- landscape = landscapes;
- Log.i("收收收", landscape + "");
- }
-
-
- @Override
- protected int getLayoutId() {
- return R.layout.view_live_play_ksy;
- }
-
- @Override
- public void init() {
- Log.i(TAG, "init: 初始化播放器ViewHolder");
- EventBus.getDefault().register(this);
- Bus.getOn(this);
- mRoot = (ViewGroup) findViewById(R.id.root);
- mSmallContainer = (ViewGroup) findViewById(R.id.small_container);
- mLeftContainer = (ViewGroup) findViewById(R.id.left_container);
- mRightContainer = (ViewGroup) findViewById(R.id.right_container);
- mPkContainer = (RelativeLayout) findViewById(R.id.pk_container);
- mVideoView = (StyledPlayerView) findViewById(R.id.video_view);
- ry_view = (FrameLayout) findViewById(R.id.ry_view);
- leave = (ImageView) findViewById(R.id.leave);
- mLoading = findViewById(R.id.loading);
- mLoading2 = findViewById(R.id.loading2);
- mCover = (ImageView) findViewById(R.id.cover);
- ScreenDimenUtil util = ScreenDimenUtil.getInstance();
- int mScreenWdith = util.getScreenWdith();
- vHeight = mScreenWdith * 720 / 960;
-
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mPkContainer.getLayoutParams();
- params.height = vHeight;
- mPkContainer.requestLayout();
-
- mPlayer = new LiveExoPlayerManager(mContext);
- mPlayer.setMainView(mVideoView);
- mPlayer.setListener(new ExoPlayerListener());
- debugView = new TextView(mContext);
- debugView.setBackgroundColor(Color.WHITE);
-
- }
-
- @Override
- public void hideCover() {
-// if (mCover != null) {
-// if (mCover.getVisibility()==View.VISIBLE){
-//// new Handler().postDelayed(new Runnable() {
-//// @Override
-//// public void run() {
-////
-//// }
-//// },200);
-// mCover.setVisibility(View.GONE);
-// mLoading2.setVisibility(View.GONE);
-// }
-//
-//
-// }
- }
-
- @Override
- public void setCover(String coverUrl) {
-// if (mCover != null) {
-// mCover.setVisibility(View.VISIBLE);
-// mLoading2.setVisibility(View.VISIBLE);
-// ImgLoader.displayBlurLive(mContext, coverUrl, mCover, 400, 600);
-//
-// }
- }//
-
- @Override
- public synchronized void setLiveBeanLandscape(int landscape) {
-// landscape=1;
- Log.i(TAG, "setLiveBeanLandscape: " + landscape + " isPk: " + isPk);
- this.landscape = landscape;
- this.videoLandscape = landscape;
- if (mPlayer != null) {
- mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
- }
-
- if (landscape == 2) {
- Log.i(TAG, "还原9:16");
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = RelativeLayout.LayoutParams.MATCH_PARENT;
- params.topMargin = 0;
- mVideoView.setLayoutParams(params);
- mVideoView.requestLayout();
-
- RelativeLayout.LayoutParams params1 = (RelativeLayout.LayoutParams) ry_view.getLayoutParams();
- params1.height = RelativeLayout.LayoutParams.MATCH_PARENT;
- params1.topMargin = 0;
- ry_view.setLayoutParams(params1);
- ry_view.requestLayout();
- RelativeLayout.LayoutParams params2 = (RelativeLayout.LayoutParams) mCover.getLayoutParams();
- params2.height = RelativeLayout.LayoutParams.MATCH_PARENT;
- params2.topMargin = 0;
- mCover.setLayoutParams(params2);
- mCover.requestLayout();
-
- } else {
- Log.i(TAG, "还原16:9");
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = vHeight;
- params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- params.addRule(RelativeLayout.ALIGN_TOP);
- mVideoView.setLayoutParams(params);
- mVideoView.requestLayout();
-
- RelativeLayout.LayoutParams params1 = (RelativeLayout.LayoutParams) ry_view.getLayoutParams();
- params1.height = vHeight;
- params1.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- params1.addRule(RelativeLayout.ALIGN_TOP);
- ry_view.setLayoutParams(params1);
- ry_view.requestLayout();
- RelativeLayout.LayoutParams params2 = (RelativeLayout.LayoutParams) mCover.getLayoutParams();
- params2.height = DpUtil.dp2px(270);
- params2.topMargin = DpUtil.dp2px(120);
- mCover.setLayoutParams(params2);
- mCover.requestLayout();
- }
- }
-
- @Override
- public void setLiveEnterRoomNewModel(EnterRoomNewModel data) {
- super.setLiveEnterRoomNewModel(data);
- isPk = data.getEnterRoomInfo().getIsconnection().equals("1");
- if (isPk) {
- setLiveBeanLandscape(1);
- }
- }
-
- /**
- * 暂停播放
- */
- @Override
- public void pausePlay() {
-
- }
-
- /**
- * 暂停播放后恢复
- */
- @Override
- public void resumePlay() {
- if (!mPlayer.isPlaying()) {
- new Handler(Looper.getMainLooper())
- .postDelayed(() -> {
- mPlayer.replay();
- // ToastUtil.show("强制播放" + val);
- }, 100);
- }
- }
-
- /**
- * 开始播放
- *
- * @param url 流地址
- */
- @Override
- public void play(String url, int playModel) {
- srcUrl = url;
- PLAY_MODEL = playModel;
- waitNextUrl = null;
- Log.i(TAG, "play" + " url:" + url + " playModel: " + playModel + " landscape: " + landscape + " videoLandscape" + videoLandscape);
- if (playModel != PLAY_MODEL_DEF && !url.contains(videoFps[0] + ".flv")) {
- mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
- if (landscape == VIDEO_VERTICAL && !isPk) {
- url = url.replace(".flv", videoRatioVertical[playModel] + videoFps[0] + ".flv");
- } else if (landscape == VIDEO_HORIZONTAL || isPk) {
- url = url.replace(".flv", videoRatioHorizontal[playModel] + videoFps[0] + ".flv");
- }
- } else if (!url.contains(videoFps[0] + ".flv")) {
- mPlayer.setViewResizeMode(false);
- }
- Log.e("purl121", url);
-
- if (TextUtils.isEmpty(url) || mVideoView == null) {
- return;
- }
-
- if (TextUtils.isEmpty(url) || mVideoView == null) {
- return;
- }
- if (mPlayer.isPlaying()) {
- mPlayer.stop();
- mPlayer.clearUrl();
- }
- mPlayer.startUrl(url);
- purl = url;
- onPrepared();
- }
-
- @Override
- public void switchStream(String url, int playModel) {
- srcUrl = url;
- PLAY_MODEL = playModel;
- Log.i(TAG, "switchStream: " + " url:" + url + " playModel: " + playModel + " landscape: " + landscape + " videoLandscape = " + videoLandscape + " ispk = " + isPk + " bean = " + roomModel.getEnterRoomInfo().getIsconnection());
- if (playModel != PLAY_MODEL_DEF && !url.contains(videoFps[0] + ".flv")) {
- mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
- if (landscape == VIDEO_VERTICAL && !isPk) {
- url = url.replace(".flv", videoRatioVertical[playModel] + videoFps[0] + ".flv");
- } else if (landscape == VIDEO_HORIZONTAL || isPk) {
- url = url.replace(".flv", videoRatioHorizontal[playModel] + videoFps[0] + ".flv");
- }
- } else if (!url.contains(videoFps[0] + ".flv")) {
- mPlayer.setViewResizeMode(false);
- }
- Log.e("purl121", url);
-
- if (TextUtils.isEmpty(url) || mVideoView == null) {
- return;
- }
-
-
- if (TextUtils.isEmpty(url) || mVideoView == null) {
- return;
- }
- mPlayer.switchUrl(url);
- purl = url;
- }
-
- boolean tmpPk = false;
- String waitNextUrl = null;
-
- @Override
- public void switchStreamPk(boolean isPk) {
- super.switchStreamPk(isPk);
- Log.i(TAG, "switchStreamPk: isPk1" + isPk + " tmp = " + !tmpPk + " isPk2 = " + this.isPk);
- if (this.isPk && tmpPk) return;
- if (isPk && !tmpPk || this.isPk) {
- mPlayer.setViewResizeMode(false);
- String url;
- if (PLAY_MODEL != -1) {
- url = srcUrl.replace(".flv", videoRatioHorizontal[PLAY_MODEL] + videoFps[0] + ".flv");
- } else {
- url = srcUrl.replace(".flv", videoRatioHorizontal[1] + videoFps[0] + ".flv");
- }
- if (!tmpPk) {
- waitNextUrl = url;
- }
- mPlayer.switchUrl(srcUrl);
- tmpPk = true;
- } else if (!isPk) {
- tmpPk = false;
- if (landscape == VIDEO_VERTICAL && PLAY_MODEL != -1) {
- waitNextUrl = srcUrl.replace(".flv", videoRatioVertical[PLAY_MODEL] + videoFps[0] + ".flv");
- } else if (landscape == VIDEO_HORIZONTAL && PLAY_MODEL != -1) {
- waitNextUrl = srcUrl.replace(".flv", videoRatioHorizontal[PLAY_MODEL] + videoFps[0] + ".flv");
- } else {
- waitNextUrl = null;
- }
- mPlayer.switchUrl(srcUrl);
- }
- }
-
- @Override
- public void clearFrame() {
- super.clearFrame();
- mPlayer.clearFrame();
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = ViewGroup.LayoutParams.WRAP_CONTENT;
- params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- params.addRule(RelativeLayout.ALIGN_TOP);
- mVideoView.setLayoutParams(params);
- mVideoView.requestLayout();
-
- RelativeLayout.LayoutParams params1 = (RelativeLayout.LayoutParams) ry_view.getLayoutParams();
- params1.height = ViewGroup.LayoutParams.WRAP_CONTENT;
- params1.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- params1.addRule(RelativeLayout.ALIGN_TOP);
- ry_view.setLayoutParams(params1);
- ry_view.requestLayout();
- RelativeLayout.LayoutParams params2 = (RelativeLayout.LayoutParams) mCover.getLayoutParams();
- params2.height = DpUtil.dp2px(270);
- params2.topMargin = DpUtil.dp2px(120);
- mCover.setLayoutParams(params2);
- mCover.requestLayout();
- }
-
- @Override
- public void release() {
- mEnd = true;
- mStarted = false;
- if (mPlayer != null) {
- mPlayer.stop();
- mPlayer.release();
- }
- Bus.getOff(this);
- EventBus.getDefault().unregister(this);
- L.e(TAG, "release------->");
- }
-
- @Override
- public void stopPlay() {
- Log.i(TAG, "stopPlay: ");
- if (mCover != null) {
- mCover.setAlpha(1f);
- if (mCover.getVisibility() != View.VISIBLE) {
- mCover.setVisibility(View.VISIBLE);
- }
- }
- if (mPlayer != null) {
- mPlayer.stop();
- }
- stopPlay2();
- }
-
- @Override
- public void stopPlay2() {
-
- }
-
- @Override
- public void setViewUP(int i) {
- if (mVideoView == null) return;
- Log.e("PK状态", "" + i);
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = vHeight;
- params.topMargin = contexts.getResources().getDimensionPixelOffset(R.dimen.live_top);
- params.addRule(RelativeLayout.ALIGN_TOP);
- mVideoView.requestLayout();
-
- RelativeLayout.LayoutParams params1 = (RelativeLayout.LayoutParams) ry_view.getLayoutParams();
- params1.height = vHeight;
- params1.topMargin = contexts.getResources().getDimensionPixelOffset(R.dimen.live_top);
- params1.addRule(RelativeLayout.ALIGN_TOP);
- ry_view.requestLayout();
- isPk = true;
- if (mPlayer.getUrl().contains("848_24.flv") || mPlayer.getUrl().contains("1280_24.flv")) {
- tmpPk = false;
- switchStreamPk(true);
- }
- }
-
- /**
- * 播放开始
- */
- public void onPrepared() {
- if (mEnd) {
- release();
- return;
- }
- int height;
- if (videoLandscape == VIDEO_VERTICAL) {
- height = ViewGroup.LayoutParams.MATCH_PARENT;
- } else {
- height = ViewGroup.LayoutParams.WRAP_CONTENT;
- }
- mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
- if (landscape == 2) {
- Log.i(TAG, "onPrepared:还原9:16 land=" + videoLandscape);
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = height;
- params.topMargin = 0;
- params.addRule(RelativeLayout.ALIGN_TOP);
- mVideoView.setLayoutParams(params);
- mVideoView.requestLayout();
-
- RelativeLayout.LayoutParams params1 = (RelativeLayout.LayoutParams) ry_view.getLayoutParams();
- params1.height = height;
- params1.topMargin = 0;
- ry_view.setLayoutParams(params1);
- ry_view.requestLayout();
- RelativeLayout.LayoutParams params2 = (RelativeLayout.LayoutParams) mCover.getLayoutParams();
- params2.height = height;
- params2.topMargin = 0;
- mCover.setLayoutParams(params2);
- mCover.requestLayout();
- } else {
- Log.i(TAG, "onPrepared:还原16:9");
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = height;
- params.topMargin = 0;
- if (videoLandscape == VIDEO_HORIZONTAL) {
- params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- }
- params.addRule(RelativeLayout.ALIGN_TOP);
- mVideoView.setLayoutParams(params);
- mVideoView.requestLayout();
-
- RelativeLayout.LayoutParams params1 = (RelativeLayout.LayoutParams) ry_view.getLayoutParams();
- params1.height = height;
- params1.topMargin = 0;
- params1.addRule(RelativeLayout.ALIGN_TOP);
- if (videoLandscape == VIDEO_HORIZONTAL) {
- params1.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- }
- ry_view.setLayoutParams(params1);
- ry_view.requestLayout();
- RelativeLayout.LayoutParams params2 = (RelativeLayout.LayoutParams) mCover.getLayoutParams();
- params2.height = height;
- params2.topMargin = DpUtil.dp2px(120);
- mCover.setLayoutParams(params2);
- mCover.requestLayout();
- }
- }
-
- @Override
- public void setPkview() {
- Log.i("收到socket--->", "变成16:9");
- String url = srcUrl;
- isPk = true;
- switchStreamPk(true);
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = vHeight;
- params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- params.addRule(RelativeLayout.ALIGN_TOP);
- mVideoView.requestLayout();
- }
-
- @Override
- public synchronized void setPkEndview() {
-
- Log.i("收收收", landscape + "");
- isPk = false;
- tmpPk = false;
- switchStreamPk(false);
- if (landscape == 2) {
- Log.i("收到socket--->", "还原9:16");
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = RelativeLayout.LayoutParams.MATCH_PARENT;
- params.topMargin = 0;
- mVideoView.requestLayout();
-
- } else {
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = vHeight;
- params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- params.addRule(RelativeLayout.ALIGN_TOP);
- mVideoView.requestLayout();
- }
-// if (detailsView != null) {
-// mVideoView.removeView(detailsView);
-// detailsView = null;
-// }
- }
-
- public void removeDetailView() {
-// if (detailsView != null) {
-// mVideoView.removeView(detailsView);
-// detailsView = null;
-// }
- }
-
- @Override
- public ViewGroup getSmallContainer() {
- return mSmallContainer;
- }
-
-
- @Override
- public ViewGroup getRightContainer() {
- return mRightContainer;
- }
-
- @Override
- public ViewGroup getPkContainer() {
- return mPkContainer;
- }
-
- @Override
- public void changeToLeft() {
- if (mVideoView != null) {
-
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = vHeight;
-// params.height = mContext.getResources().getDimensionPixelOffset(R.dimen.live_view);
- params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- params.addRule(RelativeLayout.ALIGN_TOP);
- mVideoView.requestLayout();
-
- }
- if (mLoading != null && mLeftContainer != null) {
- ViewParent viewParent = mLoading.getParent();
- if (viewParent != null) {
- ((ViewGroup) viewParent).removeView(mLoading);
- }
- FrameLayout.LayoutParams params = new FrameLayout.LayoutParams(DpUtil.dp2px(24), DpUtil.dp2px(24));
- params.gravity = Gravity.CENTER;
- mLoading.setLayoutParams(params);
- mLeftContainer.addView(mLoading);
- }
- }
-
- @Override
- public void changeToBig() {
- if (mVideoView != null) {
- RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
- mVideoView.setLayoutParams(params);
-
- }
- if (mLoading != null && mRoot != null) {
- ViewParent viewParent = mLoading.getParent();
- if (viewParent != null) {
- ((ViewGroup) viewParent).removeView(mLoading);
- }
- RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(DpUtil.dp2px(24), DpUtil.dp2px(24));
- params.addRule(RelativeLayout.CENTER_IN_PARENT);
- mLoading.setLayoutParams(params);
- mRoot.addView(mLoading);
- }
- }
-
- @Override
- public void onResume() {
- if (!mPausedPlay && mPaused && mVideoView != null) {
-// mVideoView.runInForeground();
-// mVideoView.start();
- }
- mPaused = false;
-// if (mPlayer.isPlaying() == 1) {
-// mPlayer.resumeVideo();
-// mPlayer.resumeAudio();
-// }
- }
-
- @Override
- public void onPause() {
-// if (!mPausedPlay && mVideoView != null) {
-// mVideoView.runInBackground(false);
-// }
-// mPaused = true;
-// mPlayer.pauseVideo();
-// mPlayer.pauseAudio();
- }
-
- @Override
- public void onDestroy() {
- release();
- }
-
- //全屏
- @Override
- public void fullScreen() {
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = RelativeLayout.LayoutParams.MATCH_PARENT;
- params.topMargin = 0;
- params.addRule(RelativeLayout.ALIGN_TOP);
- mVideoView.requestLayout();
- }
-
- //小屏
- @Override
- public void smallScreen() {
- RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
- params.height = vHeight;
- params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
- // mPlayer.setRenderRotation(V2TXLiveDef.V2TXLiveRotation.V2TXLiveRotation0);
- params.addRule(RelativeLayout.ALIGN_TOP);
- mVideoView.requestLayout();
- }
-
-
- @Subscribe(threadMode = ThreadMode.MAIN)
- public void onUpdata(String str) {
- if ("UsertoRY".equals(str)) {
- DialogUitl.showSimpleDialog(mContext, mContext.getString(R.string.mic_tag), new DialogUitl.SimpleCallback() {
- @Override
- public void onConfirmClick(Dialog dialog, String content) {
- UsertoRY();
- dialog = null;
- }
- });
-
- } else if ("inviteMic".equals(str)) {
- String content = "邀請您進行語音連麥";
- String confirm = "接受";
- String cancel = "拒絕";
- if (!WordUtil.isNewZh()) {
- content = "You are invited to join the voice connection";
- confirm = "accept";
- cancel = "cancel";
- }
- DialogUitl.Builder builder = new DialogUitl.Builder(mContext)
- .setContent(content)
- .setTitle(mLiveBean.getUserNiceName())
- .setConfirmString(confirm)
- .setCancelString(cancel)
- .setView(R.layout.dialog_live_mic_invite)
- .setClickCallback(new DialogUitl.SimpleCallback() {
- @Override
- public void onConfirmClick(Dialog dialog, String content) {
- String[] permissions;
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
- permissions = new String[]{
- Manifest.permission.READ_MEDIA_IMAGES,
- Manifest.permission.RECORD_AUDIO
- };
- } else {
- permissions = new String[]{
- Manifest.permission.READ_EXTERNAL_STORAGE,
- Manifest.permission.WRITE_EXTERNAL_STORAGE,
- Manifest.permission.RECORD_AUDIO
- };
- }
- LiveAudienceActivity.mProcessResultUtil.requestPermissions(permissions, new Runnable() {
- @Override
- public void run() {
- UsertoRY();
-
- }
- });
- }
- });
- builder.build().show();
-
- } else if ("endMic".equals(str)) {
- if (rcrtcRoom != null) {
- // 开始切换为观众身份
- RCRTCEngine.getInstance().getRoom().getLocalUser().switchToAudience(new IRCRTCSwitchRoleCallback() {
-
- /**
- * 当切换失败且SDK处于无法回退状态时回调,该角色已经无法使用当前角色继续进行音视频。
- * SDK内部会退出房间并清理所有资源,该用户只能重新加入房间才能继续音视频。
- */
- @Override
- public void onKicked() {
-
- }
-
- @Override
- public void onSuccess() {
- Log.e("ry", "下麦成功");
- // 该用户切换为观众成功,可以以观众身份进行音视频
- //退出rtc播放
- leaveRoom();
- }
-
- /**
- * 当切换失败且不影响当前角色继续音视频时回调
- * @param errorCode 失败错误码
- */
- @Override
- public void onFailed(RTCErrorCode errorCode) {
- Log.e("ry", "下麦失败" + errorCode);
- leaveRoom();
- }
-
- private void leaveRoom() {
- RCRTCEngine.getInstance().leaveRoom(new IRCRTCResultCallback() {
- @Override
- public void onSuccess() {
- userJoinLinkMic = false;
- Log.e("ry", "退出多人房间成功");
- new Handler(Looper.getMainLooper()).post(new Runnable() {
- public void run() {
- if (WordUtil.isNewZh()) {
- ToastUtil.show("已成功退出語音連麥");
- } else {
- ToastUtil.show("You have successfully exited the voice connection");
- }
- if (mPlayer.getNowPlayer() != null) {
- mPlayer.play();
- Log.e("ry", mPlayer.isPlaying() + "purl" + purl);
- if (!mPlayer.isPlaying()) {
- mPlayer.switchUrl(purl);
- }
- ry_view.removeAllViews();
- ry_view.getLayoutParams().height = ViewGroup.LayoutParams.WRAP_CONTENT;
- onPrepared();
- rcrtcRoom = null;
- MicStatusManager.getInstance().clear();
-
- resumePlay();
- }
-
- if (onMicCallback != null) {
- onMicCallback.onMikUpdate();
- }
- }
- });
- }
-
- @Override
- public void onFailed(RTCErrorCode errorCode) {
-                                        Log.e("ry", errorCode + "退出多人房间失败");
- userJoinLinkMic = false;
- }
- });
-
- }
- });
- }
- } else if ("Debug".equals(str)) {
- if (EasyFloat.isShow("Debug")) {
- EasyFloat.dismiss("Debug");
- return;
- }
- EasyFloat.with(mContext)
- .setDragEnable(true)
- .setTag("Debug")
- .setLayout(debugView)
- .show();
- }
- }
-
- @Subscribe(threadMode = ThreadMode.MAIN)
- public void onOpenDrawer(LiveAudienceEvent event) {
- if (event.getType() == LiveAudienceEvent.LiveAudienceType.LIVE_VIDEO) {
- int ban = 0;//全模式都可以选择
- switch (roomModel.getClarityType() - 1) {
- case PLAY_MODEL_720://仅允许720(高清),ban掉1080(超高清)模式
- ban = LiveClarityCustomPopup.BAN_1080;
- break;
- case -1://没有该字段
- case PLAY_MODEL_480://仅允许480(流畅),ban掉1080(超高清)和720(高清)模式
- ban = LiveClarityCustomPopup.BAN_720;
- break;
-
- }
- LiveClarityCustomPopup liveClarityCustomPopup = new LiveClarityCustomPopup(mContext,
- IMLoginManager.get(mContext).getInt(PLAY_MODEL_KEY, PLAY_MODEL_ANCHOR),
- ban,
- true);
- new XPopup.Builder(mContext)
- .setPopupCallback(new XPopupCallback() {
- @Override
- public void onCreated(BasePopupView popupView) {
-
- }
-
- @Override
- public void beforeShow(BasePopupView popupView) {
-
- }
-
- @Override
- public void onShow(BasePopupView popupView) {
-
- }
-
- @Override
- public void onDismiss(BasePopupView popupView) {
- int selectClarity = liveClarityCustomPopup.getSelectClarity();
- if (selectClarity == PLAY_MODEL || selectClarity == IMLoginManager.get(mContext).getInt(LiveRoomPlayViewHolder.PLAY_MODEL_KEY, PLAY_MODEL_ANCHOR))
- return;
- if (selectClarity == PLAY_MODEL_ANCHOR) {
- switchStream(srcUrl, PLAY_MODEL_DEF);
- setAudiencePlayModel(selectClarity);
- } else {
- switchStream(srcUrl, selectClarity);
- }
- IMLoginManager.get(mContext).put(LiveRoomPlayViewHolder.PLAY_MODEL_KEY, selectClarity);
- showToast();
- }
-
- private void showToast() {
- if (WordUtil.isNewZh()) {
- DialogUitl.showToast(mContext, "設置成功\n" +
- "正在為你轉換中", 3000);
- } else {
- DialogUitl.showToast(mContext, "successful\n" +
- "It's being converted for you", 3000);
- }
- }
-
- @Override
- public void beforeDismiss(BasePopupView popupView) {
-
- }
-
- @Override
- public boolean onBackPressed(BasePopupView popupView) {
- return false;
- }
-
- @Override
- public void onKeyBoardStateChanged(BasePopupView popupView, int height) {
-
- }
-
- @Override
- public void onDrag(BasePopupView popupView, int value, float percent, boolean upOrLeft) {
-
- }
-
- @Override
- public void onClickOutside(BasePopupView popupView) {
-
- }
- })
- .asCustom(liveClarityCustomPopup)
- .show();
- } else if (event.getType() == LiveAudienceEvent.LiveAudienceType.FONT_SIZE) {
- int fount = 0;
- try {
- fount = Integer.parseInt(SpUtil.getStringValue("pd_live_room_fount_size"));
- } catch (Exception ignored) {
-
- }
- new LiveFontSizeSettingDialog(mContext, fount).setOnItemClickListener(new OnItemClickListener() {
- @Override
- public void onItemClick(Integer bean, int position) {
- EventBus.getDefault().post(new LiveAudienceEvent()
- .setNums(bean)
- .setType(LiveAudienceEvent.LiveAudienceType.LIVE_FONT_SIZE));
- }
- }).showDialog();
- }
- }
-
- /**
- * 把观众转换成主播
- */
-    public List<RCRTCInputStream> userinputStreamList = new ArrayList<>();
-
-
- public void UsertoRY() {
- userinputStreamList.clear();
- Log.e("ry", "主播同意了UsertoRY");
- if (userJoinLinkMic) {//已经在房间内,不再joinRoom直接去连麦
- subscribeMic(rcrtcRoom);
- return;
- }
- RCRTCRoomConfig roomConfig = RCRTCRoomConfig.Builder.create()
- // 根据实际场景,选择音视频直播:LIVE_AUDIO_VIDEO 或音频直播:LIVE_AUDIO
- .setRoomType(RCRTCRoomType.LIVE_AUDIO_VIDEO)
- .setLiveRole(RCRTCLiveRole.AUDIENCE)
- .build();
-        RCRTCEngine.getInstance().joinRoom(LiveActivity.mLiveUid, roomConfig, new IRCRTCResultDataCallback<RCRTCRoom>() {
-
-
- @Override
- public void onSuccess(RCRTCRoom data) {
- userJoinLinkMic = true;
- rcrtcRoom = data;
- subscribeMic(data);
-
- }
-
- @Override
- public void onFailed(RTCErrorCode errorCode) {
- Log.e("ry", LiveActivity.mLiveUid + errorCode + "");
- if (errorCode.getValue() == 50007) {//userJoinLinkMic可能失效,直接leaveRoom
- userJoinLinkMic = false;
- RCRTCEngine.getInstance().leaveRoom(new IRCRTCResultCallback() {
-
- @Override
- public void onFailed(RTCErrorCode errorCode) {
-
- }
-
- @Override
- public void onSuccess() {
- UsertoRY();
- }
- });
- } else {
-                    ToastUtil.show("加入房间失败" + errorCode);
- }
-
- }
- });
-
-
- }
-
- private void subscribeMic(RCRTCRoom data) {
- runOnUiThread(new Runnable() {
- @Override
- public void run() {
- RCRTCEngine.getInstance().enableSpeaker(true);
- //遍历远端用户发布的资源列表
- Log.i(TAG, "软件资源数量:" + data.getLiveStreams().size());
- for (RCRTCInputStream stream : data.getLiveStreams()) {
- Log.i(TAG, "资源流 type: " + stream.getMediaType());
- if (stream.getMediaType() == RCRTCMediaType.VIDEO) {
- //暂停播放
- mPlayer.stop();
- //如果远端用户发布的是视频流,创建显示视图RCRTCVideoView,并添加到布局中显示
- RCRTCVideoView remoteView = new RCRTCVideoView(contexts);
- ((RCRTCVideoInputStream) stream).setVideoView(remoteView);
- //todo 本demo只演示添加1个远端用户的视图
- remoteView.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
- remoteView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
- ry_view.addView(remoteView);
- Log.e("ry", stream.getMediaType() + "rcrtcOtherRoom成功 :" + data.getLiveStreams().size());
- }
- userinputStreamList.add(stream);
- }
- rcrtcRoom.getLocalUser().subscribeStreams(userinputStreamList, new IRCRTCResultCallback() {
-
- @Override
- public void onFailed(RTCErrorCode errorCode) {
- Log.e("ry", userinputStreamList.size() + LiveActivity.mLiveUid + "订阅失败" + errorCode.toString());
- ToastUtil.show(mContext.getString(R.string.live_pk_link_error));
- }
-
- @Override
- public void onSuccess() {
- Log.e("ry", "订阅资源成功");
-// new Handler().postDelayed(new Runnable() {
-// @Override
-// public void run() {
- toMic();
-// }
-// }, 3000);
-
- }
- });
- }
- });
- }
-
- /**
- * 主房间事件监听
- * 详细说明请参考文档:https://www.rongcloud.cn/docs/api/android/rtclib_v4/cn/rongcloud/rtc/api/callback/IRCRTCRoomEventsListener.html
- */
- public IRCRTCRoomEventsListener roomEventsListener = new IRCRTCRoomEventsListener() {
- @Override
-        public void onRemoteUserPublishResource(RCRTCRemoteUser rcrtcRemoteUser, List<RCRTCInputStream> list) {
- Log.e("ry", "远端来了" + list.size());
- //TODO 按需在此订阅远端用户发布的资源
- rcrtcRoom.getLocalUser().subscribeStreams(list, new IRCRTCResultCallback() {
- @Override
- public void onSuccess() {
- Log.e("ry", "远端成功" + list.size());
-
- }
-
- @Override
- public void onFailed(RTCErrorCode rtcErrorCode) {
- Log.e("ry", "远端失败" + list.size());
- ToastUtil.show(mContext.getString(R.string.live_pk_link_error));
- }
- });
- }
-
- @Override
- public void onRemoteUserMuteAudio(RCRTCRemoteUser remoteUser, RCRTCInputStream stream, boolean mute) {
-
- }
-
- @Override
- public void onRemoteUserMuteVideo(RCRTCRemoteUser remoteUser, RCRTCInputStream stream, boolean mute) {
-
- }
-
- @Override
-        public void onRemoteUserUnpublishResource(RCRTCRemoteUser remoteUser, List<RCRTCInputStream> streams) {
-
- }
-
- @Override
- public void onUserJoined(RCRTCRemoteUser remoteUser) {
-
- }
-
- @Override
- public void onUserLeft(RCRTCRemoteUser remoteUser) {
-
- }
-
- @Override
- public void onUserOffline(RCRTCRemoteUser remoteUser) {
-
- }
-
- @Override
-        public void onPublishLiveStreams(List<RCRTCInputStream> streams) {
-
- }
-
- @Override
-        public void onUnpublishLiveStreams(List<RCRTCInputStream> streams) {
-
- }
- };
-
-
-    public List<RCRTCInputStream> userinputStreamList1 = new ArrayList<>();
-    public List<RCRTCInputStream> userinputStreamList2 = new ArrayList<>();
-
- public void toMic() {
- RCRTCEngine.getInstance().getDefaultAudioStream().setAudioQuality(RCRTCParamsType.AudioQuality.MUSIC, RCRTCParamsType.AudioScenario.MUSIC_CLASSROOM);
- userinputStreamList1.clear();
- userinputStreamList2.clear();
- RCRTCParamsType.RCRTCVideoResolution rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_480_848;
- int minRate = 200;
- int maxRate = 900;
- switch (IMLoginManager.get(mContext).getSelectClarity()) {
- case 0:
- rcrtcVideoResolution = landscape == 1 ? RCRTCParamsType.RCRTCVideoResolution.parseVideoResolution(960, 720) : RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_480_848;
- minRate = landscape == 1 ? 900 : 200;
- maxRate = landscape == 1 ? 700 : 900;
- break;
- case 1:
- rcrtcVideoResolution = landscape == 1 ? RCRTCParamsType.RCRTCVideoResolution.parseVideoResolution(960, 720) : RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_720_1280;
- minRate = landscape == 1 ? 900 : 250;
- maxRate = landscape == 1 ? 700 : 2200;
- break;
- case 2:
- rcrtcVideoResolution = landscape == 1 ? RCRTCParamsType.RCRTCVideoResolution.parseVideoResolution(960, 720) : RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_1080_1920;
- minRate = landscape == 1 ? 900 : 400;
- maxRate = landscape == 1 ? 700 : 4000;
- break;
- }
-        // 按所选清晰度与横竖屏方向设置推流分辨率和码率
- RCRTCVideoStreamConfig config = RCRTCVideoStreamConfig
- .Builder.create()
- //设置分辨率
- .setVideoResolution(rcrtcVideoResolution)
- //设置帧率
- .setVideoFps(RCRTCParamsType.RCRTCVideoFps.Fps_24)
- //设置最小码率,480P下推荐200
- .setMinRate(minRate)
- //设置最大码率,480P下推荐900
- .setMaxRate(maxRate)
- .build();
- RCRTCEngine.getInstance().getDefaultVideoStream().setVideoConfig(config);
-// 创建视图渲染视图
-// RCRTCVideoView videoView = new RCRTCVideoView(contexts);
-// RCRTCEngine.getInstance().getDefaultVideoStream().setVideoView(videoView);
-//TODO 示例代码使用获取本地默认视频流、音频流演示
-        ArrayList<RCRTCOutputStream> streams = new ArrayList<>();
-// streams.add(RCRTCEngine.getInstance().getDefaultVideoStream());
- streams.add(RCRTCEngine.getInstance().getDefaultAudioStream());
- if (landscape == 1) {
- new Handler(Looper.getMainLooper()).post(new Runnable() {
- @Override
- public void run() {
- setLiveBeanLandscape(1);
- }
- });
-
- }
-// 开启摄像头
-// RCRTCEngine.getInstance().getDefaultVideoStream().startCamera(null);
-// 开始切换为主播身份
- RCRTCEngine.getInstance().getRoom().getLocalUser().switchToBroadcaster(streams, new IRCRTCSwitchRoleDataCallback() {
- /**
- * 当切换失败且SDK处于无法回退状态时回调,该角色已经无法使用当前角色继续进行音视频。
- * SDK内部会退出房间并清理所有资源,该用户只能重新加入房间才能继续音视频。
- */
- @Override
- public void onKicked() {
- Log.e("ry", "切换onKicked");
-
- }
-
- @Override
- public void onSuccess(RCRTCLiveInfo data) {
- rcrtcRoom.registerRoomListener(roomEventsListener);
- //该用户切换为主播成功,可以以主播身份进行音视频
- Log.e("ry", "切换成功");
- new Handler(Looper.getMainLooper()).post(new Runnable() {
- public void run() {
- if (onMicCallback != null) {
- onMicCallback.onMikUpdate();
- }
- //遍历远端用户列表
- for (int i = 0; i < rcrtcRoom.getRemoteUsers().size(); i++) {
- Log.e("ry", rcrtcRoom.getRemoteUsers().get(i).getUserId() + "收到rcrtcOtherRoom" + rcrtcRoom.getRemoteUsers().size());
- //遍历远端用户发布的资源列表
- for (RCRTCInputStream stream : rcrtcRoom.getRemoteUsers().get(i).getStreams()) {
- Log.e("ry", i + "收到" + stream.getMediaType() + "实打实打算" + rcrtcRoom.getRemoteUsers().get(i).getUserId());
- if (stream.getMediaType() == RCRTCMediaType.VIDEO) {
- if (userinputStreamList1.size() == 0) {
-
- //如果远端用户发布的是视频流,创建显示视图RCRTCVideoView,并添加到布局中显示
- RCRTCVideoView remoteView = new RCRTCVideoView(contexts);
- ((RCRTCVideoInputStream) stream).setStreamType(RCRTCStreamType.NORMAL);
- ((RCRTCVideoInputStream) stream).setVideoView(remoteView);
- ((RCRTCVideoInputStream) stream).setVideoFrameListener(new IRCRTCVideoInputFrameListener() {
- @Override
- public void onFrame(RCRTCRemoteVideoFrame videoFrame) {
- String debugText = " 2、安卓主播开播, 安卓用户申请连麦后 ,安卓用户这边底部画面不全 :" + videoFrame.getFrameType().name() + "\n" +
- "rotation:" + videoFrame.getRotation() + "\n" +
- "timestampNs:" + videoFrame.getTimestampNs() + "\n" +
- "分辨率:" + videoFrame.getBuffer().getHeight() + "x" + videoFrame.getBuffer().getWidth() + "\n" +
- "当前时间:" + new SimpleDateFormat("HH:mm:ss", Locale.CHINA).format(new Date()) + "\n";
- //Log.d("ry", "onFrame: " + debugText);
- new Handler(Looper.getMainLooper()).post(() -> {
- debugView.setText(debugText);
-
- });
- }
- });
- //todo 本demo只演示添加1个远端用户的视图
- ry_view.removeAllViews();
- remoteView.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
- remoteView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
- ry_view.addView(remoteView);
- }
- userinputStreamList1.add(stream);
- }
- userinputStreamList2.add(stream);
- }
- }
- //开始订阅资源
- rcrtcRoom.getLocalUser().subscribeStreams(userinputStreamList2, new IRCRTCResultCallback() {
- @Override
- public void onSuccess() {
- Log.e("ry", "订阅资源成功");
- MicStatusManager.getInstance().setMicData(MicStatusManager.MIC_TYPE_OPEN, LiveActivity.mLiveUid);
- AudioManager audioManager = (AudioManager) contexts.getSystemService(Context.AUDIO_SERVICE);
- RCRTCEngine.getInstance().enableSpeaker(true);
- audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
- audioManager.setSpeakerphoneOn(true);
-
- HttpClient.getInstance().get("live.joinDrLm", "live.joinDrLm")
- .params("roomid", LiveActivity.mLiveUid)
- .execute(new HttpCallback() {
- @Override
- public void onSuccess(int code, String msg, String[] info) {
- Log.e("ry", code + "热热热");
- }
- });
- }
-
- @Override
- public void onFailed(RTCErrorCode rtcErrorCode) {
- Log.e("ry", "订阅资源失败: " + rtcErrorCode.getReason());
- ToastUtil.show(mContext.getString(R.string.live_pk_link_error));
- }
- });
- }
- });
-
-
- }
-
- /**
- * 当切换失败且不影响当前角色继续音视频时回调
- * @param errorCode 失败错误码
- */
- @Override
- public void onFailed(RTCErrorCode errorCode) {
- Log.e("ry", errorCode + "切换失败");
-
- }
- });
- }
-
- @Override
- public void setLoadViewListener(LoadingListener listener) {
- super.setLoadViewListener(listener);
- }
-
- private void showLoadingDialog() {
- if (PLAY_MODEL == PLAY_MODEL_480) return;
-
- new DialogUitl.Builder(mContext)
- .setTitle("網絡提示")
- .setContent("系統監測到您的網絡不穩定,對此將清晰度變成了流暢,您可以在側邊菜單中的「清晰度」選擇調回。")
- .setView(R.layout.dialog_simple_tip)
- .setClickCallback(new DialogUitl.SimpleCallback() {
- @Override
- public void onConfirmClick(Dialog dialog, String content) {
- dialog.dismiss();
- }
- }).build().show();
- PLAY_MODEL = PLAY_MODEL_480;
- String url = srcUrl;
- if (videoLandscape == VIDEO_VERTICAL) {
- url = url.replace(".flv", videoRatioVertical[PLAY_MODEL_480] + videoFps[0] + ".flv");
- } else if (videoLandscape == VIDEO_HORIZONTAL) {
- url = url.replace(".flv", videoRatioHorizontal[PLAY_MODEL_480] + videoFps[0] + ".flv");
- }
- mPlayer.switchUrl(url);
- }
-
- private class ExoPlayerListener implements Player.Listener {
- String TAG = "播放流";
-
- @Override
- public void onPlayerError(PlaybackException error) {
- Player.Listener.super.onPlayerError(error);
- debugView.setText("播放出错code=" + error.errorCode + " msg=" + error.getErrorCodeName());
- }
-
- @Override
- public void onVideoSizeChanged(VideoSize videoSize) {
- Player.Listener.super.onVideoSizeChanged(videoSize);
- Log.i(TAG, "onVideoSizeChanged: width = " + videoSize.width + " height = " + videoSize.height);
- //ToastUtil.show("分辨率: 宽 = " + videoSize.width + " 高 = " + videoSize.height);
- if (videoSize.height > videoSize.width) {
- videoLandscape = VIDEO_VERTICAL;
- } else {
- videoLandscape = VIDEO_HORIZONTAL;
- }
- }
-
- @Override
- public void onPlaybackStateChanged(int playbackState) {
- Player.Listener.super.onPlaybackStateChanged(playbackState);
- if (playbackState == Player.STATE_BUFFERING) {
- //showLoadingDialog();
- mLoading.setVisibility(View.VISIBLE);
- OkDownload.getInstance().pauseAll();
- Log.i(TAG, "onPlaybackStateChanged: 缓存中");
- } else {
- mLoading.setVisibility(View.INVISIBLE);
- Log.i(TAG, "onPlaybackStateChanged: 播放中");
- }
- }
-
- @Override
- public void onIsPlayingChanged(boolean isPlaying) {
- Player.Listener.super.onIsPlayingChanged(isPlaying);
- if (isPlaying) {
- hideCover();
- mLoading.setVisibility(View.INVISIBLE);
- Log.i(TAG, "onIsPlayingChanged: 开始播放 | waitNextUrl = " + waitNextUrl);
- OkDownload.getInstance().startAll();
- if (loadingListener != null) {
- loadingListener.onPlayer();
- }
- if (waitNextUrl != null) {
- mPlayer.switchUrl(waitNextUrl);
- waitNextUrl = null;
- }
- }
- }
- }
-
- private int mPkTimeCount = 0;
-
-
- public interface PlayViewLayoutInterface {
- void playViewLayout(int width, int height);
- }
-
- private PlayViewLayoutInterface layoutInterface;
-
- public void setLayoutInterface(PlayViewLayoutInterface layoutInterface) {
- mVideoView.post(new Runnable() {
- @Override
- public void run() {
- int width = mVideoView.getMeasuredWidth();
- int height = mVideoView.getMeasuredHeight();
- if (layoutInterface != null) {
- layoutInterface.playViewLayout(width, height);
- }
- }
- });
- }
-
-
-}
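For reference, the audience link-mic path deleted above reduces to three RongCloud RTC steps: join the anchor's room as AUDIENCE, subscribe the streams the room already publishes, then switch the local user to broadcaster with its own streams. The sketch below is illustrative only and is not part of this patch; it reuses the RCRTC calls visible in the removed code, and roomId, localStreams and switchRoleCallback are placeholders.

    // Illustrative sketch, not part of the patch. Placeholders: roomId, localStreams, switchRoleCallback.
    RCRTCRoomConfig config = RCRTCRoomConfig.Builder.create()
            .setRoomType(RCRTCRoomType.LIVE_AUDIO_VIDEO) // audio + video live room
            .setLiveRole(RCRTCLiveRole.AUDIENCE)         // join as audience first
            .build();
    RCRTCEngine.getInstance().joinRoom(roomId, config, new IRCRTCResultDataCallback<RCRTCRoom>() {
        @Override
        public void onSuccess(RCRTCRoom room) {
            // Step 1: subscribe whatever the anchor is currently publishing.
            room.getLocalUser().subscribeStreams(room.getLiveStreams(), new IRCRTCResultCallback() {
                @Override
                public void onSuccess() {
                    // Step 2: promote this user to broadcaster and publish the local streams.
                    room.getLocalUser().switchToBroadcaster(localStreams, switchRoleCallback);
                }

                @Override
                public void onFailed(RTCErrorCode code) {
                    // Surface the error to the UI, as the removed code does with a toast.
                }
            });
        }

        @Override
        public void onFailed(RTCErrorCode code) {
            // Error 50007 means the user is already in the room; the removed code leaves and retries.
        }
    });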
diff --git a/live/src/main/java/com/yunbao/live/views/LivePushRyViewHolder.java b/live/src/main/java/com/yunbao/live/views/LivePushRyViewHolder.java
index 70a60f04c..d9f2c2e11 100644
--- a/live/src/main/java/com/yunbao/live/views/LivePushRyViewHolder.java
+++ b/live/src/main/java/com/yunbao/live/views/LivePushRyViewHolder.java
@@ -46,8 +46,6 @@ import com.yunbao.common.http.live.LiveNetManager;
import com.yunbao.common.manager.IMLoginManager;
import com.yunbao.common.manager.IMRTCManager;
import com.yunbao.common.manager.RandomPkManager;
-import com.yunbao.common.manager.SWAuManager;
-import com.yunbao.common.manager.SWManager;
import com.yunbao.common.utils.Bus;
import com.yunbao.common.utils.DialogUitl;
import com.yunbao.common.utils.DpUtil;
@@ -83,8 +81,8 @@ import cn.rongcloud.rtc.api.stream.RCRTCInputStream;
import cn.rongcloud.rtc.api.stream.RCRTCLiveInfo;
import cn.rongcloud.rtc.base.RCRTCParamsType;
import cn.rongcloud.rtc.base.RTCErrorCode;
-import cn.rongcloud.rtc.core.CameraVideoCapturer;
-import io.agora.rtc2.RtcEngine;
+import io.agora.beautyapi.faceunity.agora.SWAuManager;
+import io.agora.beautyapi.faceunity.agora.SWManager;
import io.rong.imlib.IRongCallback;
import io.rong.imlib.RongIMClient;
import io.rong.imlib.model.Conversation;
@@ -96,7 +94,6 @@ import io.rong.message.TextMessage;
*/
public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITXLivePushListener {
- private RtcEngine mRtcEngine;
private int mMeiBaiVal;//基础美颜 美白
private int mMoPiVal;//基础美颜 磨皮
private int mHongRunVal;//基础美颜 红润
@@ -521,6 +518,7 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
// TODO: 2024/4/13 退出对方主播直播间
SWManager.get().exitChannelToUid(Integer.parseInt(CommonAppConfig.getInstance().getUid()),pkUid1);
SWManager.get().updateMyChannelView((FrameLayout) mBigContainer);
+ btn_close.setVisibility(View.GONE);//隐藏连麦按钮
EventBus.getDefault().post(new AnchorInfoEvent(true, "", "", ""));
closeButtonGone();
@@ -590,164 +588,6 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
}
});
initRtcEngine();
-
- /* // 构建 RoomConfig,指定房间类型和主播身份:
- RCRTCRoomConfig roomConfig = RCRTCRoomConfig.Builder.create()
- // 根据实际场景,选择音视频直播:LIVE_AUDIO_VIDEO 或音频直播:LIVE_AUDIO
- .setRoomType(RCRTCRoomType.LIVE_AUDIO_VIDEO)
- .setLiveRole(BROADCASTER)
- .build();
-
-
- //调用 RCRTCEngine 下的 joinRoom 方法创建并加入一个直播房间:
- final CommonAppConfig appConfig = CommonAppConfig.getInstance();
-
- RCRTCEngine.getInstance().joinRoom(appConfig.getUid(), roomConfig, new IRCRTCResultDataCallback() {
- @Override
- public void onFailed(RTCErrorCode errorCode) {
- ToastUtil.show("开播失败" + errorCode);
- }
-
- @Override
- public void onSuccess(final RCRTCRoom room) {
-
- // 保存房间对象
- rtcRoom = room;
- IMRTCManager.getInstance().setRtcRoom(room);
-
- new Handler(Looper.getMainLooper()).post(new Runnable() {
- public void run() {
-
- RCRTCConfig config = RCRTCConfig.Builder.create()
- //是否硬解码
- .enableHardwareDecoder(true)
- //是否硬编码
- .enableHardwareEncoder(true)
- .build();
-
-
- RCRTCEngine.getInstance().init(contexts, config);
- RCRTCEngine.getInstance().getDefaultAudioStream().setAudioQuality(RCRTCParamsType.AudioQuality.MUSIC_HIGH, RCRTCParamsType.AudioScenario.MUSIC_CHATROOM);
- //設置開播分辨率
- RCRTCParamsType.RCRTCVideoResolution rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_480_848;
- int minRate = 200;
- int maxRate = 900;
- switch (IMLoginManager.get(mContext).getSelectClarity()) {
- case 0:
- rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_480_848;
- minRate = 200;
- maxRate = 900;
- break;
- case 1:
- rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_720_1280;
- minRate = 250;
- maxRate = 2200;
- break;
- case 2:
- rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_1080_1920;
- minRate = 400;
- maxRate = 4000;
- break;
- }
-
- RCRTCVideoStreamConfig videoConfigBuilder = RCRTCVideoStreamConfig.Builder.create()
- //设置分辨率
- .setVideoResolution(rcrtcVideoResolution)
- //设置帧率
- .setVideoFps(RCRTCParamsType.RCRTCVideoFps.Fps_24)
- //设置最小码率,480P下推荐200
- .setMinRate(minRate)
- //设置最大码率,480P下推荐900
- .setMaxRate(maxRate)
- .build();
-
- // 创建本地视频显示视图
- RCRTCEngine.getInstance().getDefaultVideoStream().setVideoConfig(videoConfigBuilder);
- RCRTCEngine.getInstance().getDefaultVideoStream().enableTinyStream(false);
- RCRTCVideoView rongRTCVideoView = new RCRTCVideoView(contexts);
- rongRTCVideoView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
- RCRTCEngine.getInstance().getDefaultVideoStream().setVideoView(rongRTCVideoView);
- RCRTCEngine.getInstance().getDefaultVideoStream().startCamera(new IRCRTCResultDataCallback() {
- @Override
- public void onSuccess(Boolean data) {
- //设置摄像头最大缩放比例
- boolean zoom = RCRTCEngine.getInstance().getDefaultVideoStream().setCameraZoomFactor(1.0f);
- // ToastUtil.show("设置比例="+zoom);
- Log.i("摄像头", "onSuccess: 打开摄像头");
- isNeedOpenCamera = false;
- }
-
- @Override
- public void onFailed(RTCErrorCode errorCode) {
- Log.i("摄像头", "onFailed: 打开摄像头失败 " + errorCode);
- }
- });
- RCRTCEngine.getInstance().registerEventListener(new IRCRTCEngineEventListener() {
- @Override
- public void onKicked(String roomId, RCRTCParamsType.RCRTCKickedReason kickedReason) {
-
- }
-
- @Override
- public void onError(RTCErrorCode errorCode) {
- super.onError(errorCode);
- Log.i("摄像头", "onError: 错误码" + errorCode);
- }
-
- @Override
- public void onLocalVideoEventNotify(RCRTCVideoEventCode event) {
- super.onLocalVideoEventNotify(event);
- Log.i("摄像头", "onLocalVideoEventNotify: 本地视频事件" + event.code);
- if (event.code == 3) {
- isNeedOpenCamera = true;
- }
- }
- });
- //设置摄像头最大缩放比例
- // RCRTCEngine.getInstance().getDefaultVideoStream().setCameraZoomFactor(RCRTCEngine.getInstance().getDefaultVideoStream().getCameraMaxZoomFactor());
- mPreView.addView(rongRTCVideoView);
- tencentTRTCBeautyManager = new TencentTRTCBeautyManager(mContext);
-
- //加入房间成功后可以通过 RCRTCLocalUser 对象发布本地默认音视频流,包括:麦克风采集的音频和摄像头采集的视频。
- RCRTCEngine.getInstance().getDefaultVideoStream().setEncoderMirror(true);
- if (rtcRoom == null || rtcRoom.getLocalUser() == null) {
- if (room == null || room.getLocalUser() == null) {
- ToastUtil.show("开播失败 请稍后再试,错误代码:room is null");
- ((LiveRyAnchorActivity) mContext).endLives();
- return;
- }
- rtcRoom = room;
- IMRTCManager.getInstance().setRtcRoom(room);
- }
- rtcRoom.getLocalUser().publishDefaultLiveStreams(new IRCRTCResultDataCallback() {
- @Override
- public void onSuccess(RCRTCLiveInfo rcrtcLiveInfo1) {
- rcrtcLiveInfo = rcrtcLiveInfo1;
-
- room.registerRoomListener(roomEventsListener);
-
- //美颜
-// new Handler(Looper.getMainLooper()).post(new Runnable() {
-// public void run() {
-// //旧美颜不需要了
-//
-//
-// }
-// });
- }
-
- @Override
- public void onFailed(RTCErrorCode rtcErrorCode) {
- Log.e("ry", "rtcErrorCode" + rtcErrorCode);
- }
- });
- }
-
- });
-
-
- }
- });*/
}
/**
@@ -784,6 +624,7 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
public void onLinkMicAnchorClose(){
swManager.updateMyChannelView((FrameLayout) mBigContainer);
+ LivePushRyViewHolder.btn_close.setVisibility(View.GONE);
}
@Override
@@ -825,17 +666,7 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
} else {
mCameraFront = true;
}
- RCRTCEngine.getInstance().getDefaultVideoStream().switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
- @Override
- public void onCameraSwitchDone(boolean isFrontCamera) {
-
- }
-
- @Override
- public void onCameraSwitchError(String errorDescription) {
-
- }
- });
+ swManager.switchCamera();
}
/**
@@ -982,7 +813,6 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
public void startBgm(String path) {
mBgmPath = path;
-
}
@Override
@@ -1107,6 +937,12 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
});
}
+ public void setEnableBeauty(boolean flag){
+ if(swManager!=null){
+ swManager.setEnableBeauty(flag);
+ }
+ }
+
@Subscribe(threadMode = ThreadMode.MAIN)
public void updateSub(LiveAudienceEvent event) {
if (event.getType() == LIVE_PK_END) {
@@ -1116,4 +952,12 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
}
Log.i("PK----->", "updateSub: " + isPk + "|" + event.getType());
}
+
+ @Override
+ public void onDestroy() {
+ super.onDestroy();
+ if(swManager!=null){
+ swManager.exitChannelAll();
+ }
+ }
}
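The hunk above exposes beauty toggling through the new setEnableBeauty(boolean), which simply forwards to SWManager when one is attached. A minimal sketch of how a caller might drive it follows; it is illustrative only, and mLivePushViewHolder / onBeautySwitchChanged are assumed names, not identifiers from this patch.

    // Illustrative sketch, not part of the patch. "mLivePushViewHolder" is an assumed
    // reference to the LivePushRyViewHolder owned by the anchor screen.
    private void onBeautySwitchChanged(boolean enabled) {
        if (mLivePushViewHolder != null) {
            // Delegates to SWManager.setEnableBeauty(); the view holder already null-checks swManager.
            mLivePushViewHolder.setEnableBeauty(enabled);
        }
    }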
diff --git a/main/build.gradle b/main/build.gradle
index d1895104e..7e6ea783f 100644
--- a/main/build.gradle
+++ b/main/build.gradle
@@ -72,7 +72,7 @@ dependencies {
//短视频
api project(':video')
api project(':common')
- api project(':FaceUnity')//新娱美颜
+ api project(':lib_faceunity')//新娱美颜
api project(':Share')//分享
implementation 'androidx.appcompat:appcompat:1.3.0'
implementation 'com.google.android.material:material:1.4.0'
diff --git a/pluginsForAnchor/build.gradle b/pluginsForAnchor/build.gradle
index ef5b9896d..b4c3f739f 100644
--- a/pluginsForAnchor/build.gradle
+++ b/pluginsForAnchor/build.gradle
@@ -85,7 +85,7 @@ android {
doLast {
delete(fileTree(dir: outputDir, includes: [
'model/ai_bgseg_green.bundle',
- 'model/ai_face_processor.bundle',
+ //'model/ai_face_processor.bundle',
//'model/ai_face_processor_lite.bundle',
'model/ai_hairseg.bundle',
'model/ai_hand_processor.bundle',
diff --git a/settings.gradle b/settings.gradle
index b391e1573..16ddb4134 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -7,3 +7,4 @@ include ':pluginsForAnchor'
//include ':lib_huawei'
include ':lib_google'
include ':IAP6Helper'
+include ':lib_faceunity'