add[声望升级-接入美颜]

This commit is contained in:
Martin 2024-04-18 09:47:38 +08:00
parent 0db8eefbe6
commit 583740e866
41 changed files with 3303 additions and 1920 deletions

View File

@ -1,7 +1,7 @@
apply plugin: 'com.android.library'
apply plugin: 'img-optimizer'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-android-extensions'
apply plugin: 'kotlin-parcelize'
android {
@ -58,15 +58,15 @@ repositories {
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation rootProject.ext.dependencies["appcompat-androidx"]
implementation rootProject.ext.dependencies["recyclerview-androidx"]
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
api fileTree(dir: 'libs', include: ['*.jar'])
api rootProject.ext.dependencies["appcompat-androidx"]
api rootProject.ext.dependencies["recyclerview-androidx"]
api "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
//common
implementation project(path: ':common')
api project(path: ':common')
implementation 'com.faceunity:core:8.3.1'
implementation 'com.faceunity:model:8.3.1'
api 'com.faceunity:core:8.7.0'
api 'com.faceunity:model:8.7.0'
//implementation 'com.faceunity:nama:8.3.1' //-

View File

@ -96,11 +96,17 @@ public class FaceManager implements SensorEventListener {
faceUnityView.setIFaceUnityInter(new FaceUnityView.IFaceUnityInter() {
@Override
public void onPause() {
if(onMirrorChanged!=null){
onMirrorChanged.onChange(false);
}
pauseFace = true;
}
@Override
public void onStart() {
if(onMirrorChanged!=null){
onMirrorChanged.onChange(true);
}
pauseFace = false;
}
});
@ -295,7 +301,18 @@ public class FaceManager implements SensorEventListener {
}
OnMirrorChanged onMirrorChanged;
/** Registers the listener notified when mirroring should change (driven by the FaceUnityView pause/start callbacks). */
public void setOnMirrorChanged(OnMirrorChanged onMirrorChanged) {
this.onMirrorChanged = onMirrorChanged;
}
/** Callback for face-detection status changes. */
public interface FaceStatusChanged {
/** @param num number of faces currently detected */
void onFaceChanged(int num);
}
/**
 * Callback fired when the mirror state should change: {@code false} when the
 * beauty view pauses, {@code true} when it (re)starts — see the
 * IFaceUnityInter onPause/onStart wiring above.
 */
public interface OnMirrorChanged{
/** @param flag the new mirror state (fixed parameter-name typo: was "falg") */
void onChange(boolean flag);
}
}

View File

@ -52,10 +52,10 @@ public class FURenderer extends IFURenderer {
/* 特效FURenderKit*/
private FURenderKit mFURenderKit;
public FURenderKit mFURenderKit;
/* AI道具*/
public static String BUNDLE_AI_FACE = "model" + File.separator + "ai_face_processor_lite.bundle";
public static String BUNDLE_AI_FACE = "model" + File.separator + "ai_face_processor.bundle";
public static String BUNDLE_AI_HUMAN = "model" + File.separator + "ai_human_processor.bundle";
/* GL 线程 ID */

View File

@ -15,7 +15,7 @@ public class FaceUnityConfig {
/************************** 算法Model ******************************/
// 人脸识别
public static String BUNDLE_AI_FACE = "model" + File.separator + "ai_face_processor_lite.bundle";
public static String BUNDLE_AI_FACE = "model" + File.separator + "ai_face_processor.bundle";
// 手势
public static String BUNDLE_AI_HAND = "model" + File.separator + "ai_hand_processor.bundle";

View File

@ -1,7 +1,7 @@
apply plugin: 'com.android.library'
apply plugin: 'img-optimizer'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-android-extensions'
apply plugin: 'kotlin-parcelize'
android {

View File

@ -131,7 +131,7 @@ android {
variant.mergeAssetsProvider.configure {
doLast {
delete(fileTree(dir: outputDir, includes: ['model/ai_bgseg_green.bundle',
'model/ai_face_processor.bundle',
//'model/ai_face_processor.bundle',
//'model/ai_face_processor_lite.bundle',
'model/ai_hairseg.bundle',
'model/ai_hand_processor.bundle',
@ -158,7 +158,7 @@ android {
]))
println "isPluginModel = " + rootProject.ext.manifestPlaceholders.isPluginModel
if (rootProject.ext.manifestPlaceholders.isPluginModel) {
delete(fileTree(dir: outputDir, includes: ['model/ai_face_processor_lite.bundle',
delete(fileTree(dir: outputDir, includes: ['model/ai_face_processor.bundle',
'graphics/face_beautification.bundle']))
} else {
println "不删除bundle"

View File

@ -274,6 +274,7 @@ public class AppContext extends CommonAppContext {
});
configSPApp();
//初始化美颜SDK
// FaceManager.initFaceUnity(this);
}

View File

@ -52,4 +52,10 @@ allprojects {
task clean(type: Delete) {
delete rootProject.buildDir
}
ext {
IS_PUBLISH_LOCAL=true
LIB_VERSION="1.0.6"
// AGORA_RTC_SDK="io.agora.rtc:agora-special-full:4.1.1.28"
// AGORA_RTC_SDK= "${rootProject.rootDir.absolutePath}/sdk"
AGORA_RTC_SDK="io.agora.rtc:full-sdk:4.2.6"
}

View File

@ -1,5 +1,6 @@
apply plugin: 'com.android.library'
apply plugin: 'img-optimizer'
apply plugin: 'kotlin-android'
android {
@ -226,6 +227,6 @@ dependencies {
//
api 'com.github.xiaohaibin:XBanner:androidx_v1.2.6'
//SDK
api 'io.agora.rtc:agora-special-full:4.1.1.28'
//api 'io.agora.rtc:agora-special-full:4.2.6.245'
}

View File

@ -9,9 +9,9 @@ ext {
]
manifestPlaceholders = [
//
serverHost : "https://napi.yaoulive.com",
//serverHost : "https://napi.yaoulive.com",
//
// serverHost : "https://ceshi.yaoulive.com",
serverHost : "https://ceshi.yaoulive.com",
//

View File

@ -23,8 +23,6 @@ android.enableJetifier=true
systemProp.http.proxyHost=127.0.0.1
systemProp.https.proxyHost=127.0.0.1
systemProp.https.proxyPort=7890
systemProp.http.proxyPort=7890
#systemProp.https.proxyPort=10809
#systemProp.http.proxyPort=10809
systemProp.https.proxyPort=10809
systemProp.http.proxyPort=10809
#android.enableR8.fullMode=true

4
lib_faceunity/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
/build
/src/main/assets/makeup
/src/main/assets/sticker
authpack.java

View File

@ -0,0 +1,79 @@
// Build script for the lib_faceunity module: wraps the Agora BeautyAPI
// FaceUnity integration as an Android library and publishes it via maven-publish.
apply plugin: 'com.android.library'
apply plugin: 'maven-publish'
apply plugin: 'kotlin-android'
android {
compileSdkVersion 31
buildToolsVersion "30.0.3"
defaultConfig {
minSdkVersion 21
targetSdkVersion 31
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
consumerProguardFiles "consumer-rules.pro"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
}
dependencies {
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
// AGORA_RTC_SDK (declared in the root build.gradle ext block) is either a local
// directory of jars/aars or a maven coordinate; pick the right dependency form.
if (new File("$AGORA_RTC_SDK").exists()) {
api fileTree(dir: "${AGORA_RTC_SDK}", include: ['*.jar', '*.aar'])
} else {
api "$AGORA_RTC_SDK"
}
api project(path: ':FaceUnity')
}
// Because the components are created only during the afterEvaluate phase, you must
// configure your publications using the afterEvaluate() lifecycle method.
afterEvaluate {
publishing {
publications {
// Creates a Maven publication called "release".
release(MavenPublication) {
// Applies the component for the release build variant.
from components.release
// You can then customize attributes of the publication as shown below.
groupId = 'com.github.AgoraIO-Community.BeautyAPI'
artifactId = 'FaceUnity'
version = "$LIB_VERSION"
}
// Creates a Maven publication called debug.
debug(MavenPublication) {
// Applies the component for the debug build variant.
from components.debug
groupId = 'com.github.AgoraIO-Community.BeautyAPI'
artifactId = 'FaceUnity'
version = "$LIB_VERSION"
}
}
// IS_PUBLISH_LOCAL (root ext) switches publishing to a local ./maven directory.
if(IS_PUBLISH_LOCAL){
repositories {
maven {
url = "file://${rootProject.projectDir.path}/maven"
println("maven publish to ${url}")
}
}
}
}
}

View File

21
lib_faceunity/proguard-rules.pro vendored Normal file
View File

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Manifest for the io.agora.beautyapi.faceunity library module.
     tools:overrideLibrary suppresses manifest-merger minSdk conflicts for
     consumers of this library. -->
<manifest xmlns:tools="http://schemas.android.com/tools"
package="io.agora.beautyapi.faceunity">
<uses-sdk tools:overrideLibrary="io.agora.beautyapi.faceunity" />
</manifest>

View File

@ -0,0 +1,179 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity
import android.content.Context
import android.view.View
import com.faceunity.core.faceunity.FURenderKit
import io.agora.base.VideoFrame
import io.agora.rtc2.Constants
import io.agora.rtc2.RtcEngine
// Version of this BeautyAPI wrapper.
const val VERSION = "1.0.6"
// How video frames enter the beauty pipeline.
enum class CaptureMode{
Agora, // process via Agora's internal raw-data (frame observer) interface
Custom // custom mode: the caller must feed raw video frames to BeautyAPI through onFrame
}
interface IEventCallback{
/**
 * Statistics callback, invoked once after every processed frame
 *
 * @param stats beauty processing statistics
 */
fun onBeautyStats(stats: BeautyStats)
}
data class BeautyStats(
val minCostMs:Long, // minimum per-frame cost (ms) within the stats window
val maxCostMs: Long, // maximum per-frame cost (ms) within the stats window
val averageCostMs: Long // average per-frame cost (ms) within the stats window
)
enum class MirrorMode {
// Baseline ("no mirror, normal picture") definition: with the front camera the captured stream and the on-phone preview are left-right opposites of each other; with the back camera they match.
MIRROR_LOCAL_REMOTE, // mirror both local and remote (front-camera default); stickers render correctly on both sides
MIRROR_LOCAL_ONLY, // mirror local only, remote unmirrored; remote stickers correct, local stickers mirrored. For call / e-commerce live scenes (keeps signboard text readable on the remote side); since local and remote differ, one side's text stickers will inevitably be reversed
MIRROR_REMOTE_ONLY, // mirror remote only, local unmirrored; remote stickers correct, local stickers mirrored
MIRROR_NONE // mirror neither side (back-camera default); stickers render correctly on both sides
}
data class CameraConfig(
val frontMirror: MirrorMode = MirrorMode.MIRROR_LOCAL_REMOTE, // front-camera default: mirror both local and remote
val backMirror: MirrorMode = MirrorMode.MIRROR_NONE // back-camera default: mirror neither
)
data class Config(
val context: Context, // Android Context
val rtcEngine: RtcEngine, // Agora RTC engine
val fuRenderKit: FURenderKit, // FaceUnity beauty SDK handle
val eventCallback: IEventCallback? = null, // event callback
val captureMode: CaptureMode = CaptureMode.Agora, // capture/processing mode
val statsDuration: Long = 1000, // statistics window length (ms)
val statsEnable: Boolean = false, // whether statistics collection is enabled
val cameraConfig: CameraConfig = CameraConfig() // camera mirror configuration
)
enum class ErrorCode(val value: Int) {
ERROR_OK(0), // success
ERROR_HAS_NOT_INITIALIZED(101), // another API was called before initialize succeeded (or after it failed)
ERROR_HAS_INITIALIZED(102), // initialize called again after a successful initialize
ERROR_HAS_RELEASED(103), // an API was called after release
ERROR_PROCESS_NOT_CUSTOM(104), // onFrame fed external frames while capture mode is not Custom
ERROR_VIEW_TYPE_ERROR(105), // returned by setupLocalVideo when the view type is unsupported
ERROR_FRAME_SKIPPED(106), // returned by onFrame when the frame is skipped
}
enum class BeautyPreset {
CUSTOM, // do not apply the recommended beauty parameters
DEFAULT // recommended defaults
}
// Factory for the concrete BeautyAPI implementation.
fun createFaceUnityBeautyAPI(): FaceUnityBeautyAPI = FaceUnityBeautyAPIImpl()
interface FaceUnityBeautyAPI {
/**
 * Initializes the API
 *
 * @param config configuration parameters
 * @return see ErrorCode
 */
fun initialize(config: Config): Int
/**
 * Enables/disables beauty processing
 *
 * @param enable true: enable; false: disable
 * @return see ErrorCode
 */
fun enable(enable: Boolean): Int
/**
 * Sets up local video rendering; mirroring is handled internally
 *
 * @param view a SurfaceView or TextureView
 * @param renderMode render scaling mode
 * @return see ErrorCode
 */
fun setupLocalVideo(view: View, renderMode: Int = Constants.RENDER_MODE_HIDDEN): Int
/**
 * Feeds a raw video frame from outside; only valid when CaptureMode == Custom
 *
 * @param videoFrame the raw video frame
 * @return see ErrorCode
 */
fun onFrame(videoFrame: VideoFrame): Int
/**
 * Applies Agora's recommended default beauty parameters
 *
 * @return see ErrorCode
 */
fun setBeautyPreset(preset: BeautyPreset = BeautyPreset.DEFAULT): Int
/**
 * Updates the camera mirror configuration
 */
fun updateCameraConfig(config: CameraConfig): Int
/**
 * Whether the front camera is in use.
 * NOTE: the value is only accurate while beauty processing is running; otherwise it stays true
 */
fun isFrontCamera(): Boolean
/**
 * Gets the mirror state
 *
 * @return mirror state; true: mirrored, false: not mirrored
 */
fun getMirrorApplied(): Boolean
/**
 * Runs an action on the processing thread
 *
 * @param run the action to execute
 */
fun runOnProcessThread(run: ()->Unit)
/**
 * Private parameter configuration for non-public APIs; mostly used for testing
 */
fun setParameters(key: String, value: String)
/**
 * Releases resources; once released, this instance can no longer be used
 *
 * @return see ErrorCode
 */
fun release(): Int
}

View File

@ -0,0 +1,818 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity
import android.graphics.Matrix
import android.opengl.GLES11Ext
import android.opengl.GLES20
import android.view.SurfaceView
import android.view.TextureView
import android.view.View
import com.faceunity.core.entity.FUBundleData
import com.faceunity.core.entity.FURenderInputData
import com.faceunity.core.enumeration.CameraFacingEnum
import com.faceunity.core.enumeration.FUInputBufferEnum
import com.faceunity.core.enumeration.FUInputTextureEnum
import com.faceunity.core.enumeration.FUTransformMatrixEnum
import com.faceunity.core.faceunity.FUAIKit
import com.faceunity.core.faceunity.FURenderKit
import com.faceunity.core.model.facebeauty.FaceBeauty
import com.faceunity.core.model.facebeauty.FaceBeautyFilterEnum
import io.agora.base.TextureBufferHelper
import io.agora.base.VideoFrame
import io.agora.base.VideoFrame.I420Buffer
import io.agora.base.VideoFrame.SourceType
import io.agora.base.VideoFrame.TextureBuffer
import io.agora.base.internal.video.EglBase
import io.agora.base.internal.video.YuvHelper
import io.agora.beautyapi.faceunity.utils.FuDeviceUtils
import io.agora.beautyapi.faceunity.utils.LogUtils
import io.agora.beautyapi.faceunity.utils.StatsHelper
import io.agora.beautyapi.faceunity.utils.egl.GLFrameBuffer
import io.agora.beautyapi.faceunity.utils.egl.TextureProcessHelper
import io.agora.rtc2.Constants
import io.agora.rtc2.gl.EglBaseProvider
import io.agora.rtc2.video.IVideoFrameObserver
import io.agora.rtc2.video.VideoCanvas
import java.io.File
import java.nio.ByteBuffer
import java.util.Collections
import java.util.concurrent.Callable
class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver {
private val TAG = "FaceUnityBeautyAPIImpl"
private val reportId = "scenarioAPI"
private val reportCategory = "beauty_android_$VERSION"
private var beautyMode = 0 // 0: switch automatically by buffer type; 1: always OES texture; 2: always I420; 3: single-texture mode
private var enableTextureAsync = true // whether to enable texture + async caching; cannot be toggled live during preview. On strong-GPU phones it reduces beauty processing time; on mid-range devices the gain is small.
private var textureBufferHelper: TextureBufferHelper? = null
private var wrapTextureBufferHelper: TextureBufferHelper? = null
private var byteBuffer: ByteBuffer? = null
private var byteArray: ByteArray? = null
private var config: Config? = null
private var enable: Boolean = false
private var enableChange: Boolean = false
private var isReleased: Boolean = false
private var captureMirror = false
private var renderMirror = false
private val identityMatrix = Matrix()
private var mTextureProcessHelper: TextureProcessHelper? = null
private var statsHelper: StatsHelper? = null
private var skipFrame = 0
// Kind of source buffer currently flowing through the pipeline; used to detect
// switches (OES/2D/I420, sync/async) that require skipping a few frames.
private enum class ProcessSourceType{
UNKNOWN,
TEXTURE_OES_ASYNC,
TEXTURE_2D_ASYNC,
TEXTURE_OES,
TEXTURE_2D,
I420
}
private var currProcessSourceType = ProcessSourceType.UNKNOWN
private var deviceLevel = FuDeviceUtils.DEVICEINFO_UNKNOWN
private var isFrontCamera = true
private var cameraConfig = CameraConfig()
private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN
// Actions queued by runOnProcessThread before the processing thread exists; drained once it is created.
private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>())
private val transformGLFrameBuffer = GLFrameBuffer()
/**
 * Initializes the API: stores the config, registers this instance as the raw
 * frame observer in Agora capture mode, wires up the stats helper, and tunes
 * FaceUnity AI quality to the detected device level (done only once).
 *
 * @param config configuration parameters
 * @return see ErrorCode
 */
override fun initialize(config: Config): Int {
if (this.config != null) {
LogUtils.e(TAG, "initialize >> The beauty api has been initialized!")
return ErrorCode.ERROR_HAS_INITIALIZED.value
}
this.config = config
this.cameraConfig = config.cameraConfig
if (config.captureMode == CaptureMode.Agora) {
// Agora mode: tap the raw video pipeline directly via IVideoFrameObserver.
config.rtcEngine.registerVideoFrameObserver(this)
}
statsHelper = StatsHelper(config.statsDuration){
this.config?.eventCallback?.onBeautyStats(it)
}
LogUtils.i(TAG, "initialize >> config = $config")
LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${FURenderKit.getInstance().getVersion()}")
// config face beauty
if (deviceLevel == FuDeviceUtils.DEVICEINFO_UNKNOWN) {
// Judge the device level once, then adapt landmark quality / small-face detection.
deviceLevel = FuDeviceUtils.judgeDeviceLevel(config.context)
FUAIKit.getInstance().faceProcessorSetFaceLandmarkQuality(deviceLevel)
if (deviceLevel > FuDeviceUtils.DEVICE_LEVEL_MID) {
FUAIKit.getInstance().fuFaceProcessorSetDetectSmallFace(true)
}
}
LogUtils.i(TAG, "initialize >> FuDeviceUtils deviceLevel=$deviceLevel")
config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "config=$config, deviceLevel=$deviceLevel", 0)
return ErrorCode.ERROR_OK.value
}
/**
 * Enables/disables beauty processing. In Custom capture mode a couple of
 * frames are skipped so the pipeline can settle after the switch.
 *
 * @param enable true: enable; false: disable
 * @return see ErrorCode
 */
override fun enable(enable: Boolean): Int {
LogUtils.i(TAG, "enable >> enable = $enable")
if (config == null) {
LogUtils.e(TAG, "enable >> The beauty api has not been initialized!")
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
}
if (isReleased) {
LogUtils.e(TAG, "enable >> The beauty api has been released!")
return ErrorCode.ERROR_HAS_RELEASED.value
}
if(config?.captureMode == CaptureMode.Custom){
skipFrame = 2
LogUtils.i(TAG, "enable >> skipFrame = $skipFrame")
}
config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "enable=$enable", 0)
// The actual toggle is deferred: processBeauty observes enableChange and resets the pipeline.
if(this.enable != enable){
this.enable = enable
enableChange = true
LogUtils.i(TAG, "enable >> enableChange")
}
return ErrorCode.ERROR_OK.value
}
/**
 * Sets up local rendering on a SurfaceView/TextureView with mirroring
 * disabled at the canvas level (mirroring is applied by the beauty pipeline).
 *
 * @param view a SurfaceView or TextureView
 * @param renderMode render scaling mode
 * @return see ErrorCode
 */
override fun setupLocalVideo(view: View, renderMode: Int): Int {
val rtcEngine = config?.rtcEngine
if(rtcEngine == null){
LogUtils.e(TAG, "setupLocalVideo >> The beauty api has not been initialized!")
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
}
LogUtils.i(TAG, "setupLocalVideo >> view=$view, renderMode=$renderMode")
localVideoRenderMode = renderMode
// NOTE(review): the report event label is "enable" here — looks like a copy-paste
// of the enable() call; confirm whether it should be "setupLocalVideo".
rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "view=$view, renderMode=$renderMode", 0)
if (view is TextureView || view is SurfaceView) {
val canvas = VideoCanvas(view, renderMode, 0)
canvas.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED
rtcEngine.setupLocalVideo(canvas)
return ErrorCode.ERROR_OK.value
}
return ErrorCode.ERROR_VIEW_TYPE_ERROR.value
}
/**
 * Feeds an external raw frame; only legal in Custom capture mode.
 *
 * @param videoFrame the raw video frame
 * @return see ErrorCode
 */
override fun onFrame(videoFrame: VideoFrame): Int {
val conf = config
if(conf == null){
LogUtils.e(TAG, "onFrame >> The beauty api has not been initialized!")
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
}
if (isReleased) {
LogUtils.e(TAG, "onFrame >> The beauty api has been released!")
return ErrorCode.ERROR_HAS_RELEASED.value
}
if (conf.captureMode != CaptureMode.Custom) {
LogUtils.e(TAG, "onFrame >> The capture mode is not Custom!")
return ErrorCode.ERROR_PROCESS_NOT_CUSTOM.value
}
if (processBeauty(videoFrame)) {
return ErrorCode.ERROR_OK.value
}
LogUtils.i(TAG, "onFrame >> Skip Frame.")
return ErrorCode.ERROR_FRAME_SKIPPED.value
}
/**
 * Replaces the camera mirror configuration; picked up by processBeauty on the next frame.
 */
override fun updateCameraConfig(config: CameraConfig): Int {
LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config")
cameraConfig = CameraConfig(config.frontMirror, config.backMirror)
this.config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "updateCameraConfig", "config=$config", 0)
return ErrorCode.ERROR_OK.value
}
/**
 * Runs an action on the GL processing thread: invoked inline when already on
 * that thread, posted when the thread exists, otherwise queued until the
 * texture helper is created in processBeauty.
 */
override fun runOnProcessThread(run: () -> Unit) {
if (config == null) {
LogUtils.e(TAG, "runOnProcessThread >> The beauty api has not been initialized!")
return
}
if (isReleased) {
LogUtils.e(TAG, "runOnProcessThread >> The beauty api has been released!")
return
}
if (textureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) {
run.invoke()
} else if (textureBufferHelper != null) {
textureBufferHelper?.handler?.post(run)
} else {
pendingProcessRunList.add(run)
}
}
/** Reports whether the most recently processed frame came from the front camera. */
override fun isFrontCamera(): Boolean {
return isFrontCamera
}
/**
 * Private parameter hook for internal/testing use.
 * Supported keys: "beauty_mode" (Int) and "enableTextureAsync" (Boolean);
 * any other key is silently ignored.
 */
override fun setParameters(key: String, value: String) {
if (key == "beauty_mode") {
beautyMode = value.toInt()
} else if (key == "enableTextureAsync") {
enableTextureAsync = value.toBoolean()
}
}
/**
 * Installs a FaceBeauty instance on the render kit. For BeautyPreset.DEFAULT
 * the recommended parameter set is applied; for CUSTOM an unconfigured
 * FaceBeauty is installed and the caller tunes it.
 *
 * @param preset which parameter preset to apply
 * @return see ErrorCode
 */
override fun setBeautyPreset(preset: BeautyPreset): Int {
val conf = config
if(conf == null){
LogUtils.e(TAG, "setBeautyPreset >> The beauty api has not been initialized!")
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
}
if (isReleased) {
LogUtils.e(TAG, "setBeautyPreset >> The beauty api has been released!")
return ErrorCode.ERROR_HAS_RELEASED.value
}
LogUtils.i(TAG, "setBeautyPreset >> preset = $preset")
// NOTE(review): report label "enable" looks copy-pasted; confirm whether it should be "setBeautyPreset".
config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "preset=$preset", 0)
val recommendFaceBeauty = FaceBeauty(FUBundleData("graphics" + File.separator + "face_beautification.bundle"))
if (preset == BeautyPreset.DEFAULT) {
recommendFaceBeauty.filterName = FaceBeautyFilterEnum.FENNEN_1
recommendFaceBeauty.filterIntensity = 0.7
// teeth whitening
recommendFaceBeauty.toothIntensity = 0.3
// eye brightening
recommendFaceBeauty.eyeBrightIntensity = 0.3
// eye enlarging
recommendFaceBeauty.eyeEnlargingIntensity = 0.5
// rosiness
recommendFaceBeauty.redIntensity = 0.5 * 2
// whitening
recommendFaceBeauty.colorIntensity = 0.75 * 2
// skin smoothing
recommendFaceBeauty.blurIntensity = 0.75 * 6
if (deviceLevel > FuDeviceUtils.DEVICE_LEVEL_MID) {
// On capable devices, pick mask-based smoothing only when face confidence is high.
val score = FUAIKit.getInstance().getFaceProcessorGetConfidenceScore(0)
if (score > 0.95) {
recommendFaceBeauty.blurType = 3
recommendFaceBeauty.enableBlurUseMask = true
} else {
recommendFaceBeauty.blurType = 2
recommendFaceBeauty.enableBlurUseMask = false
}
} else {
recommendFaceBeauty.blurType = 2
recommendFaceBeauty.enableBlurUseMask = false
}
// mouth shape
recommendFaceBeauty.mouthIntensity = 0.3
// nose slimming
recommendFaceBeauty.noseIntensity = 0.1
// forehead
recommendFaceBeauty.forHeadIntensity = 0.3
// chin
recommendFaceBeauty.chinIntensity = 0.0
// cheek thinning
recommendFaceBeauty.cheekThinningIntensity = 0.3
// narrow face
recommendFaceBeauty.cheekNarrowIntensity = 0.0
// small face
recommendFaceBeauty.cheekSmallIntensity = 0.0
// V-shaped face
recommendFaceBeauty.cheekVIntensity = 0.0
}
conf.fuRenderKit.faceBeauty = recommendFaceBeauty
return ErrorCode.ERROR_OK.value
}
/**
 * Releases all resources held by the API; the instance is unusable afterwards.
 *
 * Unregisters the frame observer (Agora mode), releases the FaceUnity render
 * kit and GL helpers on the processing thread, then disposes both texture
 * buffer helpers, the stats helper and any pending queued actions.
 *
 * @return see ErrorCode
 */
override fun release(): Int {
val conf = config
val fuRenderer = conf?.fuRenderKit
if(fuRenderer == null){
LogUtils.e(TAG, "release >> The beauty api has not been initialized!")
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
}
if (isReleased) {
// Fix: this log previously said "setBeautyPreset >>" (copy-paste), which mislabeled the failing API.
LogUtils.e(TAG, "release >> The beauty api has been released!")
return ErrorCode.ERROR_HAS_RELEASED.value
}
LogUtils.i(TAG, "release")
if (conf.captureMode == CaptureMode.Agora) {
conf.rtcEngine.registerVideoFrameObserver(null)
}
conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0)
isReleased = true
textureBufferHelper?.let {
textureBufferHelper = null
it.handler.removeCallbacksAndMessages(null)
// Release GL-bound resources on the processing thread before disposing it.
it.invoke {
fuRenderer.release()
mTextureProcessHelper?.release()
mTextureProcessHelper = null
transformGLFrameBuffer.release()
null
}
// it.handler.looper.quit()
it.dispose()
}
wrapTextureBufferHelper?.let {
wrapTextureBufferHelper = null
it.dispose()
}
statsHelper?.reset()
statsHelper = null
pendingProcessRunList.clear()
return ErrorCode.ERROR_OK.value
}
/**
 * Core per-frame pipeline. Resolves the desired capture/render mirror states
 * from the camera config, resets the pipeline (returning false = skip frame)
 * on mirror, camera-facing or enable changes, lazily creates the GL helper
 * threads, dispatches to the buffer-type-specific processor, and finally
 * replaces the frame's buffer with the beautified texture.
 *
 * @return true when the frame was processed and replaced; false when the frame should be skipped
 */
private fun processBeauty(videoFrame: VideoFrame): Boolean {
if (isReleased) {
LogUtils.e(TAG, "processBeauty >> The beauty api has been released!")
return false
}
// Desired mirror state of the captured (sent) stream for the current camera.
val cMirror =
if (isFrontCamera) {
when (cameraConfig.frontMirror) {
MirrorMode.MIRROR_LOCAL_REMOTE -> true
MirrorMode.MIRROR_LOCAL_ONLY -> false
MirrorMode.MIRROR_REMOTE_ONLY -> true
MirrorMode.MIRROR_NONE -> false
}
} else {
when (cameraConfig.backMirror) {
MirrorMode.MIRROR_LOCAL_REMOTE -> true
MirrorMode.MIRROR_LOCAL_ONLY -> false
MirrorMode.MIRROR_REMOTE_ONLY -> true
MirrorMode.MIRROR_NONE -> false
}
}
// Desired mirror state of the local render view for the current camera.
val rMirror =
if (isFrontCamera) {
when (cameraConfig.frontMirror) {
MirrorMode.MIRROR_LOCAL_REMOTE -> false
MirrorMode.MIRROR_LOCAL_ONLY -> true
MirrorMode.MIRROR_REMOTE_ONLY -> true
MirrorMode.MIRROR_NONE -> false
}
} else {
when (cameraConfig.backMirror) {
MirrorMode.MIRROR_LOCAL_REMOTE -> false
MirrorMode.MIRROR_LOCAL_ONLY -> true
MirrorMode.MIRROR_REMOTE_ONLY -> true
MirrorMode.MIRROR_NONE -> false
}
}
if (captureMirror != cMirror || renderMirror != rMirror) {
// Mirror state changed: update render mode, reset the texture pipeline and skip 2 frames.
LogUtils.w(TAG, "processBeauty >> enable=$enable, captureMirror=$captureMirror->$cMirror, renderMirror=$renderMirror->$rMirror")
captureMirror = cMirror
if(renderMirror != rMirror){
renderMirror = rMirror
config?.rtcEngine?.setLocalRenderMode(
localVideoRenderMode,
if(renderMirror) Constants.VIDEO_MIRROR_MODE_ENABLED else Constants.VIDEO_MIRROR_MODE_DISABLED
)
}
textureBufferHelper?.invoke {
mTextureProcessHelper?.reset()
}
skipFrame = 2
return false
}
val oldIsFrontCamera = isFrontCamera
isFrontCamera = videoFrame.sourceType == SourceType.kFrontCamera
if(oldIsFrontCamera != isFrontCamera){
// Camera switched: skip this frame so mirror state can be recomputed next time.
LogUtils.w(TAG, "processBeauty >> oldIsFrontCamera=$oldIsFrontCamera, isFrontCamera=$isFrontCamera")
return false
}
if(enableChange){
// enable() was toggled since the last frame: reset the texture pipeline once.
enableChange = false
textureBufferHelper?.invoke {
mTextureProcessHelper?.reset()
}
return false
}
if(!enable){
// Beauty disabled: pass the frame through untouched.
return true
}
if (textureBufferHelper == null) {
// Lazily create the GL processing thread and drain actions queued by runOnProcessThread.
textureBufferHelper = TextureBufferHelper.create(
"FURender",
EglBaseProvider.instance().rootEglBase.eglBaseContext
)
textureBufferHelper?.invoke {
synchronized(pendingProcessRunList){
val iterator = pendingProcessRunList.iterator()
while (iterator.hasNext()){
iterator.next().invoke()
iterator.remove()
}
}
}
LogUtils.i(TAG, "processBeauty >> create texture buffer, beautyMode=$beautyMode")
}
if (wrapTextureBufferHelper == null) {
wrapTextureBufferHelper = TextureBufferHelper.create(
"FURenderWrap",
EglBaseProvider.instance().rootEglBase.eglBaseContext
)
LogUtils.i(TAG, "processBeauty >> create texture buffer wrap, beautyMode=$beautyMode")
}
val startTime = System.currentTimeMillis()
// Dispatch by configured beautyMode (0 = auto by buffer type).
val processTexId = when (beautyMode) {
2 -> processBeautySingleBuffer(videoFrame)
3 -> {
if (enableTextureAsync) {
processBeautySingleTextureAsync(videoFrame)
} else {
processBeautySingleTexture(videoFrame)
}
}
else -> processBeautyAuto(videoFrame)
}
if(config?.statsEnable == true){
val costTime = System.currentTimeMillis() - startTime
statsHelper?.once(costTime)
}
if (processTexId <= 0) {
LogUtils.w(TAG, "processBeauty >> processTexId <= 0")
return false
}
if(skipFrame > 0){
skipFrame --
LogUtils.w(TAG, "processBeauty >> skipFrame=$skipFrame")
return false
}
// Wrap the beautified texture into a new buffer and hand it back to the SDK.
val processBuffer: TextureBuffer = wrapTextureBufferHelper?.wrapTextureBuffer(
videoFrame.rotatedWidth,
videoFrame.rotatedHeight,
TextureBuffer.Type.RGB,
processTexId,
identityMatrix
) ?: return false
videoFrame.replaceBuffer(processBuffer, 0, videoFrame.timestampNs)
return true
}
/**
 * Auto dispatch (beautyMode == 0): texture buffers go through the texture
 * path (async variant when enabled), everything else through the I420/byte
 * buffer path.
 */
private fun processBeautyAuto(videoFrame: VideoFrame): Int {
val frameBuffer = videoFrame.buffer
if (frameBuffer !is TextureBuffer) {
return processBeautySingleBuffer(videoFrame)
}
return if (enableTextureAsync) {
processBeautySingleTextureAsync(videoFrame)
} else {
processBeautySingleTexture(videoFrame)
}
}
/**
 * Texture path with async caching: feeds the (OES or 2D) texture through
 * TextureProcessHelper, whose filter renders via FURenderKit on the FURender
 * thread. Detecting a source-type change resets with a few skipped frames.
 *
 * @return the beautified texture id, or -1 to skip this frame
 */
private fun processBeautySingleTextureAsync(videoFrame: VideoFrame): Int {
val texBufferHelper = wrapTextureBufferHelper ?: return -1
val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1
when(textureBuffer.type){
TextureBuffer.Type.OES -> {
if(currProcessSourceType != ProcessSourceType.TEXTURE_OES_ASYNC){
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES_ASYNC}")
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
skipFrame = 3
}
currProcessSourceType = ProcessSourceType.TEXTURE_OES_ASYNC
return -1
}
}
else -> {
if(currProcessSourceType != ProcessSourceType.TEXTURE_2D_ASYNC){
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D_ASYNC}")
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
skipFrame = 3
}
currProcessSourceType = ProcessSourceType.TEXTURE_2D_ASYNC
// 2D sources need a longer settle time; overrides the 3 set above.
skipFrame = 6
return -1
}
}
}
if(mTextureProcessHelper == null) {
// Install the beauty filter once; it runs per cached frame on the helper's thread.
mTextureProcessHelper = TextureProcessHelper()
mTextureProcessHelper?.setFilter { frame ->
val fuRenderKit = config?.fuRenderKit ?: return@setFilter -1
val input = FURenderInputData(frame.width, frame.height)
input.texture = FURenderInputData.FUTexture(
FUInputTextureEnum.FU_ADM_FLAG_COMMON_TEXTURE,
frame.textureId
)
val isFront = frame.isFrontCamera
input.renderConfig.let {
if (isFront) {
it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
it.deviceOrientation = 270
} else {
it.cameraFacing = CameraFacingEnum.CAMERA_BACK
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
it.deviceOrientation = 270
}
}
if (isReleased) {
return@setFilter -1
}
// Render on the FURender thread; EglBase.lock guards the shared GL context.
val ret = textureBufferHelper?.invoke {
synchronized(EglBase.lock){
return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1
}
}
return@setFilter ret ?: -1
}
}
return texBufferHelper.invoke {
if(isReleased){
return@invoke -1
}
return@invoke mTextureProcessHelper?.process(
textureBuffer.textureId,
when (textureBuffer.type) {
TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES
else -> GLES20.GL_TEXTURE_2D
},
textureBuffer.width,
textureBuffer.height,
videoFrame.rotation,
textureBuffer.transformMatrixArray,
isFrontCamera,
(isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)
)?: -1
}
}
/**
 * Synchronous texture path: applies rotation/flip via transformGLFrameBuffer,
 * then renders the transformed 2D texture through FURenderKit.
 *
 * @return the beautified texture id, or -1 to skip this frame
 */
private fun processBeautySingleTexture(videoFrame: VideoFrame): Int {
val texBufferHelper = wrapTextureBufferHelper ?: return -1
val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1
when(textureBuffer.type){
TextureBuffer.Type.OES -> {
if(currProcessSourceType != ProcessSourceType.TEXTURE_OES){
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES}")
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
skipFrame = 3
}
currProcessSourceType = ProcessSourceType.TEXTURE_OES
return -1
}
}
else -> {
if(currProcessSourceType != ProcessSourceType.TEXTURE_2D){
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D}")
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
skipFrame = 3
}
currProcessSourceType = ProcessSourceType.TEXTURE_2D
skipFrame = 6
return -1
}
}
}
val width = videoFrame.rotatedWidth
val height = videoFrame.rotatedHeight
val isFront = videoFrame.sourceType == SourceType.kFrontCamera
val rotation = videoFrame.rotation
return texBufferHelper.invoke {
val fuRenderKit = config?.fuRenderKit ?: return@invoke -1
// Bake rotation + (conditional) horizontal flip into a 2D texture first.
transformGLFrameBuffer.setSize(width, height)
transformGLFrameBuffer.resetTransform()
transformGLFrameBuffer.setTexMatrix(textureBuffer.transformMatrixArray)
transformGLFrameBuffer.setRotation(rotation)
var flipH = isFront
if((isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)){
flipH = !flipH
}
transformGLFrameBuffer.setFlipH(flipH)
val transformTexId = transformGLFrameBuffer.process(
textureBuffer.textureId, when (textureBuffer.type) {
TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES
else -> GLES20.GL_TEXTURE_2D
}
)
val input = FURenderInputData(width, height)
input.texture = FURenderInputData.FUTexture(
FUInputTextureEnum.FU_ADM_FLAG_COMMON_TEXTURE,
transformTexId
)
input.renderConfig.let {
if (isFront) {
it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
it.deviceOrientation = 270
} else {
it.cameraFacing = CameraFacingEnum.CAMERA_BACK
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
it.deviceOrientation = 270
}
}
if (isReleased) {
return@invoke -1
}
synchronized(EglBase.lock){
return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1
}
}
}
    // Runs beauty processing for a CPU-buffer (I420) frame: converts it to NV21,
    // feeds FaceUnity with input/output matrices selected from camera facing,
    // mirror state and rotation, and returns the output texture id (-1 = skip).
    private fun processBeautySingleBuffer(videoFrame: VideoFrame): Int {
        val texBufferHelper = textureBufferHelper ?: return -1
        // Source-type bookkeeping: on a switch to the I420 path, log it, schedule
        // a short frame skip and drop this frame.
        if(currProcessSourceType != ProcessSourceType.I420){
            LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.I420}")
            if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
                skipFrame = 3
            }
            currProcessSourceType = ProcessSourceType.I420
            return -1
        }
        // Null while the reusable NV21 buffers are being (re)allocated — skip frame.
        val bufferArray = getNV21Buffer(videoFrame) ?: return -1
        val buffer = videoFrame.buffer
        val width = buffer.width
        val height = buffer.height
        val isFront = videoFrame.sourceType == SourceType.kFrontCamera
        // Effective horizontal mirror: the capture-mirror flag inverts behaviour
        // between front and back camera.
        val mirror = (isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)
        val rotation = videoFrame.rotation
        return texBufferHelper.invoke(Callable {
            if(isReleased){
                return@Callable -1
            }
            val fuRenderKit = config?.fuRenderKit ?: return@Callable -1
            val input = FURenderInputData(width, height)
            input.imageBuffer = FURenderInputData.FUImageBuffer(
                FUInputBufferEnum.FU_FORMAT_NV21_BUFFER,
                bufferArray
            )
            // The matrix tables below map (facing, mirror, rotation) to FaceUnity
            // transform enums; they form a matched set — modify with care.
            input.renderConfig.let {
                if (isFront) {
                    it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
                    it.inputBufferMatrix = if(mirror) {
                        when (rotation) {
                            0 -> FUTransformMatrixEnum.CCROT0
                            180 -> FUTransformMatrixEnum.CCROT180
                            else -> FUTransformMatrixEnum.CCROT90
                        }
                    } else {
                        when (rotation) {
                            0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
                            180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
                            else -> FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL
                        }
                    }
                    it.inputTextureMatrix = if(mirror) {
                        when (rotation) {
                            0 -> FUTransformMatrixEnum.CCROT0
                            180 -> FUTransformMatrixEnum.CCROT180
                            else -> FUTransformMatrixEnum.CCROT90
                        }
                    } else {
                        when (rotation) {
                            0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
                            180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
                            else -> FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL
                        }
                    }
                    it.deviceOrientation = when(rotation){
                        0 -> 270
                        180 -> 90
                        else -> 0
                    }
                    it.outputMatrix = FUTransformMatrixEnum.CCROT0
                } else {
                    it.cameraFacing = CameraFacingEnum.CAMERA_BACK
                    it.inputBufferMatrix = if(mirror) {
                        when (rotation) {
                            0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
                            180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
                            else -> FUTransformMatrixEnum.CCROT90_FLIPVERTICAL
                        }
                    } else {
                        when (rotation) {
                            0 -> FUTransformMatrixEnum.CCROT0
                            180 -> FUTransformMatrixEnum.CCROT180
                            else -> FUTransformMatrixEnum.CCROT270
                        }
                    }
                    it.inputTextureMatrix = if(mirror) {
                        when (rotation) {
                            0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
                            180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
                            else -> FUTransformMatrixEnum.CCROT90_FLIPVERTICAL
                        }
                    } else {
                        when (rotation) {
                            0 -> FUTransformMatrixEnum.CCROT0
                            180 -> FUTransformMatrixEnum.CCROT180
                            else -> FUTransformMatrixEnum.CCROT270
                        }
                    }
                    it.deviceOrientation = when(rotation){
                        0 -> 270
                        180 -> 90
                        else -> 0
                    }
                    it.outputMatrix = FUTransformMatrixEnum.CCROT0
                }
            }
            // If the texture path still has queued work, reset it and skip this
            // frame rather than mixing both pipelines in one render pass.
            mTextureProcessHelper?.let {
                if(it.size() > 0){
                    it.reset()
                    return@Callable -1
                }
            }
            // EglBase.lock serializes FaceUnity rendering across GL contexts.
            synchronized(EglBase.lock){
                return@Callable fuRenderKit.renderWithInput(input).texture?.texId ?: -1
            }
        })
    }
    // Converts the frame's buffer to an NV21 byte array, reusing a cached
    // direct ByteBuffer / ByteArray pair. On the first call — or whenever the
    // frame size changes — it only (re)allocates the buffers and returns null,
    // so the caller drops that one frame.
    private fun getNV21Buffer(videoFrame: VideoFrame): ByteArray? {
        val buffer = videoFrame.buffer
        val width = buffer.width
        val height = buffer.height
        // NV21 size: width * height * 3 / 2 (Y plane + interleaved VU plane).
        val size = (width * height * 3.0f / 2.0f + 0.5f).toInt()
        if (byteBuffer == null || byteBuffer?.capacity() != size || byteArray == null || byteArray?.size != size) {
            byteBuffer?.clear()
            byteBuffer = ByteBuffer.allocateDirect(size)
            byteArray = ByteArray(size)
            return null
        }
        val outArray = byteArray ?: return null
        val outBuffer = byteBuffer ?: return null
        val i420Buffer = buffer as? I420Buffer ?: buffer.toI420()
        // Calling I420ToNV12 with the U and V planes swapped interleaves V
        // first, which produces NV21 rather than NV12 — intentional.
        YuvHelper.I420ToNV12(
            i420Buffer.dataY, i420Buffer.strideY,
            i420Buffer.dataV, i420Buffer.strideV,
            i420Buffer.dataU, i420Buffer.strideU,
            outBuffer, width, height
        )
        outBuffer.position(0)
        outBuffer.get(outArray)
        // Release only if toI420() created a temporary conversion copy.
        if(buffer !is I420Buffer){
            i420Buffer.release()
        }
        return outArray
    }
    // IVideoFrameObserver implements
    // Entry point of the capture pipeline: every locally captured frame is run
    // through the beauty processor; returning false drops the frame.
    override fun onCaptureVideoFrame(sourceType: Int, videoFrame: VideoFrame?): Boolean {
        videoFrame ?: return false
        return processBeauty(videoFrame)
    }
    // Pre-encode, media-player and remote-render frames are not beautified.
    override fun onPreEncodeVideoFrame(sourceType: Int, videoFrame: VideoFrame?) = false
    override fun onMediaPlayerVideoFrame(videoFrame: VideoFrame?, mediaPlayerId: Int) = false
    override fun onRenderVideoFrame(
        channelId: String?,
        uid: Int,
        videoFrame: VideoFrame?
    ) = false
    // Observer configuration: read-write access to post-capture frames.
    override fun getVideoFrameProcessMode() = IVideoFrameObserver.PROCESS_MODE_READ_WRITE
    override fun getVideoFormatPreference() = IVideoFrameObserver.VIDEO_PIXEL_DEFAULT
    override fun getRotationApplied() = false
    // Let the SDK mirror the capture only while beauty is disabled; when it is
    // enabled, mirroring is applied inside the beauty pipeline instead.
    override fun getMirrorApplied() = captureMirror && !enable
    override fun getObservedFramePosition() = IVideoFrameObserver.POSITION_POST_CAPTURER
}

View File

@ -1,10 +1,11 @@
package com.yunbao.common.manager;
package io.agora.beautyapi.faceunity.agora;
import android.app.Activity;
import android.content.Context;
import android.view.SurfaceView;
import android.widget.FrameLayout;
import com.yunbao.common.CommonAppConfig;
import com.yunbao.common.CommonAppContext;
import com.yunbao.common.manager.base.BaseCacheManager;

View File

@ -1,4 +1,4 @@
package com.yunbao.common.manager;
package io.agora.beautyapi.faceunity.agora;
import android.app.Activity;
import android.content.Context;
@ -10,11 +10,20 @@ import com.yunbao.common.CommonAppContext;
import com.yunbao.common.bean.SwTokenModel;
import com.yunbao.common.http.base.HttpCallback;
import com.yunbao.common.http.live.LiveNetManager;
import com.yunbao.common.manager.IMLoginManager;
import com.yunbao.common.manager.base.BaseCacheManager;
import com.yunbao.common.utils.L;
import com.yunbao.common.utils.StringUtil;
import com.yunbao.common.utils.ToastUtil;
import com.yunbao.faceunity.utils.FURenderer;
import io.agora.beautyapi.faceunity.BeautyPreset;
import io.agora.beautyapi.faceunity.CameraConfig;
import io.agora.beautyapi.faceunity.CaptureMode;
import io.agora.beautyapi.faceunity.Config;
import io.agora.beautyapi.faceunity.FaceUnityBeautyAPI;
import io.agora.beautyapi.faceunity.FaceUnityBeautyAPIKt;
import io.agora.beautyapi.faceunity.MirrorMode;
import io.agora.rtc2.ChannelMediaOptions;
import io.agora.rtc2.Constants;
import io.agora.rtc2.IRtcEngineEventHandler;
@ -35,8 +44,9 @@ public class SWManager extends BaseCacheManager {
private Activity mContext;
public static SWManager manager;
private RtcEngineEx mRtcEngine;
private final FaceUnityBeautyAPI faceUnityBeautyAPI = FaceUnityBeautyAPIKt.createFaceUnityBeautyAPI();
private int uid;
VideoEncoderConfiguration cfg;
private VideoEncoderConfiguration cfg;
private FrameLayout anchorContainer; //主播视图
private FrameLayout pkContainer1; //pk主播视图1
private FrameLayout pkContainer2; //pk主播视图2
@ -111,8 +121,23 @@ public class SWManager extends BaseCacheManager {
// 创建一个 SurfaceView 对象并将其作为 FrameLayout 的子对象
SurfaceView surfaceView = new SurfaceView(mContext);
anchorContainer.addView(surfaceView);
Config config = new Config(mContext, mRtcEngine, FURenderer.INSTANCE.mFURenderKit, null, CaptureMode.Agora, 0, false, new CameraConfig(MirrorMode.MIRROR_NONE,MirrorMode.MIRROR_NONE));
faceUnityBeautyAPI.initialize(config);
faceUnityBeautyAPI.enable(true);
faceUnityBeautyAPI.setBeautyPreset(BeautyPreset.CUSTOM);
//FaceUnityBeautyManage.getInstance().mFURenderKit.setFaceBeauty();
// 设置视图
mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
faceUnityBeautyAPI.setupLocalVideo(surfaceView, Constants.RENDER_MODE_HIDDEN);
//faceUnityBeautyAPI.updateCameraConfig(new CameraConfig(MirrorMode.MIRROR_NONE,MirrorMode.MIRROR_NONE));
//mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
}
    /** Enables or disables the FaceUnity beauty pipeline for the local video. */
    public void setEnableBeauty(boolean flag){
        if(faceUnityBeautyAPI!=null){
            faceUnityBeautyAPI.enable(flag);
        }
    }
/**
@ -133,9 +158,51 @@ public class SWManager extends BaseCacheManager {
break;
}
mRtcEngine.setVideoEncoderConfiguration(cfg);
// 创建一个 SurfaceView 对象并将其作为 FrameLayout 的子对象
SurfaceView surfaceView = new SurfaceView(mContext);
anchorContainer.addView(surfaceView);
// 设置视图
mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
}
}
    /**
     * Toggles the local-video mirror mode on the current encoder configuration
     * and re-binds a fresh SurfaceView so the preview reflects the new setting.
     * NOTE(review): a new SurfaceView is added to anchorContainer on every
     * toggle without removing the previous one — confirm the container is
     * cleared elsewhere, otherwise views accumulate.
     */
    public void setMirrorMode(){
        if(cfg!=null&&mRtcEngine!=null){
            L.eSw("setMirrorMode设置镜像"+cfg.mirrorMode);
            if(cfg.mirrorMode==VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_ENABLED){
                cfg.mirrorMode = VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_DISABLED; // disable mirroring
            }else{
                cfg.mirrorMode = VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_ENABLED; // enable mirroring
            }
            mRtcEngine.setVideoEncoderConfiguration(cfg);
            SurfaceView surfaceView = new SurfaceView(mContext);
            anchorContainer.addView(surfaceView);
            mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
        }
    }
    /**
     * Switches between the front and back camera.
     * (Original Javadoc said "set mirror mode" — copy-paste remnant.)
     */
    public void switchCamera(){
        if(mRtcEngine!=null){
            mRtcEngine.switchCamera();
        }
    }
    /**
     * Applies a beauty preset.
     * NOTE(review): currently a no-op — the body was left empty; either
     * implement or remove before release.
     */
    public void setBeautPreset(){
        if(mRtcEngine!=null){
        }
    }
/**
* 创建直播间
*/
@ -231,6 +298,15 @@ public class SWManager extends BaseCacheManager {
mRtcEngine.leaveChannelEx(rtcConnection);
}
    /**
     * Leaves the primary channel (and with it, all joined channels).
     */
    public void exitChannelAll(){
        if(mRtcEngine!=null){
            mRtcEngine.leaveChannel();
        }
    }
//设置对方主播视图
private void setupRemoteVideo(int uid) {
SurfaceView surfaceView = new SurfaceView(mContext);
@ -292,6 +368,12 @@ public class SWManager extends BaseCacheManager {
super.onLeaveChannel(stats);
L.eSw("onLeaveChannel退出頻道");
}
@Override
public void onLocalVideoStateChanged(Constants.VideoSourceType source, int state, int error) {
super.onLocalVideoStateChanged(source, state, error);
L.eSw("onLocalVideoStateChanged_source"+source+" state_"+state+" error_"+error);
}
};
private void refreshToken() {

View File

@ -0,0 +1,607 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils;
import android.annotation.TargetApi;
import android.app.ActivityManager;
import android.content.Context;
import android.os.Build;
import android.text.TextUtils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
 * Heuristics for classifying an Android device into LOW / MID / HIGH performance
 * tiers based on total RAM, CPU core count/frequency and known device models.
 * All methods are static; results come from Android APIs and /proc, /sys files.
 */
public class FuDeviceUtils {

    public static final String TAG = "FuDeviceUtils";

    /** Performance tiers returned by {@link #judgeDeviceLevel(Context)}. */
    public static final int DEVICE_LEVEL_HIGH = 2;
    public static final int DEVICE_LEVEL_MID = 1;
    public static final int DEVICE_LEVEL_LOW = 0;

    /**
     * The default return value of any method in this class when an
     * error occurs or when processing fails (Currently set to -1). Use this to check if
     * the information about the device in question was successfully obtained.
     */
    public static final int DEVICEINFO_UNKNOWN = -1;

    /** Accepts only core entries named "cpu&lt;digits&gt;" (cpu0, cpu1, ...). */
    private static final FileFilter CPU_FILTER = new FileFilter() {
        @Override
        public boolean accept(File pathname) {
            String path = pathname.getName();
            // regex is slow, so checking char by char.
            if (path.startsWith("cpu")) {
                for (int i = 3; i < path.length(); i++) {
                    if (!Character.isDigit(path.charAt(i))) {
                        return false;
                    }
                }
                return true;
            }
            return false;
        }
    };

    /**
     * Calculates the total RAM of the device through Android API or /proc/meminfo.
     *
     * @param c - Context object for current running activity.
     * @return Total RAM that the device has, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    public static long getTotalMemory(Context c) {
        // memInfo.totalMem not supported in pre-Jelly Bean APIs.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            ActivityManager.MemoryInfo memInfo = new ActivityManager.MemoryInfo();
            ActivityManager am = (ActivityManager) c.getSystemService(Context.ACTIVITY_SERVICE);
            am.getMemoryInfo(memInfo);
            // memInfo is locally constructed and can never be null here.
            return memInfo.totalMem;
        }
        // Legacy fallback: parse "MemTotal" (reported in kB) out of /proc/meminfo.
        long totalMem = DEVICEINFO_UNKNOWN;
        try (FileInputStream stream = new FileInputStream("/proc/meminfo")) {
            totalMem = parseFileForValue("MemTotal", stream);
            totalMem *= 1024;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return totalMem;
    }

    /**
     * Method for reading the clock speed of a CPU core on the device. Will read from either
     * {@code /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq} or {@code /proc/cpuinfo}.
     *
     * @return Clock speed of a core on the device in kHz, or -1 in the event of an error.
     */
    public static int getCPUMaxFreqKHz() {
        int maxFreq = DEVICEINFO_UNKNOWN;
        try {
            for (int i = 0; i < getNumberOfCPUCores(); i++) {
                String filename =
                        "/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
                File cpuInfoMaxFreqFile = new File(filename);
                if (cpuInfoMaxFreqFile.exists() && cpuInfoMaxFreqFile.canRead()) {
                    byte[] buffer = new byte[128];
                    try (FileInputStream stream = new FileInputStream(cpuInfoMaxFreqFile)) {
                        int read = stream.read(buffer);
                        int endIndex = 0;
                        // Trim the first number out of the byte buffer.
                        // BUGFIX: bounds check must come BEFORE the array access;
                        // the original order (isDigit first) could throw
                        // ArrayIndexOutOfBoundsException when the buffer filled
                        // entirely with digits. Also limit by bytes actually read.
                        while (endIndex < read && Character.isDigit(buffer[endIndex])) {
                            endIndex++;
                        }
                        String str = new String(buffer, 0, endIndex);
                        int freqBound = Integer.parseInt(str);
                        if (freqBound > maxFreq) {
                            maxFreq = freqBound;
                        }
                    } catch (NumberFormatException e) {
                        // Fall through and use /proc/cpuinfo.
                    }
                }
            }
            if (maxFreq == DEVICEINFO_UNKNOWN) {
                try (FileInputStream stream = new FileInputStream("/proc/cpuinfo")) {
                    int freqBound = parseFileForValue("cpu MHz", stream);
                    // NOTE: x1024 (not x1000) deliberately mirrors the /1024 in
                    // judgeCPU(); the pair must stay consistent.
                    freqBound *= 1024; // MHz -> kHz
                    if (freqBound > maxFreq) {
                        maxFreq = freqBound;
                    }
                }
            }
        } catch (IOException e) {
            maxFreq = DEVICEINFO_UNKNOWN; // Fall through and return unknown.
        }
        return maxFreq;
    }

    /**
     * Reads the number of CPU cores from the first available information from
     * {@code /sys/devices/system/cpu/possible}, {@code /sys/devices/system/cpu/present},
     * then {@code /sys/devices/system/cpu/}.
     *
     * @return Number of CPU cores in the phone, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
     */
    public static int getNumberOfCPUCores() {
        if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.GINGERBREAD_MR1) {
            // Gingerbread doesn't support giving a single application access to both cores, but a
            // handful of devices (Atrix 4G and Droid X2 for example) were released with a dual-core
            // chipset and Gingerbread; that can let an app in the background run without impacting
            // the foreground application. But for our purposes, it makes them single core.
            return 1;
        }
        int cores;
        try {
            cores = getCoresFromFileInfo("/sys/devices/system/cpu/possible");
            if (cores == DEVICEINFO_UNKNOWN) {
                cores = getCoresFromFileInfo("/sys/devices/system/cpu/present");
            }
            if (cores == DEVICEINFO_UNKNOWN) {
                cores = new File("/sys/devices/system/cpu/").listFiles(CPU_FILTER).length;
            }
        } catch (SecurityException e) {
            cores = DEVICEINFO_UNKNOWN;
        } catch (NullPointerException e) {
            // listFiles() returns null when the directory is unreadable.
            cores = DEVICEINFO_UNKNOWN;
        }
        return cores;
    }

    /**
     * Tries to read file contents from the file location to determine the number of cores on device.
     *
     * @param fileLocation The location of the file with CPU information
     * @return Number of CPU cores in the phone, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
     */
    private static int getCoresFromFileInfo(String fileLocation) {
        // try-with-resources replaces the original manual close-in-finally.
        try (BufferedReader buf = new BufferedReader(
                new InputStreamReader(new FileInputStream(fileLocation)))) {
            return getCoresFromFileString(buf.readLine());
        } catch (IOException e) {
            return DEVICEINFO_UNKNOWN;
        }
    }

    /**
     * Converts from a CPU core information format to number of cores.
     *
     * @param str The CPU core information string, in the format of "0-N"
     * @return The number of cores represented by this string
     */
    private static int getCoresFromFileString(String str) {
        if (str == null || !str.matches("0-[\\d]+$")) {
            return DEVICEINFO_UNKNOWN;
        }
        return Integer.parseInt(str.substring(2)) + 1;
    }

    /**
     * Helper method for reading values from system files, using a minimised buffer.
     *
     * @param textToMatch - Text in the system files to read for.
     * @param stream      - FileInputStream of the system file being read from.
     * @return A numerical value following textToMatch in specified the system file.
     * -1 in the event of a failure.
     */
    private static int parseFileForValue(String textToMatch, FileInputStream stream) {
        byte[] buffer = new byte[1024];
        try {
            int length = stream.read(buffer);
            for (int i = 0; i < length; i++) {
                if (buffer[i] == '\n' || i == 0) {
                    if (buffer[i] == '\n') i++;
                    for (int j = i; j < length; j++) {
                        int textIndex = j - i;
                        // Text doesn't match query at some point.
                        if (buffer[j] != textToMatch.charAt(textIndex)) {
                            break;
                        }
                        // Text matches query here.
                        if (textIndex == textToMatch.length() - 1) {
                            return extractValue(buffer, j);
                        }
                    }
                }
            }
        } catch (IOException e) {
            // Ignore any exceptions and fall through to return unknown value.
        } catch (NumberFormatException e) {
            // Same: treat unparsable content as unknown.
        }
        return DEVICEINFO_UNKNOWN;
    }

    /**
     * Helper method used by {@link #parseFileForValue(String, FileInputStream) parseFileForValue}. Parses
     * the next available number after the match in the file being read and returns it as an integer.
     *
     * @param index - The index in the buffer array to begin looking.
     * @return The next number on that line in the buffer, returned as an int. Returns
     * DEVICEINFO_UNKNOWN = -1 in the event that no more numbers exist on the same line.
     */
    private static int extractValue(byte[] buffer, int index) {
        while (index < buffer.length && buffer[index] != '\n') {
            if (Character.isDigit(buffer[index])) {
                int start = index;
                index++;
                while (index < buffer.length && Character.isDigit(buffer[index])) {
                    index++;
                }
                // BUGFIX: the original used the deprecated
                // String(byte[], int hibyte, int offset, int count) constructor;
                // this is the standard ASCII-safe equivalent.
                String str = new String(buffer, start, index - start);
                return Integer.parseInt(str);
            }
            index++;
        }
        return DEVICEINFO_UNKNOWN;
    }

    /**
     * Returns the currently available RAM in bytes.
     *
     * @param context any Context, used to reach the ActivityManager.
     * @return available memory in bytes.
     */
    public static long getAvailMemory(Context context) {
        ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        ActivityManager.MemoryInfo mi = new ActivityManager.MemoryInfo();
        am.getMemoryInfo(mi);
        return mi.availMem;
    }

    /** @return the device manufacturer/brand string. */
    public static String getBrand() {
        return Build.BRAND;
    }

    /** @return the device model string. */
    public static String getModel() {
        return Build.MODEL;
    }

    /**
     * Returns the CPU/chipset name, read from the "Hardware" line of
     * /proc/cpuinfo when present, otherwise {@link Build#HARDWARE}.
     */
    public static String getHardWare() {
        // try-with-resources: the original leaked both readers.
        try (BufferedReader br = new BufferedReader(new FileReader("/proc/cpuinfo"))) {
            String text;
            String last = "";
            while ((text = br.readLine()) != null) {
                last = text;
            }
            // On most chipsets the CPU model is the last line: "Hardware : <name>".
            if (last.contains("Hardware")) {
                String[] hardWare = last.split(":\\s+", 2);
                // Guard against a "Hardware" line with no ": " separator.
                if (hardWare.length > 1) {
                    return hardWare[1];
                }
            }
        } catch (IOException e) {
            // FileNotFoundException is an IOException; one catch covers both.
            e.printStackTrace();
        }
        return Build.HARDWARE;
    }

    /**
     * Level judgement based on current memory and CPU.
     *
     * @param context - Context object.
     * @return one of DEVICE_LEVEL_LOW / DEVICE_LEVEL_MID / DEVICE_LEVEL_HIGH.
     */
    public static int judgeDeviceLevel(Context context) {
        int level;
        // Some devices do not follow the heuristics below; check the explicit
        // model allow/deny lists first.
        int specialDevice = judgeDeviceLevelInDeviceName();
        if (specialDevice >= 0) return specialDevice;

        int ramLevel = judgeMemory(context);
        int cpuLevel = judgeCPU();
        if (ramLevel == 0 || ramLevel == 1 || cpuLevel == 0) {
            level = DEVICE_LEVEL_LOW;
        } else {
            if (cpuLevel > 1) {
                level = DEVICE_LEVEL_HIGH;
            } else {
                level = DEVICE_LEVEL_MID;
            }
        }
        LogUtils.d(TAG, "DeviceLevel: " + level);
        return level;
    }

    /**
     * Looks the current model up in the hard-coded device lists.
     *
     * @return a DEVICE_LEVEL_* constant, or -1 when the model is not listed.
     */
    private static int judgeDeviceLevelInDeviceName() {
        String currentDeviceName = getDeviceName();
        for (String deviceName : upscaleDevice) {
            if (deviceName.equals(currentDeviceName)) {
                return DEVICE_LEVEL_HIGH;
            }
        }
        for (String deviceName : middleDevice) {
            if (deviceName.equals(currentDeviceName)) {
                return DEVICE_LEVEL_MID;
            }
        }
        for (String deviceName : lowDevice) {
            if (deviceName.equals(currentDeviceName)) {
                return DEVICE_LEVEL_LOW;
            }
        }
        return -1;
    }

    public static final String[] upscaleDevice = {"vivo X6S A", "MHA-AL00", "VKY-AL00", "V1838A"};
    public static final String[] lowDevice = {};
    public static final String[] middleDevice = {"OPPO R11s", "PAR-AL00", "MI 8 Lite", "ONEPLUS A6000", "PRO 6", "PRO 7 Plus"};

    /**
     * Grades total RAM into bands 0..4 (0 = <=2GB ... 4 = >6GB flagship).
     */
    private static int judgeMemory(Context context) {
        long ramMB = getTotalMemory(context) / (1024 * 1024);
        int level;
        if (ramMB <= 2000) {          // <=2GB: lowest band
            level = 0;
        } else if (ramMB <= 3000) {   // 2-3GB
            level = 1;
        } else if (ramMB <= 4000) {   // ~4GB: 2018 mainstream mid-range
            level = 2;
        } else if (ramMB <= 6000) {   // ~6GB: high-end
            level = 3;
        } else {                      // >6GB: flagship
            level = 4;
        }
        return level;
    }

    /**
     * Grades the CPU by vendor-specific rules when the chipset name is known,
     * otherwise by max core frequency alone.
     */
    private static int judgeCPU() {
        int level = 0;
        String cpuName = getHardWare();
        int freqMHz = getCPUMaxFreqKHz() / 1024;

        // If the chipset name is available, use the vendor-specific heuristic.
        if (!TextUtils.isEmpty(cpuName)) {
            if (cpuName.contains("qcom") || cpuName.contains("Qualcomm")) { // Qualcomm Snapdragon
                return judgeQualcommCPU(cpuName, freqMHz);
            } else if (cpuName.contains("hi") || cpuName.contains("kirin")) { // HiSilicon Kirin
                return judgeSkinCPU(cpuName, freqMHz);
            } else if (cpuName.contains("MT")) { // MediaTek
                return judgeMTCPU(cpuName, freqMHz);
            }
        }

        // Generic frequency-only fallback.
        if (freqMHz <= 1600) {        // ~1.5GHz: low-end
            level = 0;
        } else if (freqMHz <= 1950) { // ~2GHz: lower mid-range
            level = 1;
        } else if (freqMHz <= 2500) { // 2.2-2.3GHz: upper mid-range
            level = 2;
        } else {                      // high-end
            level = 3;
        }
        return level;
    }

    /**
     * MediaTek chipset grading: models before the P60 (MT6771) are treated as
     * low/mid tier regardless of frequency.
     */
    private static int judgeMTCPU(String cpuName, int freqMHz) {
        int level = 0;
        int mtCPUVersion = getMTCPUVersion(cpuName);
        if (mtCPUVersion == -1) {
            // Version unknown: apply a stricter frequency-only filter.
            if (freqMHz <= 1600) {
                level = 0;
            } else if (freqMHz <= 2200) {
                level = 1;
            } else if (freqMHz <= 2700) {
                level = 2;
            } else {
                level = 3;
            }
        } else if (mtCPUVersion < 6771) {
            // All pre-MT6771 parts are low/mid tier.
            if (freqMHz <= 1600) {
                level = 0;
            } else {
                level = 1;
            }
        } else {
            if (freqMHz <= 1600) {
                level = 0;
            } else if (freqMHz <= 1900) {
                level = 1;
            } else if (freqMHz <= 2500) {
                level = 2;
            } else {
                level = 3;
            }
        }
        return level;
    }

    /**
     * Extracts the 4-digit version from a MediaTek chipset name ("MT6771..." -> 6771).
     *
     * @param cpuName chipset name starting with "MT".
     * @return the numeric version, or -1 when it cannot be parsed.
     */
    private static int getMTCPUVersion(String cpuName) {
        int cpuVersion = -1;
        if (cpuName.length() > 5) {
            String cpuVersionStr = cpuName.substring(2, 6);
            try {
                cpuVersion = Integer.parseInt(cpuVersionStr);
            } catch (NumberFormatException exception) {
                exception.printStackTrace();
            }
        }
        return cpuVersion;
    }

    /**
     * Qualcomm Snapdragon grading: legacy "MSM" parts cap at mid tier;
     * newer ("SDM"-era) parts grade by frequency.
     */
    private static int judgeQualcommCPU(String cpuName, int freqMHz) {
        int level = 0;
        if (cpuName.contains("MSM")) {
            // Older chipset family.
            if (freqMHz <= 1600) {
                level = 0;
            } else {
                level = 1;
            }
        } else {
            // Newer chipset family.
            if (freqMHz <= 1600) {
                level = 0;
            } else if (freqMHz <= 2000) {
                level = 1;
            } else if (freqMHz <= 2500) {
                level = 2;
            } else {
                level = 3;
            }
        }
        return level;
    }

    /**
     * HiSilicon Kirin grading: "hi"-prefixed parts are low/mid tier;
     * "kirin"-prefixed parts grade by frequency.
     *
     * @param freqMHz max core frequency in MHz.
     */
    private static int judgeSkinCPU(String cpuName, int freqMHz) {
        int level = 0;
        if (cpuName.startsWith("hi")) {
            // Older HiSilicon parts: low/mid only.
            if (freqMHz <= 1600) {
                level = 0;
            } else if (freqMHz <= 2000) {
                level = 1;
            }
        } else {
            // Kirin-branded parts.
            if (freqMHz <= 1600) {
                level = 0;
            } else if (freqMHz <= 2000) {
                level = 1;
            } else if (freqMHz <= 2500) {
                level = 2;
            } else {
                level = 3;
            }
        }
        return level;
    }

    public static final String Nexus_6P = "Nexus 6P";

    /**
     * @return the device model name ({@link Build#MODEL}), or "" when unset.
     */
    public static String getDeviceName() {
        String deviceName = "";
        if (Build.MODEL != null) deviceName = Build.MODEL;
        LogUtils.e(TAG, "deviceName: " + deviceName);
        return deviceName;
    }
}

View File

@ -0,0 +1,57 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils
import io.agora.base.internal.Logging
/**
 * Logging facade for the BeautyAPI: prefixes every message with
 * "[BeautyAPI][FaceUnity]" and routes it through the Agora SDK logger.
 */
object LogUtils {
    private const val beautyType = "FaceUnity"

    /** Logs at INFO level. [content] is a java.lang.String.format pattern. */
    @JvmStatic
    fun i(tag: String, content: String, vararg args: Any) {
        // BUGFIX: the vararg array must be spread with '*'; passing `args`
        // directly hands String.format a single array argument, leaving all
        // format placeholders unfilled.
        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
        Logging.log(Logging.Severity.LS_INFO, tag, consoleMessage)
    }

    /** Logs at DEBUG level. */
    @JvmStatic
    fun d(tag: String, content: String, vararg args: Any) {
        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
        Logging.d(tag, consoleMessage)
    }

    /** Logs at WARNING level. */
    @JvmStatic
    fun w(tag: String, content: String, vararg args: Any) {
        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
        Logging.w(tag, consoleMessage)
    }

    /** Logs at ERROR level. */
    @JvmStatic
    fun e(tag: String, content: String, vararg args: Any) {
        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
        Logging.e(tag, consoleMessage)
    }
}

View File

@ -0,0 +1,80 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils
import android.os.Handler
import android.os.Looper
import io.agora.beautyapi.faceunity.BeautyStats
import kotlin.math.max
import kotlin.math.min
/**
 * Accumulates per-frame processing costs and periodically reports the
 * min / max / average of each completed window on the main thread.
 *
 * @param statsDuration reporting window length in milliseconds.
 * @param onStatsChanged invoked on the main thread with the finished window's stats.
 */
class StatsHelper(
    private val statsDuration: Long,
    private val onStatsChanged: (BeautyStats) -> Unit
) {

    private val uiHandler = Handler(Looper.getMainLooper())
    private var windowStartMs = 0L
    private val recordedCosts = mutableListOf<Long>()
    private var largestCost = 0L
    private var smallestCost = Long.MAX_VALUE

    /**
     * Records one frame cost. Once the window has lasted at least
     * [statsDuration], the accumulated stats are posted to [onStatsChanged]
     * and a fresh window is opened containing this cost.
     */
    fun once(cost: Long) {
        val now = System.currentTimeMillis()
        if (windowStartMs == 0L) {
            // First sample (or first after reset): open the window.
            windowStartMs = now
        } else if (now - windowStartMs >= statsDuration) {
            // Window finished: snapshot, report on the main thread, restart.
            windowStartMs = now
            var sum = 0L
            for (recorded in recordedCosts) {
                sum += recorded
            }
            val average = sum / recordedCosts.size
            val reportMin = smallestCost
            val reportMax = largestCost
            uiHandler.post {
                onStatsChanged.invoke(BeautyStats(reportMin, reportMax, average))
            }
            recordedCosts.clear()
            largestCost = 0L
            smallestCost = Long.MAX_VALUE
        }
        recordedCosts.add(cost)
        largestCost = max(largestCost, cost)
        smallestCost = min(smallestCost, cost)
    }

    /** Drops any pending report and clears all accumulated state. */
    fun reset() {
        uiHandler.removeCallbacksAndMessages(null)
        windowStartMs = 0
        recordedCosts.clear()
        largestCost = 0L
        smallestCost = Long.MAX_VALUE
    }
}

View File

@ -0,0 +1,210 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils.egl;
import static android.opengl.EGL14.EGL_CONTEXT_CLIENT_VERSION;
import android.opengl.GLDebugHelper;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import io.agora.beautyapi.faceunity.utils.LogUtils;
public class EGLContextHelper {
    private static final String DEBUG_TAG = "EGLContextManager";
    // Requested EGL config attributes: RGB888, no alpha, 16-bit depth, no
    // stencil. mRenderType = 4 is EGL_OPENGL_ES2_BIT (request an ES2 config).
    private final int mRedSize = 8;
    private final int mGreenSize = 8;
    private final int mBlueSize = 8;
    private final int mAlphaSize = 0;
    private final int mDepthSize = 16;
    private final int mStencilSize = 0;
    private final int mRenderType = 4;
    public EGLContextHelper(){}
public void initEGL(EGLContext shareContext) throws Exception {
mEGL = (EGL10) GLDebugHelper.wrap(EGLContext.getEGL(),
GLDebugHelper.CONFIG_CHECK_GL_ERROR
| GLDebugHelper.CONFIG_CHECK_THREAD, null);
if (mEGL == null) {
throw new Exception("Couldn't get EGL");
}
mGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (mGLDisplay == null) {
throw new Exception("Couldn't get display for GL");
}
int[] curGLVersion = new int[2];
mEGL.eglInitialize(mGLDisplay, curGLVersion);
LogUtils.i(DEBUG_TAG, "GL version = " + curGLVersion[0] + "."
+ curGLVersion[1]);
int[] num_config = new int[1];
if(!mEGL.eglChooseConfig(mGLDisplay, mConfigSpec, null, 1,
num_config)){
throw new IllegalArgumentException("eglChooseConfig failed");
}
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException(
"No configs match configSpec");
}
EGLConfig[] configs = new EGLConfig[numConfigs];
if (!mEGL.eglChooseConfig(mGLDisplay, mConfigSpec, configs, numConfigs,
num_config)) {
throw new IllegalArgumentException("eglChooseConfig#2 failed");
}
mGLConfig = chooseConfig(mEGL, mGLDisplay, configs);
if (mGLConfig == null) {
mGLConfig = configs[0];
}
int[] surfaceAttribs = {
EGL10.EGL_WIDTH, 1,
EGL10.EGL_HEIGHT, 1,
EGL10.EGL_NONE
};
mGLSurface = mEGL.eglCreatePbufferSurface(mGLDisplay, mGLConfig, surfaceAttribs);
if (mGLSurface == null) {
throw new Exception("Couldn't create new surface");
}
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
mGLContext = mEGL.eglCreateContext(mGLDisplay, mGLConfig,
shareContext, attrib_list);
if (mGLContext == null) {
throw new Exception("Couldn't create new context");
}
// if (!mEGL.eglMakeCurrent(mGLDisplay, mGLSurface, mGLSurface, mGLContext)) {
// throw new Exception("Failed to eglMakeCurrent");
// }
}
public EGLContext getEGLContext() {
return mGLContext;
}
public EGLDisplay getGLDisplay() {
return mGLDisplay;
}
public EGLConfig getGLConfig() {
return mGLConfig;
}
public EGLSurface getGLSurface() {
return mGLSurface;
}
public EGL10 getEGL() {
return mEGL;
}
EGL10 mEGL;
EGLDisplay mGLDisplay;
EGLConfig mGLConfig;
EGLSurface mGLSurface;
EGLContext mGLContext;
int[] mConfigSpec = new int[]{
EGL10.EGL_RED_SIZE, mRedSize,
EGL10.EGL_GREEN_SIZE, mGreenSize,
EGL10.EGL_BLUE_SIZE, mBlueSize,
EGL10.EGL_ALPHA_SIZE, mAlphaSize,
EGL10.EGL_DEPTH_SIZE, mDepthSize,
EGL10.EGL_STENCIL_SIZE, mStencilSize,
EGL10.EGL_RENDERABLE_TYPE, mRenderType,//egl版本 2.0
EGL10.EGL_NONE};
public void release() {
mEGL.eglMakeCurrent(mGLDisplay, EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
mEGL.eglDestroySurface(mGLDisplay, mGLSurface);
mEGL.eglDestroyContext(mGLDisplay, mGLContext);
mEGL.eglTerminate(mGLDisplay);
LogUtils.i(DEBUG_TAG, "GL Cleaned up");
}
public boolean eglMakeCurrent(){
if(mGLContext == EGL10.EGL_NO_CONTEXT){
return false;
}else{
return mEGL.eglMakeCurrent(mGLDisplay, mGLSurface, mGLSurface, mGLContext);
}
}
public boolean eglMakeNoCurrent(){
return mEGL.eglMakeCurrent(mGLDisplay, EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
}
private EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for (EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
if ((d >= mDepthSize) && (s >= mStencilSize)) {
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if ((r == mRedSize) && (g == mGreenSize)
&& (b == mBlueSize) && (a == mAlphaSize)) {
return config;
}
}
}
return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
int[] value = new int[1];
if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
return value[0];
}
return defaultValue;
}
}

View File

@ -0,0 +1,84 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils.egl;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLES30;
/**
 * Copies 2D textures into OES textures via framebuffer blits.
 * Requires an OpenGL ES 3.0 context to be current on the calling thread.
 */
public class GLCopyHelper {
    private final int bufferCount;

    public GLCopyHelper() {
        this(1);
    }

    /**
     * @param bufferCount number of framebuffer pairs to allocate; each call to
     *                    {@link #copy2DTextureToOesTexture} addresses one pair by index
     */
    public GLCopyHelper(int bufferCount) {
        this.bufferCount = bufferCount;
    }

    private int[] mDstFrameBuffer;
    private int[] mSrcFrameBuffer;

    /**
     * Blits the contents of a GL_TEXTURE_2D texture into an OES texture of the
     * same dimensions using glBlitFramebuffer.
     *
     * @param srcTexture source 2D texture id
     * @param dstTexture destination OES texture id
     * @param width      copy width in pixels
     * @param height     copy height in pixels
     * @param index      framebuffer-pair index, must be in [0, bufferCount)
     * @throws IllegalArgumentException if index is out of range
     */
    public void copy2DTextureToOesTexture(
            int srcTexture,
            int dstTexture,
            int width, int height,
            int index) {
        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
        if (index < 0 || index >= bufferCount) {
            throw new IllegalArgumentException(
                    "index " + index + " out of range [0, " + bufferCount + ")");
        }
        // Framebuffers are created lazily so construction is GL-context free.
        if (mDstFrameBuffer == null) {
            mDstFrameBuffer = new int[bufferCount];
            GLES20.glGenFramebuffers(bufferCount, mDstFrameBuffer, 0);
        }

        if (mSrcFrameBuffer == null) {
            mSrcFrameBuffer = new int[bufferCount];
            GLES20.glGenFramebuffers(bufferCount, mSrcFrameBuffer, 0);
        }

        // Attach source as READ framebuffer, destination as DRAW framebuffer, then blit.
        GLES30.glBindFramebuffer(GLES30.GL_READ_FRAMEBUFFER, mSrcFrameBuffer[index]);
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, srcTexture);
        GLES30.glFramebufferTexture2D(GLES30.GL_READ_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0, GLES30.GL_TEXTURE_2D, srcTexture, 0);
        GLES30.glBindFramebuffer(GLES30.GL_DRAW_FRAMEBUFFER, mDstFrameBuffer[index]);
        GLES30.glFramebufferTexture2D(GLES30.GL_DRAW_FRAMEBUFFER,
                GLES30.GL_COLOR_ATTACHMENT0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, dstTexture, 0);
        GLES30.glBlitFramebuffer(0, 0, width, height, 0, 0, width, height, GLES30.GL_COLOR_BUFFER_BIT, GLES30.GL_LINEAR);

        // Restore default bindings so callers see unchanged GL state.
        GLES30.glBindFramebuffer(GLES30.GL_DRAW_FRAMEBUFFER, 0);
        GLES30.glBindFramebuffer(GLES30.GL_READ_FRAMEBUFFER, 0);
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
        GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    }

    /** Deletes all generated framebuffers. Call with the GL context current. */
    public void release() {
        if (mDstFrameBuffer != null) {
            GLES20.glDeleteFramebuffers(mDstFrameBuffer.length, mDstFrameBuffer, 0);
            mDstFrameBuffer = null;
        }

        if (mSrcFrameBuffer != null) {
            GLES20.glDeleteFramebuffers(mSrcFrameBuffer.length, mSrcFrameBuffer, 0);
            mSrcFrameBuffer = null;
        }
    }
}

View File

@ -0,0 +1,204 @@
package io.agora.beautyapi.faceunity.utils.egl;
import android.graphics.Matrix;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import io.agora.base.internal.video.EglBase;
import io.agora.base.internal.video.GlRectDrawer;
import io.agora.base.internal.video.RendererCommon;
/**
 * Wraps an OpenGL framebuffer + color texture and re-draws an input texture
 * into it with rotation/flip/matrix transforms applied.
 * All methods must be called with a GL context current on the calling thread.
 */
public class GLFrameBuffer {

    private int mFramebufferId = -1;
    private int mTextureId = -1;
    private int mWidth, mHeight, mRotation;
    // isTextureInner: true when mTextureId was created (and is owned) by this class.
    private boolean isFlipV, isFlipH, isTextureInner, isTextureChanged, isSizeChanged;

    private RendererCommon.GlDrawer drawer;

    private float[] mTexMatrix = GLUtils.IDENTITY_MATRIX;

    public GLFrameBuffer() {
    }

    /**
     * Sets the output size.
     *
     * @return true when the size actually changed
     */
    public boolean setSize(int width, int height) {
        if (mWidth != width || mHeight != height) {
            mWidth = width;
            mHeight = height;
            isSizeChanged = true;
            return true;
        }
        return false;
    }

    /** Sets the rotation (degrees) applied when drawing into the buffer. */
    public void setRotation(int rotation) {
        if (mRotation != rotation) {
            mRotation = rotation;
        }
    }

    public void setFlipV(boolean flipV) {
        if (isFlipV != flipV) {
            isFlipV = flipV;
        }
    }

    public void setFlipH(boolean flipH) {
        if (isFlipH != flipH) {
            isFlipH = flipH;
        }
    }

    /**
     * Uses an externally-owned texture as the render target; any texture
     * previously created internally is deleted.
     */
    public void setTextureId(int textureId) {
        if (mTextureId != textureId) {
            deleteTexture();
            mTextureId = textureId;
            isTextureChanged = true;
        }
    }

    public int getTextureId() {
        return mTextureId;
    }

    /** Sets the texture-coordinate matrix; null resets to identity. */
    public void setTexMatrix(float[] matrix) {
        if (matrix != null) {
            mTexMatrix = matrix;
        } else {
            mTexMatrix = GLUtils.IDENTITY_MATRIX;
        }
    }

    /** Clears matrix, flips and rotation back to their defaults. */
    public void resetTransform() {
        mTexMatrix = GLUtils.IDENTITY_MATRIX;
        isFlipH = isFlipV = false;
        mRotation = 0;
    }

    /**
     * Draws the given texture into this framebuffer's target texture with the
     * configured transform and returns the target texture id.
     *
     * @param textureId   input texture
     * @param textureType GL_TEXTURE_2D or GL_TEXTURE_EXTERNAL_OES
     * @return the output texture id
     * @throws RuntimeException if setSize was not called with a valid size
     */
    public int process(int textureId, int textureType) {
        // Reject if EITHER dimension is invalid (the original '&&' only caught
        // the case where both were; a single bad dimension slipped through).
        if (mWidth <= 0 || mHeight <= 0) {
            throw new RuntimeException("setSize firstly!");
        }

        if (mTextureId == -1) {
            // First use: create and attach an internally-owned target texture.
            mTextureId = createTexture(mWidth, mHeight);
            bindFramebuffer(mTextureId);
            isTextureInner = true;
        } else if (isTextureInner && isSizeChanged) {
            // Size changed on an internal texture: recreate it at the new size.
            GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0);
            mTextureId = createTexture(mWidth, mHeight);
            bindFramebuffer(mTextureId);
        } else if (isTextureChanged) {
            // External texture swapped in: re-attach it to the framebuffer.
            bindFramebuffer(mTextureId);
        }
        isTextureChanged = false;
        isSizeChanged = false;

        if (drawer == null) {
            drawer = new GlRectDrawer();
        }

        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebufferId);
        GLUtils.checkGlError("glBindFramebuffer");

        // Compose flip/rotation around the texture center (0.5, 0.5).
        Matrix transform = RendererCommon.convertMatrixToAndroidGraphicsMatrix(mTexMatrix);
        transform.preTranslate(0.5f, 0.5f);
        transform.preRotate(mRotation, 0.f, 0.f);
        transform.preScale(
                isFlipH ? -1.f : 1.f,
                isFlipV ? -1.f : 1.f
        );
        transform.preTranslate(-0.5f, -0.5f);
        float[] matrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transform);

        // EglBase.lock serializes drawing with the SDK's own GL usage.
        synchronized (EglBase.lock) {
            if (textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES) {
                drawer.drawOes(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight);
            } else {
                drawer.drawRgb(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight);
            }
        }

        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE);
        GLES20.glFinish();

        return mTextureId;
    }

    /** Frees the texture (if owned), framebuffer and drawer. */
    public void release() {
        deleteTexture();
        deleteFramebuffer();

        if (drawer != null) {
            drawer.release();
            drawer = null;
        }
    }

    private void deleteFramebuffer() {
        if (mFramebufferId != -1) {
            GLES20.glDeleteFramebuffers(1, new int[]{mFramebufferId}, 0);
            mFramebufferId = -1;
        }
    }

    /** Creates an RGBA texture with linear filtering and clamp-to-edge wrap. */
    public int createTexture(int width, int height) {
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        GLUtils.checkGlError("glGenTextures");
        int textureId = textures[0];
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
        return textureId;
    }

    /** Reallocates texture storage at a new size (contents are undefined after). */
    public void resizeTexture(int textureId, int width, int height) {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
    }

    // Deletes the texture only when it is internally owned; always clears the id.
    private void deleteTexture() {
        if (isTextureInner && mTextureId != -1) {
            GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0);
        }
        isTextureInner = false;
        mTextureId = -1;
    }

    // Lazily creates the framebuffer and attaches the given texture as color output.
    private void bindFramebuffer(int textureId) {
        if (mFramebufferId == -1) {
            int[] framebuffers = new int[1];
            GLES20.glGenFramebuffers(1, framebuffers, 0);
            GLUtils.checkGlError("glGenFramebuffers");
            mFramebufferId = framebuffers[0];
        }
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebufferId);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
                GLES20.GL_COLOR_ATTACHMENT0,
                GLES20.GL_TEXTURE_2D,
                textureId, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE);
    }
}

View File

@ -0,0 +1,180 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils.egl
import android.opengl.GLES20
import android.util.Log
import android.util.Size
import java.util.concurrent.ConcurrentLinkedQueue
/**
 * A fixed-capacity queue of GL textures. [enqueue] copies an input texture
 * (with rotation/flip applied via [glFrameBuffer]) into one of [cacheCount]
 * reusable cached textures; [dequeue] hands the oldest copy back out.
 *
 * NOTE(review): enqueue/process must run on a thread with a GL context
 * current; the queue itself is a ConcurrentLinkedQueue, but texture reuse
 * assumes a single producer — confirm against callers.
 */
class GLTextureBufferQueue(
    private val glFrameBuffer: GLFrameBuffer = GLFrameBuffer(),
    private val cacheCount: Int = 6,
    private val loggable: Boolean = false
) {
    private val TAG = "GLTextureBufferQueue"
    // Index of the next cache slot to (re)use; wraps at cacheCount.
    private var cacheIndex = 0
    // Pool of reusable output textures, one per cache slot.
    private val cacheTextureOuts = arrayOfNulls<TextureOut>(cacheCount)
    // FIFO of filled textures awaiting dequeue.
    private val textureIdQueue = ConcurrentLinkedQueue<TextureOut>()

    /**
     * Copies [iN] into a cached texture and appends it to the queue.
     * Drops the frame (and logs) when the queue is already full.
     *
     * @return the queue size after the operation
     */
    fun enqueue(iN: TextureIn): Int {
        var size = textureIdQueue.size
        if (size < cacheCount) {
            var out = cacheTextureOuts[cacheIndex]
            // 90/270-degree rotations swap output width and height.
            val outSize = when (iN.rotation) {
                90, 270 -> Size(iN.height, iN.width)
                else -> Size(iN.width, iN.height)
            }
            if (out == null) {
                // Slot empty: create a new texture of the required size.
                // NOTE(review): iN.isMirror binds positionally to TextureOut's
                // 'tag' parameter here (TextureOut has no isMirror field) and is
                // overwritten by 'out.tag = iN.tag' below — confirm intent.
                val textureId = glFrameBuffer.createTexture(outSize.width, outSize.height)
                out = TextureOut(
                    0,
                    textureId,
                    GLES20.GL_TEXTURE_2D,
                    outSize.width,
                    outSize.height,
                    iN.isFrontCamera,
                    iN.isMirror,
                )
                cacheTextureOuts[cacheIndex] = out
            } else if (out.width != outSize.width || out.height != outSize.height) {
                // Slot exists but wrong size: reallocate storage, keep the texture id.
                glFrameBuffer.resizeTexture(out.textureId, outSize.width, outSize.height)
                out = TextureOut(
                    0,
                    out.textureId,
                    out.textureType,
                    outSize.width,
                    outSize.height,
                    iN.isFrontCamera,
                    iN.isMirror,
                )
                cacheTextureOuts[cacheIndex] = out
            } else if(out.isFrontCamera != iN.isFrontCamera){
                // Camera facing changed: refresh the slot's metadata only.
                out = TextureOut(
                    0,
                    out.textureId,
                    out.textureType,
                    out.width,
                    out.height,
                    iN.isFrontCamera,
                    iN.isMirror,
                )
                cacheTextureOuts[cacheIndex] = out
            }

            // Render the input into the slot's texture with the requested transform.
            glFrameBuffer.textureId = out.textureId
            glFrameBuffer.setSize(out.width, out.height)
            glFrameBuffer.resetTransform()
            glFrameBuffer.setRotation(iN.rotation)
            if (iN.transform != null) {
                glFrameBuffer.setTexMatrix(iN.transform)
                // With an explicit matrix, front camera implies horizontal flip;
                // mirror mode inverts that choice.
                var flipH = iN.isFrontCamera
                if(iN.isMirror){
                    flipH = !flipH
                }
                glFrameBuffer.setFlipH(flipH)
            } else {
                // Without a matrix the flip logic is inverted relative to above.
                var flipH = !iN.isFrontCamera
                if(iN.isMirror){
                    flipH = !flipH
                }
                glFrameBuffer.setFlipH(flipH)
            }
            glFrameBuffer.setFlipV(iN.flipV)
            glFrameBuffer.process(iN.textureId, iN.textureType)
            out.index = cacheIndex
            out.tag = iN.tag

            textureIdQueue.offer(out)
            if(loggable){
                Log.d(TAG, "TextureIdQueue enqueue index=$cacheIndex, size=$size")
            }
            cacheIndex = (cacheIndex + 1) % cacheCount
            size++
        } else {
            if(loggable){
                Log.e(TAG, "TextureIdQueue is full!!")
            }
        }

        return size
    }

    /**
     * Returns the oldest queued texture, or null when empty.
     *
     * @param remove when false, only peeks without removing
     */
    fun dequeue(remove: Boolean = true): TextureOut? {
        val size = textureIdQueue.size
        val poll = if(remove){
            textureIdQueue.poll()
        }else{
            textureIdQueue.peek()
        }
        if(loggable){
            Log.d(TAG, "TextureIdQueue dequeue index=${poll?.index}, size=$size")
        }
        return poll
    }

    /** Empties the queue; cached textures stay allocated for reuse. */
    fun reset() {
        cacheIndex = 0
        textureIdQueue.clear()
    }

    /** Deletes all cached textures and releases the frame buffer. GL thread only. */
    fun release() {
        cacheIndex = 0
        cacheTextureOuts.forEachIndexed { index, textureOut ->
            if (textureOut != null) {
                GLES20.glDeleteTextures(1, intArrayOf(textureOut.textureId), 0)
                cacheTextureOuts[index] = null
            }
        }
        textureIdQueue.clear()
        glFrameBuffer.release()
    }

    /** Input descriptor: source texture plus how to interpret/transform it. */
    data class TextureIn(
        val textureId: Int,
        val textureType: Int,
        val width: Int,
        val height: Int,
        val rotation: Int,
        val flipV: Boolean,
        val isFrontCamera: Boolean,
        val isMirror: Boolean,
        val transform: FloatArray?,
        val tag: Any? = null
    )

    /** Output descriptor: a cached texture holding the processed frame. */
    data class TextureOut(
        var index: Int = 0,
        val textureId: Int,
        val textureType: Int,
        val width: Int,
        val height: Int,
        val isFrontCamera: Boolean,
        var tag: Any? = null
    )
}

View File

@ -0,0 +1,279 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils.egl;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.Objects;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLContext;
import io.agora.beautyapi.faceunity.utils.LogUtils;
/**
 * Static OpenGL helpers: texture readback to Bitmap, NV21 conversion,
 * transform-matrix construction, shader/program/texture creation and
 * GL error checking. All GL calls require a current context.
 */
public class GLUtils {
    private static final String TAG = "GLUtils";
    public static final float[] IDENTITY_MATRIX = new float[16];

    static {
        Matrix.setIdentityM(IDENTITY_MATRIX, 0);
    }

    private GLUtils() {
    }

    /**
     * Reads back a GL_TEXTURE_2D texture into an ARGB_8888 Bitmap via a
     * temporary framebuffer. Returns null on failure.
     */
    public static Bitmap getTexture2DImage(int textureID, int width, int height) {
        try {
            int[] oldFboId = new int[1];
            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId));

            int[] framebuffers = new int[1];
            GLES20.glGenFramebuffers(1, framebuffers, 0);
            int framebufferId = framebuffers[0];
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId);

            int[] renderbuffers = new int[1];
            GLES20.glGenRenderbuffers(1, renderbuffers, 0);
            int renderId = renderbuffers[0];
            GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId);
            GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);

            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureID, 0);
            GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId);
            if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
                LogUtils.e(TAG, "Framebuffer error");
            }

            ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
            rgbaBuf.position(0);
            GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);

            Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            bitmap.copyPixelsFromBuffer(rgbaBuf);

            // Fix: delete the renderbuffer and framebuffer we created. The old
            // code passed the framebuffer array to glDeleteRenderbuffers and a
            // zero-filled IntBuffer.allocate(framebufferId) to
            // glDeleteFramebuffers, leaking both objects on every call.
            GLES20.glDeleteRenderbuffers(1, renderbuffers, 0);
            GLES20.glDeleteFramebuffers(1, framebuffers, 0);
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]);
            return bitmap;
        } catch (Exception e) {
            LogUtils.e(TAG, e.toString());
        }
        return null;
    }

    /**
     * Reads back a GL_TEXTURE_EXTERNAL_OES texture into an ARGB_8888 Bitmap
     * via a temporary framebuffer. Returns null on failure.
     */
    public static Bitmap getTextureOESImage(int textureID, int width, int height) {
        try {
            int[] oldFboId = new int[1];
            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId));

            int[] framebuffers = new int[1];
            GLES20.glGenFramebuffers(1, framebuffers, 0);
            int framebufferId = framebuffers[0];
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId);

            int[] renderbuffers = new int[1];
            GLES20.glGenRenderbuffers(1, renderbuffers, 0);
            int renderId = renderbuffers[0];
            GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId);
            GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);

            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID, 0);
            GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId);
            if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
                LogUtils.e(TAG, "Framebuffer error");
            }

            ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
            rgbaBuf.position(0);
            GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);

            Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            bitmap.copyPixelsFromBuffer(rgbaBuf);

            // Fix: delete the objects actually created (see getTexture2DImage).
            GLES20.glDeleteRenderbuffers(1, renderbuffers, 0);
            GLES20.glDeleteFramebuffers(1, framebuffers, 0);
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]);
            return bitmap;
        } catch (Exception e) {
            LogUtils.e(TAG, e.toString());
        }
        return null;
    }

    /**
     * Converts an NV21 byte array to a Bitmap by encoding through JPEG
     * (quality 80 — lossy; intended for debugging/preview use).
     */
    public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
        Bitmap bitmap = null;
        try {
            YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, width, height), 80, stream);
            bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
            stream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return bitmap;
    }

    // Reads the current framebuffer's pixels into a new ARGB_8888 Bitmap.
    private static Bitmap readBitmap(int width, int height) {
        ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
        rgbaBuf.position(0);
        GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(rgbaBuf);
        return bitmap;
    }

    /**
     * Builds a 4x4 MVP matrix applying horizontal/vertical flips and a
     * rotation (degrees). For 90/270 rotations the flip axes are swapped so
     * the flips apply in source orientation.
     */
    public static float[] createTransformMatrix(int rotation, boolean flipH, boolean flipV) {
        float[] renderMVPMatrix = new float[16];
        float[] tmp = new float[16];
        Matrix.setIdentityM(tmp, 0);

        boolean _flipH = flipH;
        boolean _flipV = flipV;
        if (rotation % 180 != 0) {
            _flipH = flipV;
            _flipV = flipH;
        }

        if (_flipH) {
            Matrix.rotateM(tmp, 0, tmp, 0, 180, 0, 1f, 0);
        }
        if (_flipV) {
            Matrix.rotateM(tmp, 0, tmp, 0, 180, 1f, 0f, 0);
        }

        float _rotation = rotation;
        if (_rotation != 0) {
            // A single flip mirrors the rotation direction.
            if (_flipH != _flipV) {
                _rotation *= -1;
            }
            Matrix.rotateM(tmp, 0, tmp, 0, _rotation, 0, 0, 1);
        }

        Matrix.setIdentityM(renderMVPMatrix, 0);
        Matrix.multiplyMM(renderMVPMatrix, 0, tmp, 0, renderMVPMatrix, 0);
        return renderMVPMatrix;
    }

    /** @return the EGL context current on this thread, or null if none. */
    public static EGLContext getCurrGLContext() {
        EGL10 egl = (EGL10) javax.microedition.khronos.egl.EGLContext.getEGL();
        if (egl != null && !Objects.equals(egl.eglGetCurrentContext(), EGL10.EGL_NO_CONTEXT)) {
            return egl.eglGetCurrentContext();
        }
        return null;
    }

    /**
     * Throws (and logs) when the GL error flag is set.
     *
     * @param op description of the operation just performed, for the message
     */
    public static void checkGlError(String op) {
        int error = GLES20.glGetError();
        if (error != GLES20.GL_NO_ERROR) {
            String msg = op + ": glError 0x" + Integer.toHexString(error);
            LogUtils.e(TAG, msg);
            throw new RuntimeException(msg);
        }
    }

    /**
     * Compiles and links a program from vertex + fragment shader sources.
     *
     * @return the program id, or 0 on failure
     */
    public static int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }

        int program = GLES20.glCreateProgram();
        checkGlError("glCreateProgram");
        if (program == 0) {
            LogUtils.e(TAG, "Could not create program");
            // Fix: bail out instead of attaching shaders to program 0,
            // which only raises further GL errors.
            return 0;
        }
        GLES20.glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        GLES20.glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            LogUtils.e(TAG, "Could not link program: ");
            LogUtils.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            program = 0;
        }
        return program;
    }

    /**
     * Compiles a shader of the given type.
     *
     * @return the shader id, or 0 on compile failure
     */
    public static int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        checkGlError("glCreateShader type=" + shaderType);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            LogUtils.e(TAG, "Could not compile shader " + shaderType + ":");
            LogUtils.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
        return shader;
    }

    /**
     * Creates a texture with the given filter/wrap parameters and optionally
     * uploads a Bitmap into it.
     */
    public static int createTexture(int textureTarget, Bitmap bitmap, int minFilter,
                                    int magFilter, int wrapS, int wrapT) {
        int[] textureHandle = new int[1];

        GLES20.glGenTextures(1, textureHandle, 0);
        checkGlError("glGenTextures");
        GLES20.glBindTexture(textureTarget, textureHandle[0]);
        checkGlError("glBindTexture " + textureHandle[0]);
        GLES20.glTexParameterf(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, minFilter);
        GLES20.glTexParameterf(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, magFilter); // linear interpolation
        GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, wrapS);
        GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_T, wrapT);

        if (bitmap != null) {
            android.opengl.GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
        }
        checkGlError("glTexParameter");
        return textureHandle[0];
    }
}

View File

@ -0,0 +1,214 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils.egl
import android.opengl.GLES20
import io.agora.beautyapi.faceunity.utils.LogUtils
import java.util.concurrent.Callable
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.CountDownLatch
import java.util.concurrent.Executors
import java.util.concurrent.Future
import javax.microedition.khronos.egl.EGLContext
/**
 * Pipelines texture filtering onto a dedicated worker thread that shares the
 * caller's EGL context. Frames are copied into an input queue on the caller's
 * GL thread, filtered asynchronously, and read back out of an output queue
 * with [cacheCount] frames of latency.
 *
 * NOTE(review): [process] must always be called from the same GL thread; the
 * worker owns its own shared context created in executeSync — confirm with callers.
 */
class TextureProcessHelper(
    private val cacheCount: Int = 2
) {
    private val TAG = "TextureProcessHelper"
    // Input queue: frames copied from the caller's texture on the caller's GL thread.
    private val glTextureBufferQueueIn = GLTextureBufferQueue(cacheCount = cacheCount, loggable = true)
    // Output queue: filtered frames, produced on the worker thread.
    private val glTextureBufferQueueOut = GLTextureBufferQueue(cacheCount = cacheCount, loggable = false)
    private val glFrameBuffer = GLFrameBuffer()
    // One Future per in-flight frame; polled in submission order.
    private val futureQueue = ConcurrentLinkedQueue<Future<Int>>()
    private val workerThread = Executors.newSingleThreadExecutor()
    private val eglContextHelper =
        EGLContextHelper()
    // The caller's EGL context that the worker's context shares resources with.
    private var eglContextBase: EGLContext? = null
    private var isReleased = false
    // User-supplied filter; runs on the worker thread, returns a texture id or <0.
    private var filter: ((GLTextureBufferQueue.TextureOut) -> Int)? = null
    // Becomes true once the pipeline is primed with cacheCount frames.
    private var isBegin = false
    private var frameIndex = 0

    fun setFilter(filter: (GLTextureBufferQueue.TextureOut) -> Int) {
        this.filter = filter
    }

    /**
     * Feeds one frame into the pipeline and returns a processed texture id,
     * or 0 while the pipeline is still filling, or -1 on error/after release.
     * Must be called with a GL context current (the "base" context).
     */
    fun process(
        texId: Int, texType: Int,
        width: Int, height: Int, rotation: Int,
        transform: FloatArray,
        isFrontCamera: Boolean,
        isMirror: Boolean
    ): Int {
        if (isReleased) {
            return -1
        }
        val currGLContext = GLUtils.getCurrGLContext() ?: return -1

        // (Re)create the worker's shared EGL context whenever the caller's
        // context appears or changes; runs synchronously on the worker thread.
        if (eglContextBase == null) {
            eglContextBase = currGLContext
            executeSync {
                eglContextHelper.initEGL(eglContextBase)
                eglContextHelper.eglMakeCurrent()
            }
        } else if (eglContextBase != currGLContext) {
            eglContextBase = currGLContext
            executeSync {
                eglContextHelper.release()
                eglContextHelper.initEGL(eglContextBase)
                eglContextHelper.eglMakeCurrent()
            }
        }

        // Copy the caller's texture into the input queue on this (GL) thread.
        glTextureBufferQueueIn.enqueue(
            GLTextureBufferQueue.TextureIn(
                texId,
                texType,
                width,
                height,
                rotation,
                false,
                isFrontCamera,
                isMirror,
                transform,
                frameIndex
            )
        )
        frameIndex ++

        if (isReleased) {
            return -1
        }

        // Asynchronously filter the frame on the worker thread.
        futureQueue.offer(workerThread.submit(Callable {
            if (isReleased) {
                return@Callable -2
            }
            // Peek (remove=false) so the input slot is not recycled while in use.
            val frame = glTextureBufferQueueIn.dequeue(false) ?: return@Callable -2
            val filterTexId = filter?.invoke(frame) ?: -1
            if (filterTexId >= 0) {
                // Filter succeeded: enqueue its output texture.
                glTextureBufferQueueOut.enqueue(
                    GLTextureBufferQueue.TextureIn(
                        filterTexId,
                        GLES20.GL_TEXTURE_2D,
                        frame.width,
                        frame.height,
                        0,
                        false,
                        false,
                        true,
                        null,
                        frame.tag
                    )
                )
            } else {
                // Filter declined: pass the unfiltered frame through.
                glTextureBufferQueueOut.enqueue(
                    GLTextureBufferQueue.TextureIn(
                        frame.textureId,
                        frame.textureType,
                        frame.width,
                        frame.height,
                        0,
                        false,
                        false,
                        true,
                        null,
                        frame.tag
                    )
                )
            }
            // Now actually remove the consumed input frame.
            glTextureBufferQueueIn.dequeue(true)
            return@Callable 0
        }))

        var ret = 0
        // Only start draining once cacheCount frames are in flight (pipeline primed).
        if (isBegin || futureQueue.size >= cacheCount) {
            isBegin = true
            try {
                // Block on the oldest pending frame, then copy it to the caller's context.
                val get = futureQueue.poll()?.get() ?: -1
                if (get == 0) {
                    val dequeue = glTextureBufferQueueOut.dequeue() ?: return -1
                    glFrameBuffer.setSize(dequeue.width, dequeue.height)
                    ret = glFrameBuffer.process(dequeue.textureId, dequeue.textureType)
                }
            }catch (e: Exception){
                LogUtils.e(TAG, "process end with exception: $e")
            }
        }

        return ret
    }

    /** Cancels in-flight work and empties both queues; textures stay allocated. */
    fun reset(){
        if(frameIndex == 0){
            return
        }
        isBegin = false
        frameIndex = 0
        var future = futureQueue.poll()
        while (future != null) {
            future.cancel(true)
            future = futureQueue.poll()
        }
        glTextureBufferQueueIn.reset()
        // glFrameBuffer.release()
        executeSync {
            // Output queue is owned by the worker's GL context.
            glTextureBufferQueueOut.reset()
        }
    }

    /** Number of frames currently in flight. */
    fun size() = futureQueue.size

    /** Tears down queues, worker context and thread. Irreversible. */
    fun release() {
        isReleased = true
        filter = null
        isBegin = false
        frameIndex = 0
        var future = futureQueue.poll()
        while (future != null) {
            future.cancel(true)
            future = futureQueue.poll()
        }
        glTextureBufferQueueIn.release()
        glFrameBuffer.release()
        executeSync {
            // Worker-context resources must be freed on the worker thread.
            glTextureBufferQueueOut.release()
            if (eglContextBase != null) {
                eglContextHelper.release()
                eglContextBase = null
            }
        }
        workerThread.shutdown()
    }

    /** Runs [run] on the worker thread and blocks the caller until it finishes. */
    fun executeSync(run: () -> Unit) {
        val latch = CountDownLatch(1)
        workerThread.execute {
            run.invoke()
            latch.countDown()
        }
        latch.await()
    }
}

View File

@ -1 +1 @@
apply plugin: 'com.android.library' apply plugin: 'img-optimizer' apply plugin: 'kotlin-android' android { compileSdkVersion rootProject.ext.android.compileSdkVersion buildToolsVersion rootProject.ext.android.buildToolsVersion aaptOptions.cruncherEnabled = false aaptOptions.useNewCruncher = false packagingOptions { pickFirst "lib/armeabi/libyuvutils.so" pickFirst "lib/arm64-v8a/libyuvutils.so" pickFirst "lib/armeabi-v7a/libyuvutils.so" pickFirst "lib/armeabi/libyuvtools.so" pickFirst "lib/arm64-v8a/libyuvtools.so" pickFirst "lib/armeabi-v7a/libyuvtools.so" exclude "lib/arm64-v8a/libmmcv_api_handgesture.so" exclude "lib/arm64-v8a/libmmcv_api_express.so" exclude "lib/arm64-v8a/libMediaEncoder.so" exclude "lib/arm64-v8a/libarcore_sdk_c.so" exclude "lib/arm64-v8a/libmediadecoder.so" exclude "lib/arm64-v8a/libMediaMuxer.so" exclude "lib/arm64-v8a/libarcore_sdk_jni.so" exclude "lib/arm64-v8a/libMediaUtils.so" exclude "lib/arm64-v8a/libcosmosffmpeg.so" } defaultConfig { minSdkVersion rootProject.ext.android.minSdkVersion targetSdkVersion rootProject.ext.android.targetSdkVersion versionCode rootProject.ext.android.versionCode versionName rootProject.ext.android.versionName manifestPlaceholders = rootProject.ext.manifestPlaceholders ndk { abiFilters "armeabi-v7a", "arm64-v8a" } javaCompileOptions { annotationProcessorOptions { arguments = [AROUTER_MODULE_NAME: project.getName()] } } } aaptOptions { cruncherEnabled = false useNewCruncher = false } buildTypes { release { minifyEnabled false proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' } } kotlinOptions { allWarningsAsErrors = true } compileOptions { sourceCompatibility JavaVersion.VERSION_1_8 targetCompatibility JavaVersion.VERSION_1_8 } } repositories { flatDir { dirs 'libs', '../libs' } mavenCentral() } dependencies { implementation 'androidx.constraintlayout:constraintlayout:2.0.0' implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar']) implementation 
(name:'../libs/beautysdk-202202241203',ext:'aar') implementation (name:'../libs/svgaplayer-release-v1.2.1',ext:'aar') //socket.io implementation('io.socket:socket.io-client:1.0.0') { exclude group: 'org.json', module: 'json' } //common api project(path: ':common') api project(path:':FaceUnity')//新娱美颜 api project(':Share')//分享 annotationProcessor rootProject.ext.dependencies["arouter-compiler"] //工具 api rootProject.ext.dependencies["blank-utilcode"] implementation 'com.eightbitlab:blurview:1.6.6' implementation 'com.google.code.gson:gson:2.8.6' implementation "com.getkeepsafe.relinker:relinker:1.4.4" //ExoPlayer,腾讯的播放器不支持无缝切换 implementation 'com.google.android.exoplayer:exoplayer:2.18.2' implementation 'com.google.android.exoplayer:exoplayer-core:2.18.2@aar' }
apply plugin: 'com.android.library' apply plugin: 'img-optimizer' apply plugin: 'kotlin-android' android { compileSdkVersion rootProject.ext.android.compileSdkVersion buildToolsVersion rootProject.ext.android.buildToolsVersion aaptOptions.cruncherEnabled = false aaptOptions.useNewCruncher = false packagingOptions { pickFirst "lib/armeabi/libyuvutils.so" pickFirst "lib/arm64-v8a/libyuvutils.so" pickFirst "lib/armeabi-v7a/libyuvutils.so" pickFirst "lib/armeabi/libyuvtools.so" pickFirst "lib/arm64-v8a/libyuvtools.so" pickFirst "lib/armeabi-v7a/libyuvtools.so" exclude "lib/arm64-v8a/libmmcv_api_handgesture.so" exclude "lib/arm64-v8a/libmmcv_api_express.so" exclude "lib/arm64-v8a/libMediaEncoder.so" exclude "lib/arm64-v8a/libarcore_sdk_c.so" exclude "lib/arm64-v8a/libmediadecoder.so" exclude "lib/arm64-v8a/libMediaMuxer.so" exclude "lib/arm64-v8a/libarcore_sdk_jni.so" exclude "lib/arm64-v8a/libMediaUtils.so" exclude "lib/arm64-v8a/libcosmosffmpeg.so" } defaultConfig { minSdkVersion rootProject.ext.android.minSdkVersion targetSdkVersion rootProject.ext.android.targetSdkVersion versionCode rootProject.ext.android.versionCode versionName rootProject.ext.android.versionName manifestPlaceholders = rootProject.ext.manifestPlaceholders ndk { abiFilters "armeabi-v7a", "arm64-v8a" } javaCompileOptions { annotationProcessorOptions { arguments = [AROUTER_MODULE_NAME: project.getName()] } } } aaptOptions { cruncherEnabled = false useNewCruncher = false } buildTypes { release { minifyEnabled false proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' } } kotlinOptions { allWarningsAsErrors = true } compileOptions { sourceCompatibility JavaVersion.VERSION_1_8 targetCompatibility JavaVersion.VERSION_1_8 } } repositories { flatDir { dirs 'libs', '../libs' } mavenCentral() } dependencies { implementation 'androidx.constraintlayout:constraintlayout:2.0.0' implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar']) implementation 
(name:'../libs/beautysdk-202202241203',ext:'aar') implementation (name:'../libs/svgaplayer-release-v1.2.1',ext:'aar') //socket.io implementation('io.socket:socket.io-client:1.0.0') { exclude group: 'org.json', module: 'json' } //common api project(path:':lib_faceunity')//新娱美颜 api project(':Share')//分享 annotationProcessor rootProject.ext.dependencies["arouter-compiler"] //工具 api rootProject.ext.dependencies["blank-utilcode"] implementation 'com.eightbitlab:blurview:1.6.6' implementation 'com.google.code.gson:gson:2.8.6' implementation "com.getkeepsafe.relinker:relinker:1.4.4" //ExoPlayer,腾讯的播放器不支持无缝切换 implementation 'com.google.android.exoplayer:exoplayer:2.18.2' implementation 'com.google.android.exoplayer:exoplayer-core:2.18.2@aar' }

View File

@ -272,6 +272,13 @@ public class LiveRyAnchorActivity extends LiveActivity implements LiveFunctionCl
RandomPkManager.getInstance().addOnRandomPkTimer(onRandomPkTimer);
manager.setOnMirrorChanged(new FaceManager.OnMirrorChanged() {
@Override
public void onChange(boolean falg) {
mLivePushViewHolder.setEnableBeauty(falg);
}
});
//添加开播前设置控件
mLiveReadyViewHolder = new LiveNewReadyRyViewHolder(mContext, mContainer, mLiveSDK);
mLiveReadyViewHolder.setManager(manager);
@ -1476,7 +1483,7 @@ public class LiveRyAnchorActivity extends LiveActivity implements LiveFunctionCl
@Override
public void onLinkMicToPk(String uid, String pkhead, String pkname) {
L.eSw("onLinkMicToPkonLinkMicToPkonLinkMicToPk");
}
@Override

View File

@ -0,0 +1,87 @@
package com.yunbao.live.dialog;
import android.app.ActionBar;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.view.Gravity;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import androidx.annotation.Nullable;
import com.yunbao.common.dialog.AbsDialogFragment;
import com.yunbao.faceunity.FaceManager;
import com.yunbao.faceunity.ui.FaceUnityView;
import com.yunbao.live.R;
/**
* 新娱美颜SDK暂时不用不要删除
*/
/**
 * Bottom-anchored dialog fragment hosting the FaceUnity beauty-effect panel.
 * (Original note, translated: FaceUnity SDK — temporarily unused, do not delete.)
 *
 * Usage: inject the {@link FaceManager} via {@link #setManager(FaceManager)}
 * before showing the dialog; the manager is bound to the panel in
 * {@link #onActivityCreated(Bundle)}.
 */
public class LiveFaceUnityDialogNewFragment extends AbsDialogFragment {
    private Context mContext;
    // Beauty-effect control panel used as the dialog's content view.
    private FaceUnityView faceView;
    // Must be set via setManager() before the dialog is shown; used in onActivityCreated().
    private FaceManager manager;
    // Optional view made VISIBLE again when this dialog is dismissed; may be null.
    private View mRootView;

    public LiveFaceUnityDialogNewFragment(Context mContext) {
        this.mContext = mContext;
    }

    @Override
    protected int getLayoutId() {
        // 0: the content view comes from getLayoutView() instead of a layout resource.
        return 0;
    }

    @Override
    protected View getLayoutView() {
        // Build the FaceUnity panel programmatically as the dialog content.
        faceView = new FaceUnityView(mContext);
        return faceView;
    }

    @Override
    protected int getDialogStyle() {
        return R.style.dialog4;
    }

    @Override
    protected boolean canCancel() {
        // Dialog can be dismissed by tapping outside / back press.
        return true;
    }

    @Override
    protected void setWindowAttributes(Window window) {
        // Full-width, wrap-content sheet pinned to the bottom of the screen.
        WindowManager.LayoutParams params = window.getAttributes();
        params.width = ActionBar.LayoutParams.MATCH_PARENT;
        params.height = ActionBar.LayoutParams.WRAP_CONTENT;
        params.gravity = Gravity.BOTTOM;
        window.setAttributes(params);
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Bind the panel to the manager and load the persisted beauty config.
        // NOTE(review): assumes setManager() was called before show() — otherwise NPE here.
        manager.setFaceUnityView(faceView);
        manager.loadConfig();
    }

    @Override
    public void onDismiss(DialogInterface dialog) {
        super.onDismiss(dialog);
        // Restore the UI that was hidden while the beauty panel was open.
        if (mRootView != null) {
            mRootView.setVisibility(View.VISIBLE);
        }
    }

    public void setManager(FaceManager manager) {
        this.manager = manager;
    }

    public void setDismissShowUi(View mRootView) {
        this.mRootView = mRootView;
    }
}

View File

@ -68,6 +68,7 @@ import com.yunbao.live.socket.SocketRyClient;
import com.yunbao.live.socket.SocketRyLinkMicPkUtil;
import com.yunbao.live.socket.SocketSendBean;
import com.yunbao.live.views.LiveLinkMicPkViewHolder;
import com.yunbao.live.views.LivePlayRyViewHolder;
import com.yunbao.live.views.LivePushRyViewHolder;
import org.greenrobot.eventbus.EventBus;
@ -685,112 +686,10 @@ public class LiveRyLinkMicPkPresenter implements View.OnClickListener {
EventBus.getDefault().post(new AnchorInfoEvent(false, u.getId(), u.getUserNiceName(), u.getAvatar()));
Log.e("eve", u.getId() + "");
L.eSw("主播接受了主播的PK邀請");
/**
* 加入副房间
* 前提必须已经 通过 {@link RCRTCEngine#joinRoom(String, RCRTCRoomType, IRCRTCResultDataCallback)} {@link RCRTCEngine#joinRoom(String, IRCRTCResultDataCallback)} 加入了主房间
*
* @param roomId 房间 ID 长度 64 个字符可包含`A-Z``a-z``0-9``+``=``-``_`
* @param callBack 加入房间回调
* @group 房间管理
*/
RandomPkManager.getInstance().setPkStatus(RandomPkManager.PK_STATUS_START);
/*RCRTCEngine.getInstance().joinOtherRoom(u.getId(), new IRCRTCResultDataCallback<RCRTCOtherRoom>() {
@Override
public void onSuccess(RCRTCOtherRoom rcrtcOtherRoom) {
rcrtcOtherRoom.registerOtherRoomEventsListener(otherRoomEventsListener);
new Handler(Looper.getMainLooper()).post(new Runnable() {
public void run() {
//遍历远端用户列表
for (int i = 0; i < rcrtcOtherRoom.getRemoteUsers().size(); i++) {
//遍历远端用户发布的资源列表
for (RCRTCInputStream stream : rcrtcOtherRoom.getRemoteUsers().get(i).getStreams()) {
Log.e("ry", stream.getMediaType() + "类型");
if (stream.getMediaType() == RCRTCMediaType.VIDEO) {
//如果远端用户发布的是视频流创建显示视图RCRTCVideoView并添加到布局中显示
RCRTCVideoView remoteView = new RCRTCVideoView(contexts);
((RCRTCVideoInputStream) stream).setVideoView(remoteView);
//todo 本demo只演示添加1个远端用户的视图
livePushRyViewHolder.mPreView1.removeAllViews();
remoteView.setScalingType(SCALE_ASPECT_FILL);
livePushRyViewHolder.mPreView1.addView(remoteView);
}
//如果要订阅所有远端用户的流保存所有流信息方便后面统一订阅
inputStreamList.add(stream);
RCRTCMixConfig config = new RCRTCMixConfig();
RCRTCMixConfig.MediaConfig mediaConfig = new RCRTCMixConfig.MediaConfig();
config.setMediaConfig(mediaConfig);
//视频输出配置
RCRTCMixConfig.MediaConfig.VideoConfig videoConfig = new RCRTCMixConfig.MediaConfig.VideoConfig();
mediaConfig.setVideoConfig(videoConfig);
//大流视频的输出参数
RCRTCMixConfig.MediaConfig.VideoConfig.VideoLayout normal = new RCRTCMixConfig.MediaConfig.VideoConfig.VideoLayout();
videoConfig.setVideoLayout(normal);
//推荐宽帧率参数值可以通过默认视频流的配置获取也可以根据实际需求来自定义设置
//如不设置宽高值则服务端将使用默认宽高 360 * 640
//:发布的视频分辨率为720 * 1280如果不设置则观众端看到的视频分辨率为 360 * 640,
//所以如果想让观众端看到的视频分辨率和发布视频分辨率一致则应从发布的视频流中获取分辨率配置并设置到 mediaConfig
RCRTCVideoStreamConfig defaultVideoConfig = RCRTCEngine.getInstance().getDefaultVideoStream().getVideoConfig();
int fps = defaultVideoConfig.getVideoFps().getFps();
int width = 960;
int height = 720;
normal.setWidth(width); //视频宽
normal.setHeight(height); //视频高
normal.setFps(fps); //视频帧率
//1. 设置自适应合流布局模式
config.setLayoutMode(RCRTCMixConfig.MixLayoutMode.ADAPTIVE);
//2. 合流画布设置
if (rcrtcLiveInfo == null) {
Log.w("PkDebug", "PK合流失败,rcrtcLiveInfo为空");
}
rcrtcLiveInfo.setMixConfig(config, new IRCRTCResultCallback() {
@Override
public void onSuccess() {
Log.e("ry", "混成功13");
}
@Override
public void onFailed(RTCErrorCode errorCode) {
Log.e("ry", "混失败" + errorCode);
}
});
}
}
//开始订阅资源
rtcRoom.getLocalUser().subscribeStreams(inputStreamList, new IRCRTCResultCallback() {
@Override
public void onSuccess() {
Log.i("ry", "订阅资源成功");
}
@Override
public void onFailed(RTCErrorCode rtcErrorCode) {
Log.i("ry", "订阅资源失败: " + rtcErrorCode.getReason());
ToastUtil.show(mContext.getString(R.string.live_pk_link_error));
}
});
}
});
}
@Override
public void onFailed(RTCErrorCode rtcErrorCode) {
Log.i("ry", "11111加入其他房间失败 " + rtcErrorCode.getReason());
}
});*/
LivePushRyViewHolder.btn_close.setVisibility(View.VISIBLE);
LiveRyAnchorActivity.isDRPK = 1;
ScreenDimenUtil util = ScreenDimenUtil.getInstance();
int mScreenWdith = util.getScreenWdith();
/*LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, mScreenWdith * 720 / 960);
params.weight = 1;
params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
livePushRyViewHolder.camera.setLayoutParams(params);
livePushRyViewHolder.mPreView1.setLayoutParams(params);
livePushRyViewHolder.mPreView1.setVisibility(View.VISIBLE);*/
livePushRyViewHolder.setAnPkRtc(u);//设置对方主播视图
final SocketSendBean msg1 = new SocketSendBean()
@ -802,6 +701,7 @@ public class LiveRyLinkMicPkPresenter implements View.OnClickListener {
.param("pkhead", u.getAvatar())
.param("pkname", u.getUserNiceName());
msg1.create();
LivePushRyViewHolder.btn_close.setVisibility(View.VISIBLE);
/*Conversation.ConversationType conversationType = Conversation.ConversationType.CHATROOM;
TextMessage messageContent = TextMessage.obtain(msg1.mResult.toString());
@ -839,19 +739,6 @@ public class LiveRyLinkMicPkPresenter implements View.OnClickListener {
LivePushRyViewHolder.btn_close.setVisibility(View.VISIBLE);
LiveRyAnchorActivity.isDRPK = 1;
/*JSONObject msg1 = new JSONObject();
msg1.put("uid", CommonAppConfig.getInstance().getUid());
msg1.put("pkuid", CommonAppConfig.getInstance().getUid());
msg1.put("pkhead", CommonAppConfig.getInstance().getUserBean().getAvatarThumb());
msg1.put("pkname", CommonAppConfig.getInstance().getUserBean().getUserNiceName());
EventBus.getDefault().post(new AnchorInfoEvent(false, bean.getId(), bean.getUserNiceName(), bean.getAvatar()));
if (bean != null && bean.isRandomPk()) {
msg1.put("random_pk", bean.isRandomPk() ? 1 : 0);
msg1.put("is_ladders", bean.getRankPkImgUrl());
}*/
/*---------------------------------------------------------------- */
final SocketSendBean msg1 = new SocketSendBean()
.param("_method_", SOCKET_LINK_MIC_PK)
@ -898,116 +785,6 @@ public class LiveRyLinkMicPkPresenter implements View.OnClickListener {
onLinkMicPkStart(mApplyUid, 2);
}
});
/*IMRTCManager.getInstance().responseJoinOtherRoom(mApplyUid, true, msg1.toString(), new IRCRTCResultCallback() {
@Override
public void onSuccess() {
RCRTCEngine.getInstance().joinOtherRoom(mApplyUid, new IRCRTCResultDataCallback<RCRTCOtherRoom>() {
@Override
public void onSuccess(RCRTCOtherRoom rcrtcOtherRoom) {
rcrtcOtherRoom.registerOtherRoomEventsListener(otherRoomEventsListener);
ToastUtil.show(WordUtil.isNewZh() ? "接受成功" : "Success");
new Handler(Looper.getMainLooper()).post(new Runnable() {
public void run() {
for (int i = 0; i < rcrtcOtherRoom.getRemoteUsers().size(); i++) {
//遍历远端用户发布的资源列表
for (RCRTCInputStream stream : rcrtcOtherRoom.getRemoteUsers().get(i).getStreams()) {
if (stream.getMediaType() == RCRTCMediaType.VIDEO) {
//如果远端用户发布的是视频流创建显示视图RCRTCVideoView并添加到布局中显示
RCRTCVideoView remoteView = new RCRTCVideoView(contexts);
((RCRTCVideoInputStream) stream).setVideoView(remoteView);
//todo 本demo只演示添加1个远端用户的视图
livePushRyViewHolder.mPreView1.removeAllViews();
remoteView.setScalingType(SCALE_ASPECT_FILL);
livePushRyViewHolder.mPreView1.addView(remoteView);
}
//如果要订阅所有远端用户的流保存所有流信息方便后面统一订阅
inputStreamList.add(stream);
}
}
Log.e("ry", "asa" + inputStreamList.size());
//开始订阅资源
rtcRoom.getLocalUser().subscribeStreams(inputStreamList, new IRCRTCResultCallback() {
@Override
public void onSuccess() {
Log.i("ry", "订阅资源成功");
List<RCRTCStream> streams = new ArrayList<>();
streams.add(RCRTCEngine.getInstance().getDefaultVideoStream());
RCRTCMixConfig config = new RCRTCMixConfig();
RCRTCMixConfig.MediaConfig mediaConfig = new RCRTCMixConfig.MediaConfig();
config.setMediaConfig(mediaConfig);
//视频输出配置
RCRTCMixConfig.MediaConfig.VideoConfig videoConfig = new RCRTCMixConfig.MediaConfig.VideoConfig();
mediaConfig.setVideoConfig(videoConfig);
//大流视频的输出参数
RCRTCMixConfig.MediaConfig.VideoConfig.VideoLayout normal = new RCRTCMixConfig.MediaConfig.VideoConfig.VideoLayout();
videoConfig.setVideoLayout(normal);
//推荐宽帧率参数值可以通过默认视频流的配置获取也可以根据实际需求来自定义设置
//如不设置宽高值则服务端将使用默认宽高 360 * 640
//:发布的视频分辨率为720 * 1280如果不设置则观众端看到的视频分辨率为 360 * 640,
//所以如果想让观众端看到的视频分辨率和发布视频分辨率一致则应从发布的视频流中获取分辨率配置并设置到 mediaConfig
RCRTCVideoStreamConfig defaultVideoConfig = RCRTCEngine.getInstance().getDefaultVideoStream().getVideoConfig();
int fps = defaultVideoConfig.getVideoFps().getFps();
int width = 960;
int height = 720;
normal.setWidth(width); //视频宽
normal.setHeight(height); //视频高
normal.setFps(fps); //视频帧率
//1. 设置自适应合流布局模式
config.setLayoutMode(RCRTCMixConfig.MixLayoutMode.ADAPTIVE);
//2. 合流画布设置
rcrtcLiveInfo.setMixConfig(config, new IRCRTCResultCallback() {
@Override
public void onSuccess() {
Log.e("ry", "混成功14");
}
@Override
public void onFailed(RTCErrorCode errorCode) {
Log.e("ry", "混失败" + errorCode);
}
});
}
@Override
public void onFailed(RTCErrorCode rtcErrorCode) {
Log.i("ry", "订阅资源失败: " + rtcErrorCode);
ToastUtil.show(mContext.getString(R.string.live_pk_link_error));
}
});
}
});
}
@Override
public void onFailed(RTCErrorCode rtcErrorCode) {
Log.e("ry", mApplyUid + "加入其他房间失败 " + rtcErrorCode);
Log.i("ry", mApplyUid + "加入其他房间失败 " + rtcErrorCode);
}
});
new Handler(Looper.getMainLooper()).post(new Runnable() {
public void run() {
Bus.get().post(new LiveAudienceEvent()
.setType(LiveAudienceEvent.LiveAudienceType.UN_LEAVELIVE));
LiveRyAnchorActivity.isDRPK = 1;
LivePushRyViewHolder.btn_close.setVisibility(View.VISIBLE);
SocketRyLinkMicPkUtil.linkMicPkAccept(mSocketRyClient, mApplyUid, mApplyUrl, mApplyNmae);
EventBus.getDefault().post(new LiveAudienceEvent().setType(LiveAudienceEvent.LiveAudienceType.UP_PK_TWO).setObject(mApplyUid));
onLinkMicPkStart(mApplyUid, 2);
}
});
}
@Override
public void onFailed(RTCErrorCode errorCode) {
ToastUtil.show("接受失败");
}
});*/
}
//与用户连麦
@ -2321,7 +2098,6 @@ public class LiveRyLinkMicPkPresenter implements View.OnClickListener {
if (mIsAnchor) {
((LiveRyAnchorActivity) mContext).setPkBtnVisible(false);
mPkTimeCount = PK_TIME_MAX;
} else {
// mPkTimeCount=mPkTimeFromServer;
mPkTimeCount = PK_TIME_MAX;

View File

@ -1260,6 +1260,7 @@ public class SocketRyClient {
mListener.onLinkMicPkApply(u, map.getString("stream"), 1);
break;
case 2://收到对方主播PK回调
L.eSw("收到对方主播PK回调");
RandomPkManager.getInstance().setPkStatus(RandomPkManager.PK_STATUS_START);
mListener.onLinkMicToPk(map.getString("uid"), map.getString("pkhead"), map.getString("pkname"));
mListener.onLinkMicPkStart(map.getString("uid"), map.getString("pkhead"), map.getString("pkname"), map.getString("is_ladders"));// mListener.onLinkMicPkStart(map.getString("uid"));
@ -1274,6 +1275,7 @@ public class SocketRyClient {
mListener.onLinkMicPkRefuse();
break;
case 4://所有人收到PK开始址的回调
L.eSw("所有人收到PK开始址的回调");
// RandomPkManager.getInstance().setPkStatus(RandomPkManager.PK_STATUS_START);
EventBus.getDefault().post(new LiveAudienceEvent().setType(LiveAudienceEvent.LiveAudienceType.PK_TWO_START).setObject(map.getString("pkuid")));
mListener.onLinkMicPkStart(map.getString("pkuid"), map.getString("pkhead"), map.getString("pkname"), map.getString("is_ladders"));

View File

@ -44,7 +44,6 @@ import com.yunbao.common.interfaces.CommonCallback;
import com.yunbao.common.interfaces.ImageResultCallback;
import com.yunbao.common.interfaces.OnItemClickListener;
import com.yunbao.common.manager.IMLoginManager;
import com.yunbao.common.manager.SWManager;
import com.yunbao.common.utils.Bus;
import com.yunbao.common.utils.DialogUitl;
import com.yunbao.common.utils.L;
@ -66,6 +65,7 @@ import com.yunbao.live.activity.LiveRyAnchorActivity;
import com.yunbao.live.dialog.LiveAnchorEditCallMeDialog;
import com.yunbao.live.dialog.LiveAnchorSayPopDialog;
import com.yunbao.live.dialog.LiveFaceUnityDialogFragment;
import com.yunbao.live.dialog.LiveFaceUnityDialogNewFragment;
import com.yunbao.live.dialog.LiveNewRoomClassDialogFragment;
import com.yunbao.live.dialog.LiveNewRoomTypeDialogFragment;
import com.yunbao.live.dialog.LiveTimeDialogFragment;
@ -77,10 +77,7 @@ import org.greenrobot.eventbus.ThreadMode;
import java.io.File;
import java.util.Locale;
import cn.rongcloud.rtc.api.RCRTCEngine;
import cn.rongcloud.rtc.api.stream.RCRTCCameraOutputStream;
import cn.rongcloud.rtc.api.stream.RCRTCVideoStreamConfig;
import cn.rongcloud.rtc.base.RCRTCParamsType;
import io.agora.beautyapi.faceunity.agora.SWManager;
public class LiveNewReadyRyViewHolder extends AbsViewHolder implements View.OnClickListener {
@ -328,36 +325,6 @@ public class LiveNewReadyRyViewHolder extends AbsViewHolder implements View.OnCl
liveOpenCustomPopup.setSelectClarity(selectClarity);
}
SWManager.get().setDimensions(selectClarity);
/*//設置開播分辨率
RCRTCParamsType.RCRTCVideoResolution rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_480_848;
int minRate = 200;
int maxRate = 900;
switch (selectClarity) {
case 0:
rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_480_848;
minRate = 200;
maxRate = 900;
break;
case 1:
rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_720_1280;
minRate = 250;
maxRate = 2200;
break;
case 2:
rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_1080_1920;
minRate = 400;
maxRate = 4000;
break;
}
RCRTCVideoStreamConfig config =
RCRTCVideoStreamConfig.Builder.create()
.setMinRate(minRate)
.setMaxRate(maxRate)
.setVideoFps(RCRTCParamsType.RCRTCVideoFps.Fps_15)
.setVideoResolution(rcrtcVideoResolution)
.build();
RCRTCEngine.getInstance().getDefaultVideoStream().setVideoConfig(config);*/
Log.e("切换分辨率", "时间戳" + System.currentTimeMillis());
//重新发布一下流
Bus.get().post(new LivePushRyEvent());
@ -569,8 +536,7 @@ public class LiveNewReadyRyViewHolder extends AbsViewHolder implements View.OnCl
} else if (i == R.id.btn_locaiton) {
switchLocation();
} else if (i == R.id.btn_horizontally) {
RCRTCCameraOutputStream cameraStream = RCRTCEngine.getInstance().getDefaultVideoStream();
cameraStream.setPreviewMirror(!cameraStream.isPreviewMirror());
SWManager.get().setMirrorMode();
} else if (i == R.id.btn_robot) {
new XPopup.Builder(mContext)
.asCustom(new LiveRobotSettingCustomPopup(mContext))
@ -614,6 +580,20 @@ public class LiveNewReadyRyViewHolder extends AbsViewHolder implements View.OnCl
}
}
/**
 * Opens the FaceUnity beauty-panel dialog over the anchor's ready screen.
 *
 * @param init when true the dialog is shown and immediately dismissed —
 *             presumably a warm-up pass to initialize the beauty SDK
 *             without leaving the panel on screen (TODO confirm intent).
 */
public void setFaceUnityNew(boolean init){
    LiveFaceUnityDialogNewFragment fragment = new LiveFaceUnityDialogNewFragment(mContext);
    fragment.setManager(manager);
    // Hand over the root view so the dialog can re-show it on dismiss.
    fragment.setDismissShowUi(mRootView);
    // Only an anchor activity provides the FragmentManager this dialog needs.
    if (mContext instanceof LiveRyAnchorActivity) {
        fragment.show(((LiveRyAnchorActivity) mContext).getSupportFragmentManager(), "FaceUnity");
        // Hide the ready-screen UI while the beauty panel is open.
        mRootView.setVisibility(View.INVISIBLE);
        if (init) {
            fragment.dismiss();
        }
    }
}
/**
* 打开心愿单窗口
*/

View File

@ -38,7 +38,6 @@ import com.yunbao.common.http.HttpCallback;
import com.yunbao.common.http.HttpClient;
import com.yunbao.common.interfaces.OnItemClickListener;
import com.yunbao.common.manager.IMLoginManager;
import com.yunbao.common.manager.SWAuManager;
import com.yunbao.common.utils.Bus;
import com.yunbao.common.utils.DialogUitl;
import com.yunbao.common.utils.DpUtil;
@ -53,7 +52,6 @@ import com.yunbao.live.R;
import com.yunbao.live.activity.LiveActivity;
import com.yunbao.live.activity.LiveAudienceActivity;
import com.yunbao.live.event.LiveAudienceEvent;
import com.yunbao.live.utils.LiveExoPlayerManager;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
@ -89,6 +87,7 @@ import cn.rongcloud.rtc.base.RCRTCRoomType;
import cn.rongcloud.rtc.base.RCRTCStreamType;
import cn.rongcloud.rtc.base.RTCErrorCode;
import cn.rongcloud.rtc.core.RendererCommon;
import io.agora.beautyapi.faceunity.agora.SWAuManager;
public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
@ -121,7 +120,7 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
static int vHeight;//视频高
private TextView debugView;
private LiveExoPlayerManager mPlayer;
//private LiveExoPlayerManager mPlayer;
private boolean isPk = false;
private boolean userJoinLinkMic = false;//用户是否已加入房间
@ -174,10 +173,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
params.height = vHeight;
mPkContainer.requestLayout();
mPlayer = new LiveExoPlayerManager(mContext);
mPlayer.setMainView(mVideoView);
mPlayer.setListener(new ExoPlayerListener());
debugView = new TextView(mContext);
debugView.setBackgroundColor(Color.WHITE);
}
@ -185,7 +180,7 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
public void initSwEngine(String liveUid) {
this.liveUid = liveUid;
swAuManager = SWAuManager.get();
swAuManager.setAudienceContainer(playFrameLayout);
swAuManager.setAudienceContainer(ry_view);
swAuManager.initRtcEngine((Activity) mContext);
swAuManager.setupRemoteVideo(Integer.parseInt(liveUid));
swAuManager.joinRoom(CommonAppConfig.getInstance().getUid(), CommonAppConfig.SWToken, SWAuManager.getChannelName(liveUid));
@ -229,10 +224,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
Log.i(TAG, "setLiveBeanLandscape: " + landscape + " isPk: " + isPk);
this.landscape = landscape;
this.videoLandscape = landscape;
if (mPlayer != null) {
mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
}
if (landscape == 2) {
Log.i(TAG, "还原916");
RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
@ -297,13 +288,7 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
*/
@Override
public void resumePlay() {
if (!mPlayer.isPlaying()) {
new Handler(Looper.getMainLooper())
.postDelayed(() -> {
mPlayer.replay();
// ToastUtil.show("强制播放" + val);
}, 100);
}
}
/**
@ -317,30 +302,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
PLAY_MODEL = playModel;
waitNextUrl = null;
Log.i(TAG, "play" + " url:" + url + " playModel: " + playModel + " landscape: " + landscape + " videoLandscape" + videoLandscape);
if (playModel != PLAY_MODEL_DEF && !url.contains(videoFps[0] + ".flv")) {
mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
if (landscape == VIDEO_VERTICAL && !isPk) {
url = url.replace(".flv", videoRatioVertical[playModel] + videoFps[0] + ".flv");
} else if (landscape == VIDEO_HORIZONTAL || isPk) {
url = url.replace(".flv", videoRatioHorizontal[playModel] + videoFps[0] + ".flv");
}
} else if (!url.contains(videoFps[0] + ".flv")) {
mPlayer.setViewResizeMode(false);
}
Log.e("purl121", url);
if (TextUtils.isEmpty(url) || mVideoView == null) {
return;
}
if (TextUtils.isEmpty(url) || mVideoView == null) {
return;
}
if (mPlayer.isPlaying()) {
mPlayer.stop();
mPlayer.clearUrl();
}
mPlayer.startUrl(url);
purl = url;
onPrepared();
}
@ -351,14 +312,12 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
PLAY_MODEL = playModel;
Log.i(TAG, "switchStream: " + " url:" + url + " playModel: " + playModel + " landscape: " + landscape + " videoLandscape = " + videoLandscape + " ispk = " + isPk + " bean = " + roomModel.getEnterRoomInfo().getIsconnection());
if (playModel != PLAY_MODEL_DEF && !url.contains(videoFps[0] + ".flv")) {
mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
if (landscape == VIDEO_VERTICAL && !isPk) {
url = url.replace(".flv", videoRatioVertical[playModel] + videoFps[0] + ".flv");
} else if (landscape == VIDEO_HORIZONTAL || isPk) {
url = url.replace(".flv", videoRatioHorizontal[playModel] + videoFps[0] + ".flv");
}
} else if (!url.contains(videoFps[0] + ".flv")) {
mPlayer.setViewResizeMode(false);
}
Log.e("purl121", url);
@ -370,7 +329,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
if (TextUtils.isEmpty(url) || mVideoView == null) {
return;
}
mPlayer.switchUrl(url);
purl = url;
}
@ -383,7 +341,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
Log.i(TAG, "switchStreamPk: isPk1" + isPk + " tmp = " + !tmpPk + " isPk2 = " + this.isPk);
if (this.isPk && tmpPk) return;
if (isPk && !tmpPk || this.isPk) {
mPlayer.setViewResizeMode(false);
String url;
if (PLAY_MODEL != -1) {
url = srcUrl.replace(".flv", videoRatioHorizontal[PLAY_MODEL] + videoFps[0] + ".flv");
@ -393,7 +350,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
if (!tmpPk) {
waitNextUrl = url;
}
mPlayer.switchUrl(srcUrl);
tmpPk = true;
} else if (!isPk) {
tmpPk = false;
@ -404,14 +360,12 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
} else {
waitNextUrl = null;
}
mPlayer.switchUrl(srcUrl);
}
}
@Override
public void clearFrame() {
super.clearFrame();
mPlayer.clearFrame();
RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
params.height = ViewGroup.LayoutParams.WRAP_CONTENT;
params.topMargin = mContext.getResources().getDimensionPixelOffset(R.dimen.live_top);
@ -436,10 +390,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
public void release() {
mEnd = true;
mStarted = false;
if (mPlayer != null) {
mPlayer.stop();
mPlayer.release();
}
Bus.getOff(this);
EventBus.getDefault().unregister(this);
L.e(TAG, "release------->");
@ -454,9 +404,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
mCover.setVisibility(View.VISIBLE);
}
}
if (mPlayer != null) {
mPlayer.stop();
}
stopPlay2();
}
@ -481,10 +428,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
params1.addRule(RelativeLayout.ALIGN_TOP);
ry_view.requestLayout();
isPk = true;
if (mPlayer.getUrl().contains("848_24.flv") || mPlayer.getUrl().contains("1280_24.flv")) {
tmpPk = false;
switchStreamPk(true);
}
}
/**
@ -501,7 +444,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
} else {
height = ViewGroup.LayoutParams.WRAP_CONTENT;
}
mPlayer.setViewResizeMode(landscape == VIDEO_VERTICAL);
if (landscape == 2) {
Log.i(TAG, "onPrepared:还原916 land=" + videoLandscape);
RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mVideoView.getLayoutParams();
@ -804,12 +746,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
} else {
ToastUtil.show("You have successfully exited the voice connection");
}
if (mPlayer.getNowPlayer() != null) {
mPlayer.play();
Log.e("ry", mPlayer.isPlaying() + "purl" + purl);
if (!mPlayer.isPlaying()) {
mPlayer.switchUrl(purl);
}
ry_view.removeAllViews();
ry_view.getLayoutParams().height = ViewGroup.LayoutParams.WRAP_CONTENT;
onPrepared();
@ -817,7 +753,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
MicStatusManager.getInstance().clear();
resumePlay();
}
if (onMicCallback != null) {
onMicCallback.onMikUpdate();
@ -1021,7 +956,7 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
Log.i(TAG, "资源流 type: " + stream.getMediaType());
if (stream.getMediaType() == RCRTCMediaType.VIDEO) {
//暂停播放
mPlayer.stop();
//如果远端用户发布的是视频流创建显示视图RCRTCVideoView并添加到布局中显示
//如果远端用户发布的是视频流创建显示视图RCRTCVideoView并添加到布局中显示
@ -1314,7 +1249,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
} else if (videoLandscape == VIDEO_HORIZONTAL) {
url = url.replace(".flv", videoRatioHorizontal[PLAY_MODEL_480] + videoFps[0] + ".flv");
}
mPlayer.switchUrl(url);
}
private class ExoPlayerListener implements Player.Listener {
@ -1364,7 +1298,6 @@ public class LivePlayRyViewHolder extends LiveRoomPlayViewHolder {
loadingListener.onPlayer();
}
if (waitNextUrl != null) {
mPlayer.switchUrl(waitNextUrl);
waitNextUrl = null;
}
}

View File

@ -46,8 +46,6 @@ import com.yunbao.common.http.live.LiveNetManager;
import com.yunbao.common.manager.IMLoginManager;
import com.yunbao.common.manager.IMRTCManager;
import com.yunbao.common.manager.RandomPkManager;
import com.yunbao.common.manager.SWAuManager;
import com.yunbao.common.manager.SWManager;
import com.yunbao.common.utils.Bus;
import com.yunbao.common.utils.DialogUitl;
import com.yunbao.common.utils.DpUtil;
@ -83,8 +81,8 @@ import cn.rongcloud.rtc.api.stream.RCRTCInputStream;
import cn.rongcloud.rtc.api.stream.RCRTCLiveInfo;
import cn.rongcloud.rtc.base.RCRTCParamsType;
import cn.rongcloud.rtc.base.RTCErrorCode;
import cn.rongcloud.rtc.core.CameraVideoCapturer;
import io.agora.rtc2.RtcEngine;
import io.agora.beautyapi.faceunity.agora.SWAuManager;
import io.agora.beautyapi.faceunity.agora.SWManager;
import io.rong.imlib.IRongCallback;
import io.rong.imlib.RongIMClient;
import io.rong.imlib.model.Conversation;
@ -96,7 +94,6 @@ import io.rong.message.TextMessage;
*/
public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITXLivePushListener {
private RtcEngine mRtcEngine;
private int mMeiBaiVal;//基础美颜 美白
private int mMoPiVal;//基础美颜 磨皮
private int mHongRunVal;//基础美颜 红润
@ -521,6 +518,7 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
// TODO: 2024/4/13 退出对方主播直播间
SWManager.get().exitChannelToUid(Integer.parseInt(CommonAppConfig.getInstance().getUid()),pkUid1);
SWManager.get().updateMyChannelView((FrameLayout) mBigContainer);
btn_close.setVisibility(View.GONE);//隐藏连麦按钮
EventBus.getDefault().post(new AnchorInfoEvent(true, "", "", ""));
closeButtonGone();
@ -590,164 +588,6 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
}
});
initRtcEngine();
/* // 构建 RoomConfig指定房间类型和主播身份
RCRTCRoomConfig roomConfig = RCRTCRoomConfig.Builder.create()
// 根据实际场景选择音视频直播LIVE_AUDIO_VIDEO 或音频直播LIVE_AUDIO
.setRoomType(RCRTCRoomType.LIVE_AUDIO_VIDEO)
.setLiveRole(BROADCASTER)
.build();
//调用 RCRTCEngine 下的 joinRoom 方法创建并加入一个直播房间
final CommonAppConfig appConfig = CommonAppConfig.getInstance();
RCRTCEngine.getInstance().joinRoom(appConfig.getUid(), roomConfig, new IRCRTCResultDataCallback<RCRTCRoom>() {
@Override
public void onFailed(RTCErrorCode errorCode) {
ToastUtil.show("开播失败" + errorCode);
}
@Override
public void onSuccess(final RCRTCRoom room) {
// 保存房间对象
rtcRoom = room;
IMRTCManager.getInstance().setRtcRoom(room);
new Handler(Looper.getMainLooper()).post(new Runnable() {
public void run() {
RCRTCConfig config = RCRTCConfig.Builder.create()
//是否硬解码
.enableHardwareDecoder(true)
//是否硬编码
.enableHardwareEncoder(true)
.build();
RCRTCEngine.getInstance().init(contexts, config);
RCRTCEngine.getInstance().getDefaultAudioStream().setAudioQuality(RCRTCParamsType.AudioQuality.MUSIC_HIGH, RCRTCParamsType.AudioScenario.MUSIC_CHATROOM);
//設置開播分辨率
RCRTCParamsType.RCRTCVideoResolution rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_480_848;
int minRate = 200;
int maxRate = 900;
switch (IMLoginManager.get(mContext).getSelectClarity()) {
case 0:
rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_480_848;
minRate = 200;
maxRate = 900;
break;
case 1:
rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_720_1280;
minRate = 250;
maxRate = 2200;
break;
case 2:
rcrtcVideoResolution = RCRTCParamsType.RCRTCVideoResolution.RESOLUTION_1080_1920;
minRate = 400;
maxRate = 4000;
break;
}
RCRTCVideoStreamConfig videoConfigBuilder = RCRTCVideoStreamConfig.Builder.create()
//设置分辨率
.setVideoResolution(rcrtcVideoResolution)
//设置帧率
.setVideoFps(RCRTCParamsType.RCRTCVideoFps.Fps_24)
//设置最小码率480P下推荐200
.setMinRate(minRate)
//设置最大码率480P下推荐900
.setMaxRate(maxRate)
.build();
// 创建本地视频显示视图
RCRTCEngine.getInstance().getDefaultVideoStream().setVideoConfig(videoConfigBuilder);
RCRTCEngine.getInstance().getDefaultVideoStream().enableTinyStream(false);
RCRTCVideoView rongRTCVideoView = new RCRTCVideoView(contexts);
rongRTCVideoView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
RCRTCEngine.getInstance().getDefaultVideoStream().setVideoView(rongRTCVideoView);
RCRTCEngine.getInstance().getDefaultVideoStream().startCamera(new IRCRTCResultDataCallback<Boolean>() {
@Override
public void onSuccess(Boolean data) {
//设置摄像头最大缩放比例
boolean zoom = RCRTCEngine.getInstance().getDefaultVideoStream().setCameraZoomFactor(1.0f);
// ToastUtil.show("设置比例="+zoom);
Log.i("摄像头", "onSuccess: 打开摄像头");
isNeedOpenCamera = false;
}
@Override
public void onFailed(RTCErrorCode errorCode) {
Log.i("摄像头", "onFailed: 打开摄像头失败 " + errorCode);
}
});
RCRTCEngine.getInstance().registerEventListener(new IRCRTCEngineEventListener() {
@Override
public void onKicked(String roomId, RCRTCParamsType.RCRTCKickedReason kickedReason) {
}
@Override
public void onError(RTCErrorCode errorCode) {
super.onError(errorCode);
Log.i("摄像头", "onError: 错误码" + errorCode);
}
@Override
public void onLocalVideoEventNotify(RCRTCVideoEventCode event) {
super.onLocalVideoEventNotify(event);
Log.i("摄像头", "onLocalVideoEventNotify: 本地视频事件" + event.code);
if (event.code == 3) {
isNeedOpenCamera = true;
}
}
});
//设置摄像头最大缩放比例
// RCRTCEngine.getInstance().getDefaultVideoStream().setCameraZoomFactor(RCRTCEngine.getInstance().getDefaultVideoStream().getCameraMaxZoomFactor());
mPreView.addView(rongRTCVideoView);
tencentTRTCBeautyManager = new TencentTRTCBeautyManager(mContext);
//加入房间成功后可以通过 RCRTCLocalUser 对象发布本地默认音视频流包括麦克风采集的音频和摄像头采集的视频
RCRTCEngine.getInstance().getDefaultVideoStream().setEncoderMirror(true);
if (rtcRoom == null || rtcRoom.getLocalUser() == null) {
if (room == null || room.getLocalUser() == null) {
ToastUtil.show("开播失败 请稍后再试,错误代码:room is null");
((LiveRyAnchorActivity) mContext).endLives();
return;
}
rtcRoom = room;
IMRTCManager.getInstance().setRtcRoom(room);
}
rtcRoom.getLocalUser().publishDefaultLiveStreams(new IRCRTCResultDataCallback<RCRTCLiveInfo>() {
@Override
public void onSuccess(RCRTCLiveInfo rcrtcLiveInfo1) {
rcrtcLiveInfo = rcrtcLiveInfo1;
room.registerRoomListener(roomEventsListener);
//美颜
// new Handler(Looper.getMainLooper()).post(new Runnable() {
// public void run() {
// //旧美颜不需要了
//
//
// }
// });
}
@Override
public void onFailed(RTCErrorCode rtcErrorCode) {
Log.e("ry", "rtcErrorCode" + rtcErrorCode);
}
});
}
});
}
});*/
}
/**
@ -784,6 +624,7 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
// Called when the anchor-to-anchor link-mic (连麦/PK) session is closed.
// Restores this anchor's own camera stream into the full-size container and
// hides the "close link" button that is only relevant during a link session.
// NOTE(review): swManager (SW RTC wrapper) and mBigContainer are declared
// outside this hunk — presumably the main video FrameLayout; confirm in the
// full AbsRyLivePushViewHolder source.
public void onLinkMicAnchorClose(){
// Re-attach my own channel's video view to the big (full-screen) container.
swManager.updateMyChannelView((FrameLayout) mBigContainer);
// btn_close is a static view on LivePushRyViewHolder; hide it now that the link ended.
LivePushRyViewHolder.btn_close.setVisibility(View.GONE);
}
@Override
@ -825,17 +666,7 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
} else {
mCameraFront = true;
}
RCRTCEngine.getInstance().getDefaultVideoStream().switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
@Override
public void onCameraSwitchDone(boolean isFrontCamera) {
}
@Override
public void onCameraSwitchError(String errorDescription) {
}
});
swManager.switchCamera();
}
/**
@ -982,7 +813,6 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
// Records the background-music (BGM) file path for this push session.
// NOTE(review): this override only stores the path into mBgmPath — no playback
// is started here; presumably actual playback is triggered elsewhere (the old
// player-based implementation was removed in this commit). Confirm the new
// BGM playback entry point before relying on this.
public void startBgm(String path) {
mBgmPath = path;
}
@Override
@ -1107,6 +937,12 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
});
}
// Enables or disables the FaceUnity beauty pipeline on the SW RTC engine.
// Safe to call before the engine is initialized: silently ignored while
// swManager is still null.
// @param flag true to enable beauty processing, false to disable it
public void setEnableBeauty(boolean flag){
if(swManager!=null){
swManager.setEnableBeauty(flag);
}
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void updateSub(LiveAudienceEvent event) {
if (event.getType() == LIVE_PK_END) {
@ -1116,4 +952,12 @@ public class LivePushRyViewHolder extends AbsRyLivePushViewHolder implements ITX
}
Log.i("PK----->", "updateSub: " + isPk + "|" + event.getType());
}
// Lifecycle teardown for the push view holder: after the superclass cleanup,
// leaves every joined RTC channel so camera/encoder resources are released.
// Null-guarded because swManager may never have been initialized if the
// broadcast was aborted before the engine started.
@Override
public void onDestroy() {
super.onDestroy();
if(swManager!=null){
swManager.exitChannelAll();
}
}
}

View File

@ -72,7 +72,7 @@ dependencies {
//
api project(':video')
api project(':common')
api project(':FaceUnity')//
api project(':lib_faceunity')//
api project(':Share')//
implementation 'androidx.appcompat:appcompat:1.3.0'
implementation 'com.google.android.material:material:1.4.0'

View File

@ -85,7 +85,7 @@ android {
doLast {
delete(fileTree(dir: outputDir, includes: [
'model/ai_bgseg_green.bundle',
'model/ai_face_processor.bundle',
//'model/ai_face_processor.bundle',
//'model/ai_face_processor_lite.bundle',
'model/ai_hairseg.bundle',
'model/ai_hand_processor.bundle',

View File

@ -7,3 +7,4 @@ include ':pluginsForAnchor'
//include ':lib_huawei'
include ':lib_google'
include ':IAP6Helper'
include ':lib_faceunity'