add[Reputation upgrade - integrate beauty filters]

Martin
2024-04-18 09:47:38 +08:00
parent 0db8eefbe6
commit 583740e866
41 changed files with 3303 additions and 1920 deletions

lib_faceunity/.gitignore

@@ -0,0 +1,4 @@
/build
/src/main/assets/makeup
/src/main/assets/sticker
authpack.java
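
Note: authpack.java is the per-app FaceUnity license certificate, so it is deliberately kept out of version control; the makeup and sticker asset bundles are likewise expected to be supplied by each integrator under src/main/assets rather than committed.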

View File

@@ -0,0 +1,79 @@
apply plugin: 'com.android.library'
apply plugin: 'maven-publish'
apply plugin: 'kotlin-android'
android {
compileSdkVersion 31
buildToolsVersion "30.0.3"
defaultConfig {
minSdkVersion 21
targetSdkVersion 31
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
consumerProguardFiles "consumer-rules.pro"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
}
dependencies {
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
if (new File("$AGORA_RTC_SDK").exists()) {
api fileTree(dir: "${AGORA_RTC_SDK}", include: ['*.jar', '*.aar'])
} else {
api "$AGORA_RTC_SDK"
}
api project(path: ':FaceUnity')
}
// Because the components are created only during the afterEvaluate phase, you must
// configure your publications using the afterEvaluate() lifecycle method.
afterEvaluate {
publishing {
publications {
// Creates a Maven publication called "release".
release(MavenPublication) {
// Applies the component for the release build variant.
from components.release
// You can then customize attributes of the publication as shown below.
groupId = 'com.github.AgoraIO-Community.BeautyAPI'
artifactId = 'FaceUnity'
version = "$LIB_VERSION"
}
// Creates a Maven publication called "debug".
debug(MavenPublication) {
// Applies the component for the debug build variant.
from components.debug
groupId = 'com.github.AgoraIO-Community.BeautyAPI'
artifactId = 'FaceUnity'
version = "$LIB_VERSION"
}
}
if(IS_PUBLISH_LOCAL){
repositories {
maven {
url = "file://${rootProject.projectDir.path}/maven"
println("maven publish to ${url}")
}
}
}
}
}
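
Note: this script resolves two Gradle properties from the integrating project, neither of which is defined in this commit: AGORA_RTC_SDK may be either a local directory of RTC jar/aar files or a Maven dependency string (hypothetically something like AGORA_RTC_SDK=io.agora.rtc:full-sdk:4.x.y in gradle.properties), and LIB_VERSION supplies the Maven publication version.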


lib_faceunity/proguard-rules.pro

@@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

lib_faceunity/src/main/AndroidManifest.xml

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:tools="http://schemas.android.com/tools"
package="io.agora.beautyapi.faceunity">
<uses-sdk tools:overrideLibrary="io.agora.beautyapi.faceunity" />
</manifest>
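
The tools:overrideLibrary marker asks the manifest merger to skip the minSdkVersion check for the listed package, presumably so that host apps with a lower minSdkVersion can still merge this library's manifest without a build error.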

lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt

@@ -0,0 +1,179 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity
import android.content.Context
import android.view.View
import com.faceunity.core.faceunity.FURenderKit
import io.agora.base.VideoFrame
import io.agora.rtc2.Constants
import io.agora.rtc2.RtcEngine
const val VERSION = "1.0.6"

enum class CaptureMode{
    Agora, // Use Agora's internal raw-data interface for processing
    Custom // Custom mode: the caller must invoke the onFrame interface to pass raw video frames to the BeautyAPI for processing
}

interface IEventCallback{
    /**
     * Statistics callback, fired once after each frame is processed
     *
     * @param stats Beauty statistics
     */
    fun onBeautyStats(stats: BeautyStats)
}

data class BeautyStats(
    val minCostMs: Long, // Minimum cost within the statistics interval
    val maxCostMs: Long, // Maximum cost within the statistics interval
    val averageCostMs: Long // Average cost within the statistics interval
)

enum class MirrorMode {

    // Definition of the normal, un-mirrored image: with the front camera, the captured image and the phone preview are left-right opposite; with the rear camera, they match.

    MIRROR_LOCAL_REMOTE, // Mirror both local and remote (front-camera default); local and remote stickers both look correct
    MIRROR_LOCAL_ONLY, // Mirror local only, not remote; remote stickers look correct, local stickers are mirrored. For calling and e-commerce live scenarios (keeps the signboard text behind the host readable on the remote side); since local and remote are opposite in this mode, text stickers are necessarily reversed on one side
    MIRROR_REMOTE_ONLY, // Mirror remote only, not local; remote stickers look correct, local stickers are mirrored
    MIRROR_NONE // Mirror neither local nor remote (rear-camera default); local and remote stickers both look correct
}

data class CameraConfig(
    val frontMirror: MirrorMode = MirrorMode.MIRROR_LOCAL_REMOTE, // Front-camera default: mirror both local and remote
    val backMirror: MirrorMode = MirrorMode.MIRROR_NONE // Rear-camera default: mirror neither
)

data class Config(
    val context: Context, // Android context
    val rtcEngine: RtcEngine, // Agora RTC engine
    val fuRenderKit: FURenderKit, // Beauty SDK handle
    val eventCallback: IEventCallback? = null, // Event callback
    val captureMode: CaptureMode = CaptureMode.Agora, // Capture mode
    val statsDuration: Long = 1000, // Statistics interval (ms)
    val statsEnable: Boolean = false, // Whether statistics collection is enabled
    val cameraConfig: CameraConfig = CameraConfig() // Camera mirror configuration
)

enum class ErrorCode(val value: Int) {
    ERROR_OK(0), // All good
    ERROR_HAS_NOT_INITIALIZED(101), // Another API was called before initialize, or after initialize failed
    ERROR_HAS_INITIALIZED(102), // initialize was called again after it had already succeeded
    ERROR_HAS_RELEASED(103), // Another API was called after release had destroyed the instance
    ERROR_PROCESS_NOT_CUSTOM(104), // onFrame was called to push external frames while not in Custom capture mode
    ERROR_VIEW_TYPE_ERROR(105), // Returned by setupLocalVideo when the view type is wrong
    ERROR_FRAME_SKIPPED(106), // Returned by onFrame when the frame is skipped
}

enum class BeautyPreset {
    CUSTOM, // Do not use the recommended beauty parameters
    DEFAULT // The default preset
}

fun createFaceUnityBeautyAPI(): FaceUnityBeautyAPI = FaceUnityBeautyAPIImpl()

interface FaceUnityBeautyAPI {

    /**
     * Initializes the API
     *
     * @param config Configuration parameters
     * @return See ErrorCode
     */
    fun initialize(config: Config): Int

    /**
     * Enables/disables beauty processing
     *
     * @param enable true: enable; false: disable
     * @return See ErrorCode
     */
    fun enable(enable: Boolean): Int

    /**
     * Sets up local video rendering, with mirroring handled internally
     *
     * @param view A SurfaceView or TextureView
     * @param renderMode Render scaling mode
     * @return See ErrorCode
     */
    fun setupLocalVideo(view: View, renderMode: Int = Constants.RENDER_MODE_HIDDEN): Int

    /**
     * Pushes a raw video frame from outside when captureMode == Custom
     *
     * @param videoFrame The raw video frame
     * @return See ErrorCode
     */
    fun onFrame(videoFrame: VideoFrame): Int

    /**
     * Applies Agora's recommended default beauty parameters
     *
     * @return See ErrorCode
     */
    fun setBeautyPreset(preset: BeautyPreset = BeautyPreset.DEFAULT): Int

    /**
     * Updates the camera configuration
     */
    fun updateCameraConfig(config: CameraConfig): Int

    /**
     * Whether the front camera is in use.
     * Note: the value is only accurate during beauty processing; otherwise it stays true.
     */
    fun isFrontCamera(): Boolean

    /**
     * Gets the mirror state
     *
     * @return true: mirrored; false: not mirrored
     */
    fun getMirrorApplied(): Boolean

    /**
     * Runs an operation on the processing thread
     *
     * @param run The operation to run
     */
    fun runOnProcessThread(run: () -> Unit)

    /**
     * Private parameter configuration for calling non-public APIs, mostly used for testing
     */
    fun setParameters(key: String, value: String)

    /**
     * Releases resources; once released, this instance can no longer be used
     *
     * @return See ErrorCode
     */
    fun release(): Int
}
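
To make the lifecycle above concrete, a minimal hedged usage sketch in Agora capture mode; appContext, rtcEngine and localSurfaceView are assumed to exist in the host app and are not part of this commit:

val beautyAPI = createFaceUnityBeautyAPI()
beautyAPI.initialize(Config(
    context = appContext,                     // assumed Android Context
    rtcEngine = rtcEngine,                    // assumed pre-created RtcEngine
    fuRenderKit = FURenderKit.getInstance(),
    captureMode = CaptureMode.Agora,          // frames come from Agora's raw-data observer
    statsEnable = true,
    eventCallback = object : IEventCallback {
        override fun onBeautyStats(stats: BeautyStats) {
            // min/max/average cost per statsDuration window
        }
    }
))
beautyAPI.enable(true)
beautyAPI.setupLocalVideo(localSurfaceView)   // SurfaceView or TextureView
beautyAPI.setBeautyPreset(BeautyPreset.DEFAULT)
// ... on teardown:
beautyAPI.release()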

lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt

@@ -0,0 +1,818 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity
import android.graphics.Matrix
import android.opengl.GLES11Ext
import android.opengl.GLES20
import android.view.SurfaceView
import android.view.TextureView
import android.view.View
import com.faceunity.core.entity.FUBundleData
import com.faceunity.core.entity.FURenderInputData
import com.faceunity.core.enumeration.CameraFacingEnum
import com.faceunity.core.enumeration.FUInputBufferEnum
import com.faceunity.core.enumeration.FUInputTextureEnum
import com.faceunity.core.enumeration.FUTransformMatrixEnum
import com.faceunity.core.faceunity.FUAIKit
import com.faceunity.core.faceunity.FURenderKit
import com.faceunity.core.model.facebeauty.FaceBeauty
import com.faceunity.core.model.facebeauty.FaceBeautyFilterEnum
import io.agora.base.TextureBufferHelper
import io.agora.base.VideoFrame
import io.agora.base.VideoFrame.I420Buffer
import io.agora.base.VideoFrame.SourceType
import io.agora.base.VideoFrame.TextureBuffer
import io.agora.base.internal.video.EglBase
import io.agora.base.internal.video.YuvHelper
import io.agora.beautyapi.faceunity.utils.FuDeviceUtils
import io.agora.beautyapi.faceunity.utils.LogUtils
import io.agora.beautyapi.faceunity.utils.StatsHelper
import io.agora.beautyapi.faceunity.utils.egl.GLFrameBuffer
import io.agora.beautyapi.faceunity.utils.egl.TextureProcessHelper
import io.agora.rtc2.Constants
import io.agora.rtc2.gl.EglBaseProvider
import io.agora.rtc2.video.IVideoFrameObserver
import io.agora.rtc2.video.VideoCanvas
import java.io.File
import java.nio.ByteBuffer
import java.util.Collections
import java.util.concurrent.Callable
class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver {
private val TAG = "FaceUnityBeautyAPIImpl"
private val reportId = "scenarioAPI"
private val reportCategory = "beauty_android_$VERSION"
private var beautyMode = 0 // 0: switch automatically based on buffer type; 1: always use OES texture; 2: always use I420; 3: single-texture mode
private var enableTextureAsync = true // Whether to enable texture + async caching. Cannot be toggled live during preview. On phones with strong GPUs it reduces beauty processing time; on mid-range devices the gain is small.
private var textureBufferHelper: TextureBufferHelper? = null
private var wrapTextureBufferHelper: TextureBufferHelper? = null
private var byteBuffer: ByteBuffer? = null
private var byteArray: ByteArray? = null
private var config: Config? = null
private var enable: Boolean = false
private var enableChange: Boolean = false
private var isReleased: Boolean = false
private var captureMirror = false
private var renderMirror = false
private val identityMatrix = Matrix()
private var mTextureProcessHelper: TextureProcessHelper? = null
private var statsHelper: StatsHelper? = null
private var skipFrame = 0
private enum class ProcessSourceType{
UNKNOWN,
TEXTURE_OES_ASYNC,
TEXTURE_2D_ASYNC,
TEXTURE_OES,
TEXTURE_2D,
I420
}
private var currProcessSourceType = ProcessSourceType.UNKNOWN
private var deviceLevel = FuDeviceUtils.DEVICEINFO_UNKNOWN
private var isFrontCamera = true
private var cameraConfig = CameraConfig()
private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN
private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>())
private val transformGLFrameBuffer = GLFrameBuffer()
override fun initialize(config: Config): Int {
if (this.config != null) {
LogUtils.e(TAG, "initialize >> The beauty api has been initialized!")
return ErrorCode.ERROR_HAS_INITIALIZED.value
}
this.config = config
this.cameraConfig = config.cameraConfig
if (config.captureMode == CaptureMode.Agora) {
config.rtcEngine.registerVideoFrameObserver(this)
}
statsHelper = StatsHelper(config.statsDuration){
this.config?.eventCallback?.onBeautyStats(it)
}
LogUtils.i(TAG, "initialize >> config = $config")
LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${FURenderKit.getInstance().getVersion()}")
// config face beauty
if (deviceLevel == FuDeviceUtils.DEVICEINFO_UNKNOWN) {
deviceLevel = FuDeviceUtils.judgeDeviceLevel(config.context)
FUAIKit.getInstance().faceProcessorSetFaceLandmarkQuality(deviceLevel)
if (deviceLevel > FuDeviceUtils.DEVICE_LEVEL_MID) {
FUAIKit.getInstance().fuFaceProcessorSetDetectSmallFace(true)
}
}
LogUtils.i(TAG, "initialize >> FuDeviceUtils deviceLevel=$deviceLevel")
config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "config=$config, deviceLevel=$deviceLevel", 0)
return ErrorCode.ERROR_OK.value
}
override fun enable(enable: Boolean): Int {
LogUtils.i(TAG, "enable >> enable = $enable")
if (config == null) {
LogUtils.e(TAG, "enable >> The beauty api has not been initialized!")
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
}
if (isReleased) {
LogUtils.e(TAG, "enable >> The beauty api has been released!")
return ErrorCode.ERROR_HAS_RELEASED.value
}
if(config?.captureMode == CaptureMode.Custom){
skipFrame = 2
LogUtils.i(TAG, "enable >> skipFrame = $skipFrame")
}
config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "enable=$enable", 0)
if(this.enable != enable){
this.enable = enable
enableChange = true
LogUtils.i(TAG, "enable >> enableChange")
}
return ErrorCode.ERROR_OK.value
}
override fun setupLocalVideo(view: View, renderMode: Int): Int {
val rtcEngine = config?.rtcEngine
if(rtcEngine == null){
LogUtils.e(TAG, "setupLocalVideo >> The beauty api has not been initialized!")
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
}
LogUtils.i(TAG, "setupLocalVideo >> view=$view, renderMode=$renderMode")
localVideoRenderMode = renderMode
rtcEngine.sendCustomReportMessage(reportId, reportCategory, "setupLocalVideo", "view=$view, renderMode=$renderMode", 0)
if (view is TextureView || view is SurfaceView) {
val canvas = VideoCanvas(view, renderMode, 0)
canvas.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED
rtcEngine.setupLocalVideo(canvas)
return ErrorCode.ERROR_OK.value
}
return ErrorCode.ERROR_VIEW_TYPE_ERROR.value
}
override fun onFrame(videoFrame: VideoFrame): Int {
val conf = config
if(conf == null){
LogUtils.e(TAG, "onFrame >> The beauty api has not been initialized!")
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
}
if (isReleased) {
LogUtils.e(TAG, "onFrame >> The beauty api has been released!")
return ErrorCode.ERROR_HAS_RELEASED.value
}
if (conf.captureMode != CaptureMode.Custom) {
LogUtils.e(TAG, "onFrame >> The capture mode is not Custom!")
return ErrorCode.ERROR_PROCESS_NOT_CUSTOM.value
}
if (processBeauty(videoFrame)) {
return ErrorCode.ERROR_OK.value
}
LogUtils.i(TAG, "onFrame >> Skip Frame.")
return ErrorCode.ERROR_FRAME_SKIPPED.value
}
override fun updateCameraConfig(config: CameraConfig): Int {
LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config")
cameraConfig = CameraConfig(config.frontMirror, config.backMirror)
this.config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "updateCameraConfig", "config=$config", 0)
return ErrorCode.ERROR_OK.value
}
override fun runOnProcessThread(run: () -> Unit) {
if (config == null) {
LogUtils.e(TAG, "runOnProcessThread >> The beauty api has not been initialized!")
return
}
if (isReleased) {
LogUtils.e(TAG, "runOnProcessThread >> The beauty api has been released!")
return
}
if (textureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) {
run.invoke()
} else if (textureBufferHelper != null) {
textureBufferHelper?.handler?.post(run)
} else {
pendingProcessRunList.add(run)
}
}
override fun isFrontCamera() = isFrontCamera
override fun setParameters(key: String, value: String) {
when(key){
"beauty_mode" -> beautyMode = value.toInt()
"enableTextureAsync" -> enableTextureAsync = value.toBoolean()
}
}
override fun setBeautyPreset(preset: BeautyPreset): Int {
val conf = config
if(conf == null){
LogUtils.e(TAG, "setBeautyPreset >> The beauty api has not been initialized!")
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
}
if (isReleased) {
LogUtils.e(TAG, "setBeautyPreset >> The beauty api has been released!")
return ErrorCode.ERROR_HAS_RELEASED.value
}
LogUtils.i(TAG, "setBeautyPreset >> preset = $preset")
config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "setBeautyPreset", "preset=$preset", 0)
val recommendFaceBeauty = FaceBeauty(FUBundleData("graphics" + File.separator + "face_beautification.bundle"))
if (preset == BeautyPreset.DEFAULT) {
recommendFaceBeauty.filterName = FaceBeautyFilterEnum.FENNEN_1
recommendFaceBeauty.filterIntensity = 0.7
// Teeth whitening
recommendFaceBeauty.toothIntensity = 0.3
// Eye brightening
recommendFaceBeauty.eyeBrightIntensity = 0.3
// Eye enlarging
recommendFaceBeauty.eyeEnlargingIntensity = 0.5
// Cheek blush
recommendFaceBeauty.redIntensity = 0.5 * 2
// Skin whitening
recommendFaceBeauty.colorIntensity = 0.75 * 2
// Skin smoothing
recommendFaceBeauty.blurIntensity = 0.75 * 6
if (deviceLevel > FuDeviceUtils.DEVICE_LEVEL_MID) {
val score = FUAIKit.getInstance().getFaceProcessorGetConfidenceScore(0)
if (score > 0.95) {
recommendFaceBeauty.blurType = 3
recommendFaceBeauty.enableBlurUseMask = true
} else {
recommendFaceBeauty.blurType = 2
recommendFaceBeauty.enableBlurUseMask = false
}
} else {
recommendFaceBeauty.blurType = 2
recommendFaceBeauty.enableBlurUseMask = false
}
// Mouth shape
recommendFaceBeauty.mouthIntensity = 0.3
// Nose thinning
recommendFaceBeauty.noseIntensity = 0.1
// Forehead
recommendFaceBeauty.forHeadIntensity = 0.3
// Chin
recommendFaceBeauty.chinIntensity = 0.0
// Cheek thinning
recommendFaceBeauty.cheekThinningIntensity = 0.3
// Cheek narrowing
recommendFaceBeauty.cheekNarrowIntensity = 0.0
// Smaller face
recommendFaceBeauty.cheekSmallIntensity = 0.0
// V-shaped face
recommendFaceBeauty.cheekVIntensity = 0.0
}
conf.fuRenderKit.faceBeauty = recommendFaceBeauty
return ErrorCode.ERROR_OK.value
}
override fun release(): Int {
val conf = config
val fuRenderer = conf?.fuRenderKit
if(fuRenderer == null){
LogUtils.e(TAG, "release >> The beauty api has not been initialized!")
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
}
if (isReleased) {
LogUtils.e(TAG, "setBeautyPreset >> The beauty api has been released!")
return ErrorCode.ERROR_HAS_RELEASED.value
}
LogUtils.i(TAG, "release")
if (conf.captureMode == CaptureMode.Agora) {
conf.rtcEngine.registerVideoFrameObserver(null)
}
conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0)
isReleased = true
textureBufferHelper?.let {
textureBufferHelper = null
it.handler.removeCallbacksAndMessages(null)
it.invoke {
fuRenderer.release()
mTextureProcessHelper?.release()
mTextureProcessHelper = null
transformGLFrameBuffer.release()
null
}
// it.handler.looper.quit()
it.dispose()
}
wrapTextureBufferHelper?.let {
wrapTextureBufferHelper = null
it.dispose()
}
statsHelper?.reset()
statsHelper = null
pendingProcessRunList.clear()
return ErrorCode.ERROR_OK.value
}
private fun processBeauty(videoFrame: VideoFrame): Boolean {
if (isReleased) {
LogUtils.e(TAG, "processBeauty >> The beauty api has been released!")
return false
}
val cMirror =
if (isFrontCamera) {
when (cameraConfig.frontMirror) {
MirrorMode.MIRROR_LOCAL_REMOTE -> true
MirrorMode.MIRROR_LOCAL_ONLY -> false
MirrorMode.MIRROR_REMOTE_ONLY -> true
MirrorMode.MIRROR_NONE -> false
}
} else {
when (cameraConfig.backMirror) {
MirrorMode.MIRROR_LOCAL_REMOTE -> true
MirrorMode.MIRROR_LOCAL_ONLY -> false
MirrorMode.MIRROR_REMOTE_ONLY -> true
MirrorMode.MIRROR_NONE -> false
}
}
val rMirror =
if (isFrontCamera) {
when (cameraConfig.frontMirror) {
MirrorMode.MIRROR_LOCAL_REMOTE -> false
MirrorMode.MIRROR_LOCAL_ONLY -> true
MirrorMode.MIRROR_REMOTE_ONLY -> true
MirrorMode.MIRROR_NONE -> false
}
} else {
when (cameraConfig.backMirror) {
MirrorMode.MIRROR_LOCAL_REMOTE -> false
MirrorMode.MIRROR_LOCAL_ONLY -> true
MirrorMode.MIRROR_REMOTE_ONLY -> true
MirrorMode.MIRROR_NONE -> false
}
}
if (captureMirror != cMirror || renderMirror != rMirror) {
LogUtils.w(TAG, "processBeauty >> enable=$enable, captureMirror=$captureMirror->$cMirror, renderMirror=$renderMirror->$rMirror")
captureMirror = cMirror
if(renderMirror != rMirror){
renderMirror = rMirror
config?.rtcEngine?.setLocalRenderMode(
localVideoRenderMode,
if(renderMirror) Constants.VIDEO_MIRROR_MODE_ENABLED else Constants.VIDEO_MIRROR_MODE_DISABLED
)
}
textureBufferHelper?.invoke {
mTextureProcessHelper?.reset()
}
skipFrame = 2
return false
}
val oldIsFrontCamera = isFrontCamera
isFrontCamera = videoFrame.sourceType == SourceType.kFrontCamera
if(oldIsFrontCamera != isFrontCamera){
LogUtils.w(TAG, "processBeauty >> oldIsFrontCamera=$oldIsFrontCamera, isFrontCamera=$isFrontCamera")
return false
}
if(enableChange){
enableChange = false
textureBufferHelper?.invoke {
mTextureProcessHelper?.reset()
}
return false
}
if(!enable){
return true
}
if (textureBufferHelper == null) {
textureBufferHelper = TextureBufferHelper.create(
"FURender",
EglBaseProvider.instance().rootEglBase.eglBaseContext
)
textureBufferHelper?.invoke {
synchronized(pendingProcessRunList){
val iterator = pendingProcessRunList.iterator()
while (iterator.hasNext()){
iterator.next().invoke()
iterator.remove()
}
}
}
LogUtils.i(TAG, "processBeauty >> create texture buffer, beautyMode=$beautyMode")
}
if (wrapTextureBufferHelper == null) {
wrapTextureBufferHelper = TextureBufferHelper.create(
"FURenderWrap",
EglBaseProvider.instance().rootEglBase.eglBaseContext
)
LogUtils.i(TAG, "processBeauty >> create texture buffer wrap, beautyMode=$beautyMode")
}
val startTime = System.currentTimeMillis()
val processTexId = when (beautyMode) {
2 -> processBeautySingleBuffer(videoFrame)
3 -> {
if (enableTextureAsync) {
processBeautySingleTextureAsync(videoFrame)
} else {
processBeautySingleTexture(videoFrame)
}
}
else -> processBeautyAuto(videoFrame)
}
if(config?.statsEnable == true){
val costTime = System.currentTimeMillis() - startTime
statsHelper?.once(costTime)
}
if (processTexId <= 0) {
LogUtils.w(TAG, "processBeauty >> processTexId <= 0")
return false
}
if(skipFrame > 0){
skipFrame--
LogUtils.w(TAG, "processBeauty >> skipFrame=$skipFrame")
return false
}
val processBuffer: TextureBuffer = wrapTextureBufferHelper?.wrapTextureBuffer(
videoFrame.rotatedWidth,
videoFrame.rotatedHeight,
TextureBuffer.Type.RGB,
processTexId,
identityMatrix
) ?: return false
videoFrame.replaceBuffer(processBuffer, 0, videoFrame.timestampNs)
return true
}
private fun processBeautyAuto(videoFrame: VideoFrame): Int {
val buffer = videoFrame.buffer
return if (buffer is TextureBuffer) {
if (enableTextureAsync) {
processBeautySingleTextureAsync(videoFrame)
} else {
processBeautySingleTexture(videoFrame)
}
} else {
processBeautySingleBuffer(videoFrame)
}
}
private fun processBeautySingleTextureAsync(videoFrame: VideoFrame): Int {
val texBufferHelper = wrapTextureBufferHelper ?: return -1
val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1
when(textureBuffer.type){
TextureBuffer.Type.OES -> {
if(currProcessSourceType != ProcessSourceType.TEXTURE_OES_ASYNC){
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES_ASYNC}")
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
skipFrame = 3
}
currProcessSourceType = ProcessSourceType.TEXTURE_OES_ASYNC
return -1
}
}
else -> {
if(currProcessSourceType != ProcessSourceType.TEXTURE_2D_ASYNC){
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D_ASYNC}")
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
skipFrame = 3
}
currProcessSourceType = ProcessSourceType.TEXTURE_2D_ASYNC
skipFrame = 6
return -1
}
}
}
if(mTextureProcessHelper == null) {
mTextureProcessHelper = TextureProcessHelper()
mTextureProcessHelper?.setFilter { frame ->
val fuRenderKit = config?.fuRenderKit ?: return@setFilter -1
val input = FURenderInputData(frame.width, frame.height)
input.texture = FURenderInputData.FUTexture(
FUInputTextureEnum.FU_ADM_FLAG_COMMON_TEXTURE,
frame.textureId
)
val isFront = frame.isFrontCamera
input.renderConfig.let {
if (isFront) {
it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
it.deviceOrientation = 270
} else {
it.cameraFacing = CameraFacingEnum.CAMERA_BACK
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
it.deviceOrientation = 270
}
}
if (isReleased) {
return@setFilter -1
}
val ret = textureBufferHelper?.invoke {
synchronized(EglBase.lock){
return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1
}
}
return@setFilter ret ?: -1
}
}
return texBufferHelper.invoke {
if(isReleased){
return@invoke -1
}
return@invoke mTextureProcessHelper?.process(
textureBuffer.textureId,
when (textureBuffer.type) {
TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES
else -> GLES20.GL_TEXTURE_2D
},
textureBuffer.width,
textureBuffer.height,
videoFrame.rotation,
textureBuffer.transformMatrixArray,
isFrontCamera,
(isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)
)?: -1
}
}
private fun processBeautySingleTexture(videoFrame: VideoFrame): Int {
val texBufferHelper = wrapTextureBufferHelper ?: return -1
val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1
when(textureBuffer.type){
TextureBuffer.Type.OES -> {
if(currProcessSourceType != ProcessSourceType.TEXTURE_OES){
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES}")
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
skipFrame = 3
}
currProcessSourceType = ProcessSourceType.TEXTURE_OES
return -1
}
}
else -> {
if(currProcessSourceType != ProcessSourceType.TEXTURE_2D){
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D}")
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
skipFrame = 3
}
currProcessSourceType = ProcessSourceType.TEXTURE_2D
skipFrame = 6
return -1
}
}
}
val width = videoFrame.rotatedWidth
val height = videoFrame.rotatedHeight
val isFront = videoFrame.sourceType == SourceType.kFrontCamera
val rotation = videoFrame.rotation
return texBufferHelper.invoke {
val fuRenderKit = config?.fuRenderKit ?: return@invoke -1
transformGLFrameBuffer.setSize(width, height)
transformGLFrameBuffer.resetTransform()
transformGLFrameBuffer.setTexMatrix(textureBuffer.transformMatrixArray)
transformGLFrameBuffer.setRotation(rotation)
var flipH = isFront
if((isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)){
flipH = !flipH
}
transformGLFrameBuffer.setFlipH(flipH)
val transformTexId = transformGLFrameBuffer.process(
textureBuffer.textureId, when (textureBuffer.type) {
TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES
else -> GLES20.GL_TEXTURE_2D
}
)
val input = FURenderInputData(width, height)
input.texture = FURenderInputData.FUTexture(
FUInputTextureEnum.FU_ADM_FLAG_COMMON_TEXTURE,
transformTexId
)
input.renderConfig.let {
if (isFront) {
it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
it.deviceOrientation = 270
} else {
it.cameraFacing = CameraFacingEnum.CAMERA_BACK
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
it.deviceOrientation = 270
}
}
if (isReleased) {
return@invoke -1
}
synchronized(EglBase.lock){
return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1
}
}
}
private fun processBeautySingleBuffer(videoFrame: VideoFrame): Int {
val texBufferHelper = textureBufferHelper ?: return -1
if(currProcessSourceType != ProcessSourceType.I420){
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.I420}")
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
skipFrame = 3
}
currProcessSourceType = ProcessSourceType.I420
return -1
}
val bufferArray = getNV21Buffer(videoFrame) ?: return -1
val buffer = videoFrame.buffer
val width = buffer.width
val height = buffer.height
val isFront = videoFrame.sourceType == SourceType.kFrontCamera
val mirror = (isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)
val rotation = videoFrame.rotation
return texBufferHelper.invoke(Callable {
if(isReleased){
return@Callable -1
}
val fuRenderKit = config?.fuRenderKit ?: return@Callable -1
val input = FURenderInputData(width, height)
input.imageBuffer = FURenderInputData.FUImageBuffer(
FUInputBufferEnum.FU_FORMAT_NV21_BUFFER,
bufferArray
)
input.renderConfig.let {
if (isFront) {
it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
it.inputBufferMatrix = if(mirror) {
when (rotation) {
0 -> FUTransformMatrixEnum.CCROT0
180 -> FUTransformMatrixEnum.CCROT180
else -> FUTransformMatrixEnum.CCROT90
}
} else {
when (rotation) {
0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
else -> FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL
}
}
it.inputTextureMatrix = if(mirror) {
when (rotation) {
0 -> FUTransformMatrixEnum.CCROT0
180 -> FUTransformMatrixEnum.CCROT180
else -> FUTransformMatrixEnum.CCROT90
}
} else {
when (rotation) {
0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
else -> FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL
}
}
it.deviceOrientation = when(rotation){
0 -> 270
180 -> 90
else -> 0
}
it.outputMatrix = FUTransformMatrixEnum.CCROT0
} else {
it.cameraFacing = CameraFacingEnum.CAMERA_BACK
it.inputBufferMatrix = if(mirror) {
when (rotation) {
0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
else -> FUTransformMatrixEnum.CCROT90_FLIPVERTICAL
}
} else {
when (rotation) {
0 -> FUTransformMatrixEnum.CCROT0
180 -> FUTransformMatrixEnum.CCROT180
else -> FUTransformMatrixEnum.CCROT270
}
}
it.inputTextureMatrix = if(mirror) {
when (rotation) {
0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
else -> FUTransformMatrixEnum.CCROT90_FLIPVERTICAL
}
} else {
when (rotation) {
0 -> FUTransformMatrixEnum.CCROT0
180 -> FUTransformMatrixEnum.CCROT180
else -> FUTransformMatrixEnum.CCROT270
}
}
it.deviceOrientation = when(rotation){
0 -> 270
180 -> 90
else -> 0
}
it.outputMatrix = FUTransformMatrixEnum.CCROT0
}
}
mTextureProcessHelper?.let {
if(it.size() > 0){
it.reset()
return@Callable -1
}
}
synchronized(EglBase.lock){
return@Callable fuRenderKit.renderWithInput(input).texture?.texId ?: -1
}
})
}
private fun getNV21Buffer(videoFrame: VideoFrame): ByteArray? {
val buffer = videoFrame.buffer
val width = buffer.width
val height = buffer.height
val size = (width * height * 3.0f / 2.0f + 0.5f).toInt()
if (byteBuffer == null || byteBuffer?.capacity() != size || byteArray == null || byteArray?.size != size) {
byteBuffer?.clear()
byteBuffer = ByteBuffer.allocateDirect(size)
byteArray = ByteArray(size)
return null
}
val outArray = byteArray ?: return null
val outBuffer = byteBuffer ?: return null
val i420Buffer = buffer as? I420Buffer ?: buffer.toI420()
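// Note: the U and V planes are deliberately swapped in the call below, so I420ToNV12 actually emits NV21 (VU-interleaved), matching FU_FORMAT_NV21_BUFFER.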
YuvHelper.I420ToNV12(
i420Buffer.dataY, i420Buffer.strideY,
i420Buffer.dataV, i420Buffer.strideV,
i420Buffer.dataU, i420Buffer.strideU,
outBuffer, width, height
)
outBuffer.position(0)
outBuffer.get(outArray)
if(buffer !is I420Buffer){
i420Buffer.release()
}
return outArray
}
// IVideoFrameObserver implements
override fun onCaptureVideoFrame(sourceType: Int, videoFrame: VideoFrame?): Boolean {
videoFrame ?: return false
return processBeauty(videoFrame)
}
override fun onPreEncodeVideoFrame(sourceType: Int, videoFrame: VideoFrame?) = false
override fun onMediaPlayerVideoFrame(videoFrame: VideoFrame?, mediaPlayerId: Int) = false
override fun onRenderVideoFrame(
channelId: String?,
uid: Int,
videoFrame: VideoFrame?
) = false
override fun getVideoFrameProcessMode() = IVideoFrameObserver.PROCESS_MODE_READ_WRITE
override fun getVideoFormatPreference() = IVideoFrameObserver.VIDEO_PIXEL_DEFAULT
override fun getRotationApplied() = false
override fun getMirrorApplied() = captureMirror && !enable
override fun getObservedFramePosition() = IVideoFrameObserver.POSITION_POST_CAPTURER
}
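
For the Custom capture path the host app pushes frames itself; a hedged sketch (the api instance, its Custom-mode initialization and the frame source are assumptions, not part of this commit):

// Assumes api was initialized with captureMode = CaptureMode.Custom and enabled.
fun onMyCapturedFrame(frame: VideoFrame) {
    when (api.onFrame(frame)) {
        ErrorCode.ERROR_OK.value -> { /* the frame buffer was replaced with the beautified texture */ }
        ErrorCode.ERROR_FRAME_SKIPPED.value -> { /* e.g. a mirror/camera switch is settling; render the raw frame */ }
        else -> { /* not initialized, already released, or wrong capture mode */ }
    }
}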

SWAuManager.java

@@ -0,0 +1,196 @@
package io.agora.beautyapi.faceunity.agora;
import android.app.Activity;
import android.content.Context;
import android.view.SurfaceView;
import android.widget.FrameLayout;
import com.yunbao.common.CommonAppConfig;
import com.yunbao.common.CommonAppContext;
import com.yunbao.common.manager.base.BaseCacheManager;
import com.yunbao.common.utils.L;
import com.yunbao.common.utils.StringUtil;
import io.agora.rtc2.ChannelMediaOptions;
import io.agora.rtc2.Constants;
import io.agora.rtc2.IRtcEngineEventHandler;
import io.agora.rtc2.RtcConnection;
import io.agora.rtc2.RtcEngineConfig;
import io.agora.rtc2.RtcEngineEx;
import io.agora.rtc2.video.VideoCanvas;
/**
 * Agora audience-side manager class
 */
public class SWAuManager extends BaseCacheManager {
// The project's App ID, which can be generated in the Agora console
public static final String shengwang_appId = "4cf0376582d742ac8a96778b25b1079e";
// Fill in the channel name
private Activity mContext;
public static SWAuManager manager;
private RtcEngineEx mRtcEngine;
private FrameLayout audienceContainer; // Anchor's video view
private FrameLayout pkContainer1; // PK anchor view 1
private FrameLayout pkContainer2; // PK anchor view 2
private FrameLayout pkContainer3; // PK anchor view 3
private FrameLayout linkUserContainer; // Co-mic (linked) user view
public SWAuManager(Context context) {
super(context);
}
/**
 * Gets the singleton instance
 *
 * @return
 */
public static SWAuManager get() {
if (null == manager) {
manager = new SWAuManager(CommonAppContext.sInstance.getBaseContext());
}
return manager;
}
/**
 * Initializes the Agora SDK
 */
public void initRtcEngine(Activity mContext) {
this.mContext = mContext;
try {
// Create and configure an RtcEngineConfig object
RtcEngineConfig config = new RtcEngineConfig();
config.mContext = mContext;
config.mAppId = SWAuManager.shengwang_appId;
config.mEventHandler = mRtcEventHandler;
// Create and initialize the RtcEngine
mRtcEngine = (RtcEngineEx) RtcEngineEx.create(config);
} catch (Exception e) {
throw new RuntimeException("Check the error.");
}
mRtcEngine.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING); // Live-broadcasting profile, at the engine level
// Enable the video module
mRtcEngine.enableVideo();
// Start local preview
//mRtcEngine.startPreview();
}
/**
 * Joins the room
 */
public void joinRoom(String strUid, String token, String channelName) {
int uid;
if (StringUtil.isEmpty(strUid)) {
uid = 0;
} else {
uid = Integer.parseInt(strUid);
}
// Create a SurfaceView and add it as a child of the FrameLayout
SurfaceView surfaceView = new SurfaceView(mContext);
audienceContainer.addView(surfaceView);
// Create and configure a ChannelMediaOptions object
ChannelMediaOptions options = new ChannelMediaOptions();
// Set the user role to AUDIENCE for this scenario
options.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE;
// Set the audience latency level for the live-streaming scenario
options.audienceLatencyLevel = Constants.AUDIENCE_LATENCY_LEVEL_LOW_LATENCY; // Audience uses ultra-low-latency streaming
// Join the channel with a temporary token; specify the user ID yourself and ensure it is unique within the channel
mRtcEngine.joinChannel(token, channelName, uid, options);
}
public void joinExRoomEx(String strUid, String token, String channelName) {
int uid;
if (StringUtil.isEmpty(strUid)) {
uid = 0;
} else {
uid = Integer.parseInt(strUid);
}
// Create and configure a ChannelMediaOptions object
ChannelMediaOptions options = new ChannelMediaOptions();
// Set the user role to AUDIENCE for this scenario
options.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE;
// Set the audience latency level for the live-streaming scenario
options.audienceLatencyLevel = Constants.AUDIENCE_LATENCY_LEVEL_LOW_LATENCY; // Audience uses ultra-low-latency streaming
options.autoSubscribeVideo = true;
options.autoSubscribeAudio = true;
RtcConnection rtcConnection = new RtcConnection();
rtcConnection.channelId = "g99411"; // The other anchor's channel
rtcConnection.localUid = 99412; // Your own UID
mRtcEngine.joinChannelEx(token, rtcConnection, options, new IRtcEngineEventHandler() {
@Override
public void onJoinChannelSuccess(String channel, int uid, int elapsed) {
super.onJoinChannelSuccess(channel, uid, elapsed);
L.eSw("onJoinChannelSuccess:" + channel + " uid " +uid+ " elapsed: " + elapsed);
SurfaceView surfaceView = new SurfaceView(mContext);
surfaceView.setZOrderMediaOverlay(true);
pkContainer1.addView(surfaceView);
VideoCanvas videoCanvas = new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, 99411);
mRtcEngine.setupRemoteVideoEx(videoCanvas, rtcConnection);
}
@Override
public void onError(int err) {
super.onError(err);
L.eSw("onError:" + err);
}
});
}
public static String getChannelName(String liveUid) {
return CommonAppConfig.SWChannelPrefix + liveUid;
}
// Remote event listener
private final IRtcEngineEventHandler mRtcEventHandler = new IRtcEngineEventHandler() {
@Override
// Listen for remote users in the channel and obtain their uid
public void onUserJoined(int uid, int elapsed) {
mContext.runOnUiThread(new Runnable() {
@Override
public void run() {
// After obtaining the uid, set up the remote video view
//setupRemoteVideo(uid);
}
});
}
};
// Set up the other anchor's view
public void setupRemoteVideo(int uid) {
SurfaceView surfaceView = new SurfaceView(mContext);
surfaceView.setZOrderMediaOverlay(true);
audienceContainer.addView(surfaceView);
// Pass the SurfaceView to the Agora RTC SDK to set the remote view
mRtcEngine.setupRemoteVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
}
public void setAudienceContainer(FrameLayout audienceContainer) {
this.audienceContainer = audienceContainer;
}
public void setPkContainer1(FrameLayout pkContainer1) {
this.pkContainer1 = pkContainer1;
}
public void setPkContainer2(FrameLayout pkContainer2) {
this.pkContainer2 = pkContainer2;
}
public void setPkContainer3(FrameLayout pkContainer3) {
this.pkContainer3 = pkContainer3;
}
public void setLinkUserContainer(FrameLayout linkUserContainer) {
this.linkUserContainer = linkUserContainer;
}
}
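
A hedged Kotlin sketch of the audience-side call order implied by this class; activity, audienceFrame, myUid, token and liveUid are placeholders:

val audMgr = SWAuManager.get()
audMgr.setAudienceContainer(audienceFrame)   // must be set before joinRoom adds the SurfaceView
audMgr.initRtcEngine(activity)
audMgr.joinRoom(myUid, token, SWAuManager.getChannelName(liveUid))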

SWManager.java

@@ -0,0 +1,418 @@
package io.agora.beautyapi.faceunity.agora;
import android.app.Activity;
import android.content.Context;
import android.view.SurfaceView;
import android.widget.FrameLayout;
import com.yunbao.common.CommonAppConfig;
import com.yunbao.common.CommonAppContext;
import com.yunbao.common.bean.SwTokenModel;
import com.yunbao.common.http.base.HttpCallback;
import com.yunbao.common.http.live.LiveNetManager;
import com.yunbao.common.manager.IMLoginManager;
import com.yunbao.common.manager.base.BaseCacheManager;
import com.yunbao.common.utils.L;
import com.yunbao.common.utils.StringUtil;
import com.yunbao.common.utils.ToastUtil;
import com.yunbao.faceunity.utils.FURenderer;
import io.agora.beautyapi.faceunity.BeautyPreset;
import io.agora.beautyapi.faceunity.CameraConfig;
import io.agora.beautyapi.faceunity.CaptureMode;
import io.agora.beautyapi.faceunity.Config;
import io.agora.beautyapi.faceunity.FaceUnityBeautyAPI;
import io.agora.beautyapi.faceunity.FaceUnityBeautyAPIKt;
import io.agora.beautyapi.faceunity.MirrorMode;
import io.agora.rtc2.ChannelMediaOptions;
import io.agora.rtc2.Constants;
import io.agora.rtc2.IRtcEngineEventHandler;
import io.agora.rtc2.RtcConnection;
import io.agora.rtc2.RtcEngineConfig;
import io.agora.rtc2.RtcEngineEx;
import io.agora.rtc2.video.VideoCanvas;
import io.agora.rtc2.video.VideoEncoderConfiguration;
/**
 * Agora anchor management class
 */
public class SWManager extends BaseCacheManager {
// The project's App ID, which can be generated in the Agora console
public static final String shengwang_appId = "4cf0376582d742ac8a96778b25b1079e";
// Fill in the channel name
private Activity mContext;
public static SWManager manager;
private RtcEngineEx mRtcEngine;
private final FaceUnityBeautyAPI faceUnityBeautyAPI = FaceUnityBeautyAPIKt.createFaceUnityBeautyAPI();
private int uid;
private VideoEncoderConfiguration cfg;
private FrameLayout anchorContainer; // Anchor's own view
private FrameLayout pkContainer1; // PK anchor view 1
private FrameLayout pkContainer2; // PK anchor view 2
private FrameLayout pkContainer3; // PK anchor view 3
private FrameLayout linkUserContainer; // Co-mic (linked) user view
private onRtcEngineListener onRtcEngineListener;
public void setOnRtcEngineListener(SWManager.onRtcEngineListener onRtcEngineListener) {
this.onRtcEngineListener = onRtcEngineListener;
}
public SWManager(Context context) {
super(context);
}
/**
 * Gets the singleton instance
 * @return
 */
public static SWManager get() {
if (null == manager) {
manager = new SWManager(CommonAppContext.sInstance.getBaseContext());
}
return manager;
}
/**
 * Initializes the Agora SDK
 */
public void initRtcEngine(Activity mContext, String strUid) {
this.mContext = mContext;
if (StringUtil.isEmpty(strUid)) {
uid = 0;
} else {
uid = Integer.parseInt(strUid);
}
try {
// Create and configure an RtcEngineConfig object
RtcEngineConfig config = new RtcEngineConfig();
config.mContext = mContext;
config.mAppId = SWManager.shengwang_appId;
config.mEventHandler = mRtcEventHandler;
// Create and initialize the RtcEngine
mRtcEngine = (RtcEngineEx) RtcEngineEx.create(config);
} catch (Exception e) {
throw new RuntimeException("Check the error.");
}
mRtcEngine.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING);
// Enable the video module
mRtcEngine.enableVideo();
// Start local preview
mRtcEngine.startPreview();
cfg = new VideoEncoderConfiguration();
// Set the default resolution
switch (IMLoginManager.get(mContext).getSelectClarity()) {
case 0:
cfg.dimensions = VideoEncoderConfiguration.VD_840x480;
break;
case 1:
cfg.dimensions = VideoEncoderConfiguration.VD_1280x720;
break;
case 2:
cfg.dimensions = VideoEncoderConfiguration.VD_1920x1080;
break;
}
cfg.frameRate = 24;
cfg.bitrate = 0;
cfg.mirrorMode = VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_ENABLED; // Mirrored
mRtcEngine.setVideoEncoderConfiguration(cfg);
// Create a SurfaceView and add it as a child of the FrameLayout
SurfaceView surfaceView = new SurfaceView(mContext);
anchorContainer.addView(surfaceView);
Config config = new Config(mContext, mRtcEngine, FURenderer.INSTANCE.mFURenderKit, null, CaptureMode.Agora, 0, false, new CameraConfig(MirrorMode.MIRROR_NONE, MirrorMode.MIRROR_NONE));
faceUnityBeautyAPI.initialize(config);
faceUnityBeautyAPI.enable(true);
faceUnityBeautyAPI.setBeautyPreset(BeautyPreset.CUSTOM);
//FaceUnityBeautyManage.getInstance().mFURenderKit.setFaceBeauty();
// Set the view
faceUnityBeautyAPI.setupLocalVideo(surfaceView, Constants.RENDER_MODE_HIDDEN);
//faceUnityBeautyAPI.updateCameraConfig(new CameraConfig(MirrorMode.MIRROR_NONE, MirrorMode.MIRROR_NONE));
//mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
}
public void setEnableBeauty(boolean flag){
if(faceUnityBeautyAPI!=null){
faceUnityBeautyAPI.enable(flag);
}
}
/**
 * Sets the resolution (clarity level)
 * @param selectClarity
 */
public void setDimensions(int selectClarity) {
if (cfg != null && mRtcEngine != null) {
switch (selectClarity) {
case 0:
cfg.dimensions = VideoEncoderConfiguration.VD_840x480;
break;
case 1:
cfg.dimensions = VideoEncoderConfiguration.VD_1280x720;
break;
case 2:
cfg.dimensions = VideoEncoderConfiguration.VD_1920x1080;
break;
}
mRtcEngine.setVideoEncoderConfiguration(cfg);
// Create a SurfaceView and add it as a child of the FrameLayout
SurfaceView surfaceView = new SurfaceView(mContext);
anchorContainer.addView(surfaceView);
// Set the view
mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
}
}
/**
 * Sets the mirror mode
 */
public void setMirrorMode(){
if(cfg!=null&&mRtcEngine!=null){
L.eSw("setMirrorMode mirror: "+cfg.mirrorMode);
if(cfg.mirrorMode==VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_ENABLED){
cfg.mirrorMode = VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_DISABLED; // Disable mirroring
}else{
cfg.mirrorMode = VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_ENABLED; // Enable mirroring
}
mRtcEngine.setVideoEncoderConfiguration(cfg);
SurfaceView surfaceView = new SurfaceView(mContext);
anchorContainer.addView(surfaceView);
mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
}
}
/**
 * Switches the camera
 */
public void switchCamera(){
if(mRtcEngine!=null){
mRtcEngine.switchCamera();
}
}
/**
 * Sets the beauty parameters
 */
public void setBeautPreset(){
if(mRtcEngine!=null){
}
}
/**
 * Creates the live room
 */
public void createChannel(String token, String channelName) {
ChannelMediaOptions options = new ChannelMediaOptions();
// Set the role: BROADCASTER (anchor) or AUDIENCE (viewer)
options.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER;
// Join the channel
mRtcEngine.joinChannel(token, channelName, uid, options);
}
/**
 * Updates the anchor's own view
 * @param frameLayout
 */
public void updateMyChannelView(FrameLayout frameLayout) {
mRtcEngine.setupLocalVideo(null);
SurfaceView surfaceView = new SurfaceView(mContext);
surfaceView.setZOrderMediaOverlay(true);
frameLayout.addView(surfaceView);
VideoCanvas videoCanvas = new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid);
mRtcEngine.setupLocalVideo(videoCanvas);
}
/**
 * PK: joins the other anchor's live room
 * @param strUid
 * @param token
 * @param toUid
 * @param channelName
 */
public void joinChannelEx(String strUid, String token, String toUid, String channelName) {
int tempUid;
if (StringUtil.isEmpty(strUid)) {
tempUid = 0;
} else {
tempUid = Integer.parseInt(strUid);
}
// Create and configure a ChannelMediaOptions object
ChannelMediaOptions options = new ChannelMediaOptions();
// Join the peer channel as BROADCASTER for the PK scenario
options.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER;
options.autoSubscribeVideo = true;
options.autoSubscribeAudio = true;
options.publishMicrophoneTrack = false;
options.publishCameraTrack = false;
RtcConnection rtcConnection = new RtcConnection();
rtcConnection.channelId = channelName; // The other anchor's channel
rtcConnection.localUid = tempUid; // Your own UID
L.eSw("strUid:" + tempUid + "_token:" + token + "_channelName:" + channelName);
mRtcEngine.joinChannelEx(token, rtcConnection, options, new IRtcEngineEventHandler() {
@Override
public void onJoinChannelSuccess(String channel, int scUid, int elapsed) {
super.onJoinChannelSuccess(channel, scUid, elapsed);
L.eSw("onJoinChannelSuccess:" + channel + " uid " + scUid + " elapsed: " + elapsed);
mContext.runOnUiThread(new Runnable() {
@Override
public void run() {
SurfaceView surfaceView = new SurfaceView(mContext);
surfaceView.setZOrderMediaOverlay(true);
pkContainer1.addView(surfaceView);
VideoCanvas videoCanvas = new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_FIT,Integer.parseInt(toUid));
mRtcEngine.setupRemoteVideoEx(videoCanvas, rtcConnection);
}
});
}
@Override
public void onLeaveChannel(RtcStats stats) {
super.onLeaveChannel(stats);
}
@Override
public void onError(int err) {
super.onError(err);
L.eSw("onError:" + err);
}
});
}
/**
 * Leaves the other anchor's live room
 * @param uid Your own UID
 * @param liveUid The other anchor's room ID
 */
public void exitChannelToUid(int uid, String liveUid){
RtcConnection rtcConnection = new RtcConnection();
rtcConnection.channelId = getChannelName(liveUid); // The other anchor's channel
rtcConnection.localUid = uid; // Your own UID
mRtcEngine.leaveChannelEx(rtcConnection);
}
/**
 * Leaves all channels
 */
public void exitChannelAll(){
if(mRtcEngine!=null){
mRtcEngine.leaveChannel();
}
}
// Set up the other anchor's view
private void setupRemoteVideo(int uid) {
SurfaceView surfaceView = new SurfaceView(mContext);
surfaceView.setZOrderMediaOverlay(true);
pkContainer1.addView(surfaceView);
// Pass the SurfaceView to the Agora RTC SDK to set the remote view
mRtcEngine.setupRemoteVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_FIT, uid));
}
public void setAnchorContainer(FrameLayout anchorContainer) {
this.anchorContainer = anchorContainer;
}
// Remote event listener
private final IRtcEngineEventHandler mRtcEventHandler = new IRtcEngineEventHandler() {
@Override
// Listen for remote users in the channel and obtain their uid
public void onUserJoined(int uid, int elapsed) {
mContext.runOnUiThread(new Runnable() {
@Override
public void run() {
// After obtaining the uid, set up the remote video view
//setupRemoteVideo(uid);
}
});
}
@Override
public void onJoinChannelSuccess(String channel, int uid, int elapsed) {
super.onJoinChannelSuccess(channel, uid, elapsed);
L.eSw("onJoinChannelSuccess 加入频道channel"+channel+" uid:"+uid+" elapsed:"+elapsed);
if (onRtcEngineListener != null) {
onRtcEngineListener.onOpenSuccess(channel, uid);
}
}
@Override
public void onError(int err) {
super.onError(err);
L.eSw("onError 错误码:"+err);
ToastUtil.show("onError:" + err);
}
@Override
public void onTokenPrivilegeWillExpire(String token) {
super.onTokenPrivilegeWillExpire(token);
L.eSw("onTokenPrivilegeWillExpire_Token 即将失效");
refreshToken();
}
@Override
public void onRequestToken() {
super.onRequestToken();
L.eSw("onRequestToken_Token失效");
refreshToken();
}
@Override
public void onLeaveChannel(RtcStats stats) {
super.onLeaveChannel(stats);
L.eSw("onLeaveChannel退出頻道");
}
@Override
public void onLocalVideoStateChanged(Constants.VideoSourceType source, int state, int error) {
super.onLocalVideoStateChanged(source, state, error);
L.eSw("onLocalVideoStateChanged_source"+source+" state_"+state+" error_"+error);
}
};
private void refreshToken() {
LiveNetManager.get(mContext).getSwToken(new HttpCallback<SwTokenModel>() {
@Override
public void onSuccess(SwTokenModel data) {
if (mRtcEngine != null) {
mRtcEngine.renewToken(data.getToken());
}
L.eSw("refreshToken_重新获取Token成功");
}
@Override
public void onError(String error) {
}
});
}
public interface onRtcEngineListener {
void onOpenSuccess(String channel, int uid);
}
public void setPkContainer1(FrameLayout pkContainer1) {
this.pkContainer1 = pkContainer1;
}
public void setPkContainer2(FrameLayout pkContainer2) {
this.pkContainer2 = pkContainer2;
}
public void setPkContainer3(FrameLayout pkContainer3) {
this.pkContainer3 = pkContainer3;
}
public void setLinkUserContainer(FrameLayout linkUserContainer) {
this.linkUserContainer = linkUserContainer;
}
public static String getChannelName(String liveUid) {
return CommonAppConfig.SWChannelPrefix + liveUid;
}
}
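
The anchor-side order, again as a hedged sketch with placeholder values (activity, anchorFrame, pkFrame1, myUid, token, pkToken, peerUid); note that initRtcEngine also wires up the FaceUnityBeautyAPI before createChannel publishes the stream:

val sw = SWManager.get()
sw.setAnchorContainer(anchorFrame)           // must be set before initRtcEngine adds the preview SurfaceView
sw.setPkContainer1(pkFrame1)                 // target view for the PK peer's stream
sw.initRtcEngine(activity, myUid)            // creates the engine, beauty API and local preview
sw.createChannel(token, SWManager.getChannelName(myUid))
// During a PK, join the peer anchor's channel without publishing audio/video:
sw.joinChannelEx(myUid, pkToken, peerUid, SWManager.getChannelName(peerUid))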

lib_faceunity/src/main/java/io/agora/beautyapi/faceunity/utils/FuDeviceUtils.java

@@ -0,0 +1,607 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils;
import android.annotation.TargetApi;
import android.app.ActivityManager;
import android.content.Context;
import android.os.Build;
import android.text.TextUtils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
public class FuDeviceUtils {
public static final String TAG = "FuDeviceUtils";
public static final int DEVICE_LEVEL_HIGH = 2;
public static final int DEVICE_LEVEL_MID = 1;
public static final int DEVICE_LEVEL_LOW = 0;
/**
* The default return value of any method in this class when an
* error occurs or when processing fails (Currently set to -1). Use this to check if
* the information about the device in question was successfully obtained.
*/
public static final int DEVICEINFO_UNKNOWN = -1;
private static final FileFilter CPU_FILTER = new FileFilter() {
@Override
public boolean accept(File pathname) {
String path = pathname.getName();
//regex is slow, so checking char by char.
if (path.startsWith("cpu")) {
for (int i = 3; i < path.length(); i++) {
if (!Character.isDigit(path.charAt(i))) {
return false;
}
}
return true;
}
return false;
}
};
/**
* Calculates the total RAM of the device through Android API or /proc/meminfo.
*
* @param c - Context object for current running activity.
* @return Total RAM that the device has, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
*/
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public static long getTotalMemory(Context c) {
// memInfo.totalMem not supported in pre-Jelly Bean APIs.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
ActivityManager.MemoryInfo memInfo = new ActivityManager.MemoryInfo();
ActivityManager am = (ActivityManager) c.getSystemService(Context.ACTIVITY_SERVICE);
am.getMemoryInfo(memInfo);
if (memInfo != null) {
return memInfo.totalMem;
} else {
return DEVICEINFO_UNKNOWN;
}
} else {
long totalMem = DEVICEINFO_UNKNOWN;
try {
FileInputStream stream = new FileInputStream("/proc/meminfo");
try {
totalMem = parseFileForValue("MemTotal", stream);
totalMem *= 1024;
} finally {
stream.close();
}
} catch (IOException e) {
e.printStackTrace();
}
return totalMem;
}
}
/**
* Method for reading the clock speed of a CPU core on the device. Will read from either
* {@code /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq} or {@code /proc/cpuinfo}.
*
* @return Clock speed of a core on the device, or -1 in the event of an error.
*/
public static int getCPUMaxFreqKHz() {
int maxFreq = DEVICEINFO_UNKNOWN;
try {
for (int i = 0; i < getNumberOfCPUCores(); i++) {
String filename =
"/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
File cpuInfoMaxFreqFile = new File(filename);
if (cpuInfoMaxFreqFile.exists() && cpuInfoMaxFreqFile.canRead()) {
byte[] buffer = new byte[128];
FileInputStream stream = new FileInputStream(cpuInfoMaxFreqFile);
try {
stream.read(buffer);
int endIndex = 0;
//Trim the first number out of the byte buffer.
while (endIndex < buffer.length && Character.isDigit(buffer[endIndex])) {
endIndex++;
}
String str = new String(buffer, 0, endIndex);
Integer freqBound = Integer.parseInt(str);
if (freqBound > maxFreq) {
maxFreq = freqBound;
}
} catch (NumberFormatException e) {
//Fall through and use /proc/cpuinfo.
} finally {
stream.close();
}
}
}
if (maxFreq == DEVICEINFO_UNKNOWN) {
FileInputStream stream = new FileInputStream("/proc/cpuinfo");
try {
int freqBound = parseFileForValue("cpu MHz", stream);
freqBound *= 1024; //MHz -> kHz
if (freqBound > maxFreq) maxFreq = freqBound;
} finally {
stream.close();
}
}
} catch (IOException e) {
maxFreq = DEVICEINFO_UNKNOWN; //Fall through and return unknown.
}
return maxFreq;
}
/**
* Reads the number of CPU cores from the first available information from
* {@code /sys/devices/system/cpu/possible}, {@code /sys/devices/system/cpu/present},
* then {@code /sys/devices/system/cpu/}.
*
* @return Number of CPU cores in the phone, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
*/
public static int getNumberOfCPUCores() {
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.GINGERBREAD_MR1) {
// Gingerbread doesn't support giving a single application access to both cores, but a
// handful of devices (Atrix 4G and Droid X2 for example) were released with a dual-core
// chipset and Gingerbread; that can let an app in the background run without impacting
// the foreground application. But for our purposes, it makes them single core.
return 1;
}
int cores;
try {
cores = getCoresFromFileInfo("/sys/devices/system/cpu/possible");
if (cores == DEVICEINFO_UNKNOWN) {
cores = getCoresFromFileInfo("/sys/devices/system/cpu/present");
}
if (cores == DEVICEINFO_UNKNOWN) {
cores = new File("/sys/devices/system/cpu/").listFiles(CPU_FILTER).length;
}
} catch (SecurityException e) {
cores = DEVICEINFO_UNKNOWN;
} catch (NullPointerException e) {
cores = DEVICEINFO_UNKNOWN;
}
return cores;
}
/**
* Tries to read file contents from the file location to determine the number of cores on device.
*
* @param fileLocation The location of the file with CPU information
* @return Number of CPU cores in the phone, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
*/
private static int getCoresFromFileInfo(String fileLocation) {
InputStream is = null;
try {
is = new FileInputStream(fileLocation);
BufferedReader buf = new BufferedReader(new InputStreamReader(is));
String fileContents = buf.readLine();
buf.close();
return getCoresFromFileString(fileContents);
} catch (IOException e) {
return DEVICEINFO_UNKNOWN;
} finally {
if (is != null) {
try {
is.close();
} catch (IOException e) {
// Do nothing.
}
}
}
}
/**
* Converts from a CPU core information format to number of cores.
*
* @param str The CPU core information string, in the format of "0-N"
* @return The number of cores represented by this string
*/
private static int getCoresFromFileString(String str) {
if (str == null || !str.matches("0-[\\d]+$")) {
return DEVICEINFO_UNKNOWN;
}
return Integer.valueOf(str.substring(2)) + 1;
}
/**
* Helper method for reading values from system files, using a minimised buffer.
*
* @param textToMatch - Text in the system files to read for.
* @param stream - FileInputStream of the system file being read from.
 * @return A numerical value following textToMatch in the specified system file.
* -1 in the event of a failure.
*/
private static int parseFileForValue(String textToMatch, FileInputStream stream) {
byte[] buffer = new byte[1024];
try {
int length = stream.read(buffer);
for (int i = 0; i < length; i++) {
if (buffer[i] == '\n' || i == 0) {
if (buffer[i] == '\n') i++;
for (int j = i; j < length; j++) {
int textIndex = j - i;
//Text doesn't match query at some point.
if (buffer[j] != textToMatch.charAt(textIndex)) {
break;
}
//Text matches query here.
if (textIndex == textToMatch.length() - 1) {
return extractValue(buffer, j);
}
}
}
}
} catch (IOException e) {
//Ignore any exceptions and fall through to return unknown value.
} catch (NumberFormatException e) {
}
return DEVICEINFO_UNKNOWN;
}
/**
* Helper method used by {@link #parseFileForValue(String, FileInputStream) parseFileForValue}. Parses
* the next available number after the match in the file being read and returns it as an integer.
*
 * @param buffer - The buffer array being scanned.
 * @param index - The index in the buffer array to begin looking.
* @return The next number on that line in the buffer, returned as an int. Returns
* DEVICEINFO_UNKNOWN = -1 in the event that no more numbers exist on the same line.
*/
private static int extractValue(byte[] buffer, int index) {
while (index < buffer.length && buffer[index] != '\n') {
if (Character.isDigit(buffer[index])) {
int start = index;
index++;
while (index < buffer.length && Character.isDigit(buffer[index])) {
index++;
}
                String str = new String(buffer, start, index - start);
return Integer.parseInt(str);
}
index++;
}
return DEVICEINFO_UNKNOWN;
}
    /**
     * Returns the currently available memory (RAM) on the device.
     *
     * @param context Context used to obtain the ActivityManager.
     * @return Available memory in bytes.
     */
public static long getAvailMemory(Context context) {
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
ActivityManager.MemoryInfo mi = new ActivityManager.MemoryInfo();
am.getMemoryInfo(mi);
return mi.availMem;
}
    /**
     * Returns the device brand.
     *
     * @return The value of {@link Build#BRAND}.
     */
public static String getBrand() {
return Build.BRAND;
}
    /**
     * Returns the device model.
     *
     * @return The value of {@link Build#MODEL}.
     */
public static String getModel() {
return Build.MODEL;
}
    /**
     * Returns hardware information (the CPU model), read from {@code /proc/cpuinfo}.
     *
     * @return The CPU model if it can be parsed, otherwise {@link Build#HARDWARE}.
     */
public static String getHardWare() {
        try (BufferedReader br = new BufferedReader(new FileReader("/proc/cpuinfo"))) {
            String text;
            String last = "";
            while ((text = br.readLine()) != null) {
                last = text;
            }
            //On most devices the CPU model is on the last line of cpuinfo.
            if (last.contains("Hardware")) {
                String[] hardWare = last.split(":\\s+", 2);
                return hardWare[1];
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
return Build.HARDWARE;
}
    /**
     * Level judgement based on current memory and CPU.
     *
     * @param context - Context object.
     * @return One of DEVICE_LEVEL_HIGH, DEVICE_LEVEL_MID or DEVICE_LEVEL_LOW.
     */
public static int judgeDeviceLevel(Context context) {
int level;
        //Some devices do not follow the rules below, so check the model whitelist first.
int specialDevice = judgeDeviceLevelInDeviceName();
if (specialDevice >= 0) return specialDevice;
int ramLevel = judgeMemory(context);
int cpuLevel = judgeCPU();
if (ramLevel == 0 || ramLevel == 1 || cpuLevel == 0) {
level = DEVICE_LEVEL_LOW;
} else {
if (cpuLevel > 1) {
level = DEVICE_LEVEL_HIGH;
} else {
level = DEVICE_LEVEL_MID;
}
}
LogUtils.d(TAG,"DeviceLevel: " + level);
return level;
}
    /**
     * Looks the device model up in the high/mid/low whitelists.
     *
     * @return The matching DEVICE_LEVEL_* constant, or -1 if the model is not specifically classified.
     */
private static int judgeDeviceLevelInDeviceName() {
String currentDeviceName = getDeviceName();
for (String deviceName:upscaleDevice) {
if (deviceName.equals(currentDeviceName)) {
return DEVICE_LEVEL_HIGH;
}
}
for (String deviceName:middleDevice) {
if (deviceName.equals(currentDeviceName)) {
return DEVICE_LEVEL_MID;
}
}
for (String deviceName:lowDevice) {
if (deviceName.equals(currentDeviceName)) {
return DEVICE_LEVEL_LOW;
}
}
return -1;
}
public static final String[] upscaleDevice = {"vivo X6S A","MHA-AL00","VKY-AL00","V1838A"};
public static final String[] lowDevice = {};
public static final String[] middleDevice = {"OPPO R11s","PAR-AL00","MI 8 Lite","ONEPLUS A6000","PRO 6","PRO 7 Plus"};
    /**
     * Rates the RAM tier.
     *
     * @return Memory level from 0 (2 GB or less) to 4 (more than 6 GB).
     */
private static int judgeMemory(Context context) {
long ramMB = getTotalMemory(context) / (1024 * 1024);
int level = -1;
        if (ramMB <= 2000) { //2 GB or less: lowest tier
            level = 0;
        } else if (ramMB <= 3000) { //2-3 GB
            level = 1;
        } else if (ramMB <= 4000) { //4 GB: mainstream mid-range (2018)
            level = 2;
        } else if (ramMB <= 6000) { //6 GB: high-end
            level = 3;
        } else { //above 6 GB: flagship
            level = 4;
        }
return level;
}
    /**
     * Rates the CPU tier, combining clock frequency with the vendor/model name.
     *
     * @return CPU level from 0 (low-end) to 3 (high-end).
     */
private static int judgeCPU() {
int level = 0;
String cpuName = getHardWare();
        int freqMHz = getCPUMaxFreqKHz() / 1000; //kHz -> MHz
        //High-end devices that do not follow these rules are caught by the model whitelist above.
        //If the CPU model name is available, use a vendor-specific judgement strategy.
        if (!TextUtils.isEmpty(cpuName)) {
            if (cpuName.contains("qcom") || cpuName.contains("Qualcomm")) { //Qualcomm Snapdragon
                return judgeQualcommCPU(cpuName, freqMHz);
            } else if (cpuName.contains("hi") || cpuName.contains("kirin")) { //HiSilicon Kirin
                return judgeKirinCPU(cpuName, freqMHz);
            } else if (cpuName.contains("MT")) { //MediaTek
                return judgeMTCPU(cpuName, freqMHz);
            }
        }
        //Generic rule when the CPU model cannot be determined.
        if (freqMHz <= 1600) { //~1.5 GHz: low-end
            level = 0;
        } else if (freqMHz <= 1950) { //~2 GHz: low-to-mid
            level = 1;
        } else if (freqMHz <= 2500) { //2.2-2.5 GHz: mid-to-high
            level = 2;
        } else { //high-end
            level = 3;
        }
return level;
}
    /**
     * MediaTek chip tier judgement.
     *
     * @return CPU level from 0 (low-end) to 3 (high-end).
     */
private static int judgeMTCPU(String cpuName, int freqMHz) {
        //Everything before the Helio P60 (MT6771V/C) is low-end.
int level = 0;
int mtCPUVersion = getMTCPUVersion(cpuName);
if (mtCPUVersion == -1) {
            //Version unreadable; apply a stricter frequency rule to pick out high-end devices.
            if (freqMHz <= 1600) { //~1.5 GHz: low-end
                level = 0;
            } else if (freqMHz <= 2200) { //~2 GHz: low-to-mid
                level = 1;
            } else if (freqMHz <= 2700) { //2.2-2.7 GHz: mid-to-high
                level = 2;
            } else { //high-end
                level = 3;
            }
        } else if (mtCPUVersion < 6771) {
            //All low/mid-range before the MT6771.
            if (freqMHz <= 1600) { //~1.5 GHz: low-end
                level = 0;
            } else { //~2 GHz: mid-range
                level = 1;
            }
        } else {
            if (freqMHz <= 1600) { //~1.5 GHz: low-end
                level = 0;
            } else if (freqMHz <= 1900) { //~2 GHz: low-to-mid
                level = 1;
            } else if (freqMHz <= 2500) { //2.2-2.5 GHz: mid-to-high
                level = 2;
            } else { //high-end
                level = 3;
            }
        }
return level;
}
    /**
     * Extracts the CPU version number from a MediaTek model name (e.g. "MT6771V/C" -> 6771).
     *
     * @param cpuName The CPU model name.
     * @return The four-digit version, or -1 if it cannot be parsed.
     */
private static int getMTCPUVersion(String cpuName) {
        //Take the four digits that follow "MT".
int cpuVersion = -1;
if (cpuName.length() > 5) {
String cpuVersionStr = cpuName.substring(2, 6);
try {
cpuVersion = Integer.valueOf(cpuVersionStr);
} catch (NumberFormatException exception) {
exception.printStackTrace();
}
}
return cpuVersion;
}
    /**
     * Qualcomm Snapdragon chip tier judgement.
     *
     * @return CPU level from 0 (low-end) to 3 (high-end).
     */
private static int judgeQualcommCPU(String cpuName, int freqMHz) {
int level = 0;
        //Older chips report e.g. "xxxx Inc MSM8937"; Snapdragon 7/8 series report e.g. "xxx Inc SDM710".
        if (cpuName.contains("MSM")) {
            //Older chip.
            if (freqMHz <= 1600) { //~1.5 GHz: low-end
                level = 0;
            } else { //~2 GHz: low-to-mid
                level = 1;
            }
        } else {
            //Newer chip.
            if (freqMHz <= 1600) { //~1.5 GHz: low-end
                level = 0;
            } else if (freqMHz <= 2000) { //~2 GHz: low-to-mid
                level = 1;
            } else if (freqMHz <= 2500) { //2.2-2.5 GHz: mid-to-high
                level = 2;
            } else { //high-end
                level = 3;
            }
        }
return level;
}
    /**
     * HiSilicon Kirin chip tier judgement.
     *
     * @param cpuName The CPU model name.
     * @param freqMHz Maximum core frequency in MHz.
     * @return CPU level from 0 (low-end) to 3 (high-end).
     */
    private static int judgeKirinCPU(String cpuName, int freqMHz) {
        //Judge by model (Kirin 710 and later) plus maximum core frequency.
        int level = 0;
        if (cpuName.startsWith("hi")) {
            //"hi"-prefixed HiSilicon chips are low/mid-range.
            if (freqMHz <= 1600) { //~1.5 GHz: low-end
                level = 0;
            } else if (freqMHz <= 2000) { //~2 GHz: low-to-mid
                level = 1;
            }
        } else {
            //"kirin"-prefixed Kirin chips.
            if (freqMHz <= 1600) { //~1.5 GHz: low-end
                level = 0;
            } else if (freqMHz <= 2000) { //~2 GHz: low-to-mid
                level = 1;
            } else if (freqMHz <= 2500) { //2.2-2.5 GHz: mid-to-high
                level = 2;
            } else { //high-end
                level = 3;
            }
        }
return level;
}
public static final String Nexus_6P = "Nexus 6P";
    /**
     * Returns the device name.
     *
     * @return The value of {@link Build#MODEL}, or an empty string if unavailable.
     */
public static String getDeviceName() {
String deviceName = "";
if (Build.MODEL != null) deviceName = Build.MODEL;
LogUtils.e(TAG,"deviceName: " + deviceName);
return deviceName;
}
}
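
A minimal Kotlin sketch of the intended call pattern for the device-rating helpers above. The class name FuDeviceUtils and the visibility of the DEVICE_LEVEL_* constants are assumptions; substitute whatever this utility class is actually named in the module.

import android.content.Context

fun shouldEnableHeavyEffects(context: Context): Boolean {
    // Whitelist first, then RAM + CPU heuristics, exactly as judgeDeviceLevel() does.
    val level = FuDeviceUtils.judgeDeviceLevel(context)
    return level != FuDeviceUtils.DEVICE_LEVEL_LOW
}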


@@ -0,0 +1,57 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils
import io.agora.base.internal.Logging
object LogUtils {
private const val beautyType = "FaceUnity"
@JvmStatic
fun i(tag: String, content: String, vararg args: Any) {
        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
Logging.log(Logging.Severity.LS_INFO, tag, consoleMessage)
}
@JvmStatic
fun d(tag: String, content: String, vararg args: Any) {
        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
Logging.d(tag, consoleMessage)
}
@JvmStatic
fun w(tag: String, content: String, vararg args: Any){
        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
Logging.w(tag, consoleMessage)
}
@JvmStatic
fun e(tag: String, content: String, vararg args: Any){
        val consoleMessage = "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"
Logging.e(tag, consoleMessage)
}
}
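
A quick usage sketch for the logger above; format arguments are forwarded to String.format via the spread operator, so standard %-placeholders apply. TAG is a placeholder name.

private const val TAG = "BeautyDemo"

fun logRenderCost(frameIndex: Int, costMs: Long) {
    // Emitted through the Agora logging facility as
    // "[BeautyAPI][FaceUnity] : frame 12 took 7 ms".
    LogUtils.d(TAG, "frame %d took %d ms", frameIndex, costMs)
}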


@@ -0,0 +1,80 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils
import android.os.Handler
import android.os.Looper
import io.agora.beautyapi.faceunity.BeautyStats
import kotlin.math.max
import kotlin.math.min
class StatsHelper(
private val statsDuration: Long,
private val onStatsChanged: (BeautyStats) -> Unit
) {
private val mMainHandler = Handler(Looper.getMainLooper())
private var mStartTime = 0L
private var mCostList = mutableListOf<Long>()
private var mCostMax = 0L
private var mCostMin = Long.MAX_VALUE
fun once(cost: Long) {
val curr = System.currentTimeMillis()
if (mStartTime == 0L) {
mStartTime = curr
} else if (curr - mStartTime >= statsDuration) {
mStartTime = curr
var total = 0L
mCostList.forEach {
total += it
}
val average = total / mCostList.size
val costMin = mCostMin
val costMax = mCostMax
mMainHandler.post {
onStatsChanged.invoke(BeautyStats(costMin, costMax, average))
}
mCostList.clear()
mCostMax = 0L
mCostMin = Long.MAX_VALUE
}
mCostList.add(cost)
mCostMax = max(mCostMax, cost)
mCostMin = min(mCostMin, cost)
}
fun reset() {
mMainHandler.removeCallbacksAndMessages(null)
mStartTime = 0
mCostList.clear()
mCostMax = 0L
mCostMin = Long.MAX_VALUE
}
}
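
A minimal sketch of how StatsHelper is driven: construct it with a reporting window, call once() with each frame's cost, and reset() when the pipeline stops. The fields of BeautyStats are not shown in this hunk, so the callback just stringifies it.

val statsHelper = StatsHelper(statsDuration = 1000L) { stats ->
    // Invoked on the main thread roughly once per window.
    LogUtils.d("StatsDemo", "beauty stats: %s", stats.toString())
}

fun onFrameProcessed(costMs: Long) = statsHelper.once(costMs)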


@@ -0,0 +1,210 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils.egl;
import static android.opengl.EGL14.EGL_CONTEXT_CLIENT_VERSION;
import android.opengl.GLDebugHelper;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import io.agora.beautyapi.faceunity.utils.LogUtils;
public class EGLContextHelper {
private static final String DEBUG_TAG = "EGLContextManager";
private final int mRedSize = 8;
private final int mGreenSize = 8;
private final int mBlueSize = 8;
private final int mAlphaSize = 0;
private final int mDepthSize = 16;
private final int mStencilSize = 0;
private final int mRenderType = 4;
public EGLContextHelper(){}
public void initEGL(EGLContext shareContext) throws Exception {
mEGL = (EGL10) GLDebugHelper.wrap(EGLContext.getEGL(),
GLDebugHelper.CONFIG_CHECK_GL_ERROR
| GLDebugHelper.CONFIG_CHECK_THREAD, null);
if (mEGL == null) {
throw new Exception("Couldn't get EGL");
}
mGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (mGLDisplay == null) {
throw new Exception("Couldn't get display for GL");
}
int[] curGLVersion = new int[2];
mEGL.eglInitialize(mGLDisplay, curGLVersion);
LogUtils.i(DEBUG_TAG, "GL version = " + curGLVersion[0] + "."
+ curGLVersion[1]);
int[] num_config = new int[1];
if(!mEGL.eglChooseConfig(mGLDisplay, mConfigSpec, null, 1,
num_config)){
throw new IllegalArgumentException("eglChooseConfig failed");
}
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException(
"No configs match configSpec");
}
EGLConfig[] configs = new EGLConfig[numConfigs];
if (!mEGL.eglChooseConfig(mGLDisplay, mConfigSpec, configs, numConfigs,
num_config)) {
throw new IllegalArgumentException("eglChooseConfig#2 failed");
}
mGLConfig = chooseConfig(mEGL, mGLDisplay, configs);
if (mGLConfig == null) {
mGLConfig = configs[0];
}
int[] surfaceAttribs = {
EGL10.EGL_WIDTH, 1,
EGL10.EGL_HEIGHT, 1,
EGL10.EGL_NONE
};
mGLSurface = mEGL.eglCreatePbufferSurface(mGLDisplay, mGLConfig, surfaceAttribs);
if (mGLSurface == null) {
throw new Exception("Couldn't create new surface");
}
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
mGLContext = mEGL.eglCreateContext(mGLDisplay, mGLConfig,
shareContext, attrib_list);
if (mGLContext == null) {
throw new Exception("Couldn't create new context");
}
// if (!mEGL.eglMakeCurrent(mGLDisplay, mGLSurface, mGLSurface, mGLContext)) {
// throw new Exception("Failed to eglMakeCurrent");
// }
}
public EGLContext getEGLContext() {
return mGLContext;
}
public EGLDisplay getGLDisplay() {
return mGLDisplay;
}
public EGLConfig getGLConfig() {
return mGLConfig;
}
public EGLSurface getGLSurface() {
return mGLSurface;
}
public EGL10 getEGL() {
return mEGL;
}
EGL10 mEGL;
EGLDisplay mGLDisplay;
EGLConfig mGLConfig;
EGLSurface mGLSurface;
EGLContext mGLContext;
int[] mConfigSpec = new int[]{
EGL10.EGL_RED_SIZE, mRedSize,
EGL10.EGL_GREEN_SIZE, mGreenSize,
EGL10.EGL_BLUE_SIZE, mBlueSize,
EGL10.EGL_ALPHA_SIZE, mAlphaSize,
EGL10.EGL_DEPTH_SIZE, mDepthSize,
EGL10.EGL_STENCIL_SIZE, mStencilSize,
            EGL10.EGL_RENDERABLE_TYPE, mRenderType, //EGL_OPENGL_ES2_BIT: OpenGL ES 2.0
EGL10.EGL_NONE};
public void release() {
mEGL.eglMakeCurrent(mGLDisplay, EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
mEGL.eglDestroySurface(mGLDisplay, mGLSurface);
mEGL.eglDestroyContext(mGLDisplay, mGLContext);
mEGL.eglTerminate(mGLDisplay);
LogUtils.i(DEBUG_TAG, "GL Cleaned up");
}
public boolean eglMakeCurrent(){
if(mGLContext == EGL10.EGL_NO_CONTEXT){
return false;
}else{
return mEGL.eglMakeCurrent(mGLDisplay, mGLSurface, mGLSurface, mGLContext);
}
}
public boolean eglMakeNoCurrent(){
return mEGL.eglMakeCurrent(mGLDisplay, EGL10.EGL_NO_SURFACE,
EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
}
private EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for (EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
if ((d >= mDepthSize) && (s >= mStencilSize)) {
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if ((r == mRedSize) && (g == mGreenSize)
&& (b == mBlueSize) && (a == mAlphaSize)) {
return config;
}
}
}
return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
int[] value = new int[1];
if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
return value[0];
}
return defaultValue;
}
}
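
A sketch of the intended lifecycle for EGLContextHelper: initialize it with a share context on a dedicated thread, make it current before issuing GL calls, and release it on the same thread. initEGL throws on failure, hence the try/finally. Names here are placeholders.

import javax.microedition.khronos.egl.EGLContext

// Runs glWork on the calling thread inside a freshly created offscreen context.
// Pass EGL10.EGL_NO_CONTEXT if nothing should be shared.
fun runOffscreen(shareContext: EGLContext, glWork: () -> Unit) {
    val egl = EGLContextHelper()
    egl.initEGL(shareContext)       // throws if display/config/context setup fails
    try {
        if (egl.eglMakeCurrent()) { // binds the internal 1x1 pbuffer surface
            glWork()
        }
    } finally {
        egl.release()               // un-currents and destroys surface/context
    }
}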


@@ -0,0 +1,84 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils.egl;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLES30;
public class GLCopyHelper {
private final int bufferCount;
public GLCopyHelper(){
this(1);
}
public GLCopyHelper(int bufferCount){
this.bufferCount = bufferCount;
}
private int[] mDstFrameBuffer;
private int[] mSrcFrameBuffer;
public void copy2DTextureToOesTexture(
int srcTexture,
int dstTexture,
int width, int height,
int index){
if(mDstFrameBuffer == null){
mDstFrameBuffer = new int[bufferCount];
GLES20.glGenFramebuffers(bufferCount, mDstFrameBuffer, 0);
}
if(mSrcFrameBuffer == null){
mSrcFrameBuffer = new int[bufferCount];
GLES20.glGenFramebuffers(bufferCount, mSrcFrameBuffer, 0);
}
GLES30.glBindFramebuffer(GLES30.GL_READ_FRAMEBUFFER, mSrcFrameBuffer[index]);
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, srcTexture);
GLES30.glFramebufferTexture2D(GLES30.GL_READ_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0, GLES30.GL_TEXTURE_2D, srcTexture, 0);
GLES30.glBindFramebuffer(GLES30.GL_DRAW_FRAMEBUFFER, mDstFrameBuffer[index]);
GLES30.glFramebufferTexture2D(GLES30.GL_DRAW_FRAMEBUFFER,
GLES30.GL_COLOR_ATTACHMENT0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, dstTexture, 0);
GLES30.glBlitFramebuffer(0, 0, width, height, 0, 0, width, height, GLES30.GL_COLOR_BUFFER_BIT, GLES30.GL_LINEAR);
GLES30.glBindFramebuffer(GLES30.GL_DRAW_FRAMEBUFFER, 0);
GLES30.glBindFramebuffer(GLES30.GL_READ_FRAMEBUFFER, 0);
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
}
public void release(){
if(mDstFrameBuffer != null){
GLES20.glDeleteFramebuffers(mDstFrameBuffer.length, mDstFrameBuffer, 0);
mDstFrameBuffer = null;
}
if(mSrcFrameBuffer != null){
GLES20.glDeleteFramebuffers(mSrcFrameBuffer.length, mSrcFrameBuffer, 0);
mSrcFrameBuffer = null;
}
}
}
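
A sketch of how the blit above is used: with an OpenGL ES 3.0 context current, a regular 2D texture is copied into an external OES texture of the same size through read/draw framebuffers. All texture ids here are placeholders.

val copyHelper = GLCopyHelper()

fun pushToOes(srcTex2D: Int, dstTexOes: Int, width: Int, height: Int) {
    // Must run on the thread whose GLES 3.0 context owns both textures.
    copyHelper.copy2DTextureToOesTexture(srcTex2D, dstTexOes, width, height, 0)
}

fun shutdown() = copyHelper.release()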


@@ -0,0 +1,204 @@
package io.agora.beautyapi.faceunity.utils.egl;
import android.graphics.Matrix;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import io.agora.base.internal.video.EglBase;
import io.agora.base.internal.video.GlRectDrawer;
import io.agora.base.internal.video.RendererCommon;
public class GLFrameBuffer {
private int mFramebufferId = -1;
private int mTextureId = -1;
private int mWidth, mHeight, mRotation;
private boolean isFlipV, isFlipH, isTextureInner, isTextureChanged, isSizeChanged;
private RendererCommon.GlDrawer drawer;
private float[] mTexMatrix = GLUtils.IDENTITY_MATRIX;
public GLFrameBuffer() {
}
public boolean setSize(int width, int height) {
if (mWidth != width || mHeight != height) {
mWidth = width;
mHeight = height;
isSizeChanged = true;
return true;
}
return false;
}
public void setRotation(int rotation) {
if (mRotation != rotation) {
mRotation = rotation;
}
}
public void setFlipV(boolean flipV) {
if (isFlipV != flipV) {
isFlipV = flipV;
}
}
public void setFlipH(boolean flipH) {
if (isFlipH != flipH) {
isFlipH = flipH;
}
}
public void setTextureId(int textureId){
if(mTextureId != textureId){
deleteTexture();
mTextureId = textureId;
isTextureChanged = true;
}
}
public int getTextureId(){
return mTextureId;
}
public void setTexMatrix(float[] matrix) {
if (matrix != null) {
mTexMatrix = matrix;
} else {
mTexMatrix = GLUtils.IDENTITY_MATRIX;
}
}
public void resetTransform(){
mTexMatrix = GLUtils.IDENTITY_MATRIX;
isFlipH = isFlipV = false;
mRotation = 0;
}
public int process(int textureId, int textureType) {
        if (mWidth <= 0 || mHeight <= 0) {
            throw new RuntimeException("Call setSize() first!");
}
if(mTextureId == -1){
mTextureId = createTexture(mWidth, mHeight);
bindFramebuffer(mTextureId);
isTextureInner = true;
}else if(isTextureInner && isSizeChanged){
GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0);
mTextureId = createTexture(mWidth, mHeight);
bindFramebuffer(mTextureId);
}else if(isTextureChanged){
bindFramebuffer(mTextureId);
}
isTextureChanged = false;
isSizeChanged = false;
if(drawer == null){
drawer = new GlRectDrawer();
}
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebufferId);
GLUtils.checkGlError("glBindFramebuffer");
Matrix transform = RendererCommon.convertMatrixToAndroidGraphicsMatrix(mTexMatrix);
transform.preTranslate(0.5f, 0.5f);
transform.preRotate(mRotation, 0.f, 0.f);
transform.preScale(
isFlipH ? -1.f: 1.f,
isFlipV ? -1.f: 1.f
);
transform.preTranslate(-0.5f, -0.5f);
float[] matrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transform);
synchronized (EglBase.lock){
if(textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES){
drawer.drawOes(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight);
}else{
drawer.drawRgb(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight);
}
}
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE);
GLES20.glFinish();
return mTextureId;
}
public void release(){
deleteTexture();
deleteFramebuffer();
if(drawer != null){
drawer.release();
drawer = null;
}
}
private void deleteFramebuffer() {
if (mFramebufferId != -1) {
GLES20.glDeleteFramebuffers(1, new int[]{mFramebufferId}, 0);
mFramebufferId = -1;
}
}
public int createTexture(int width, int height){
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
GLUtils.checkGlError("glGenTextures");
int textureId = textures[0];
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
return textureId;
}
public void resizeTexture(int textureId, int width, int height) {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
}
private void deleteTexture() {
if (isTextureInner && mTextureId != -1) {
GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0);
}
isTextureInner = false;
mTextureId = -1;
}
private void bindFramebuffer(int textureId) {
if(mFramebufferId == -1){
int[] framebuffers = new int[1];
GLES20.glGenFramebuffers(1, framebuffers, 0);
GLUtils.checkGlError("glGenFramebuffers");
mFramebufferId = framebuffers[0];
}
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebufferId);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D,
textureId, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE);
}
}
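
A per-frame sketch for the FBO wrapper above (texture ids and sizes are placeholders):

import android.opengl.GLES20

val frameBuffer = GLFrameBuffer()

fun renderRotated(srcTex: Int, width: Int, height: Int, rotation: Int): Int {
    frameBuffer.setSize(width, height)   // required before process()
    frameBuffer.resetTransform()
    frameBuffer.setRotation(rotation)
    // Draws srcTex into the internal texture and returns that texture's id.
    return frameBuffer.process(srcTex, GLES20.GL_TEXTURE_2D)
}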


@@ -0,0 +1,180 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils.egl
import android.opengl.GLES20
import android.util.Log
import android.util.Size
import java.util.concurrent.ConcurrentLinkedQueue
class GLTextureBufferQueue(
private val glFrameBuffer: GLFrameBuffer = GLFrameBuffer(),
private val cacheCount: Int = 6,
private val loggable: Boolean = false
) {
private val TAG = "GLTextureBufferQueue"
private var cacheIndex = 0
private val cacheTextureOuts = arrayOfNulls<TextureOut>(cacheCount)
private val textureIdQueue = ConcurrentLinkedQueue<TextureOut>()
fun enqueue(iN: TextureIn): Int {
var size = textureIdQueue.size
if (size < cacheCount) {
var out = cacheTextureOuts[cacheIndex]
val outSize = when (iN.rotation) {
90, 270 -> Size(iN.height, iN.width)
else -> Size(iN.width, iN.height)
}
if (out == null) {
val textureId = glFrameBuffer.createTexture(outSize.width, outSize.height)
out = TextureOut(
0,
textureId,
GLES20.GL_TEXTURE_2D,
outSize.width,
outSize.height,
                    iN.isFrontCamera
)
cacheTextureOuts[cacheIndex] = out
} else if (out.width != outSize.width || out.height != outSize.height) {
glFrameBuffer.resizeTexture(out.textureId, outSize.width, outSize.height)
out = TextureOut(
0,
out.textureId,
out.textureType,
outSize.width,
outSize.height,
                    iN.isFrontCamera
)
cacheTextureOuts[cacheIndex] = out
} else if(out.isFrontCamera != iN.isFrontCamera){
out = TextureOut(
0,
out.textureId,
out.textureType,
out.width,
out.height,
                    iN.isFrontCamera
)
cacheTextureOuts[cacheIndex] = out
}
glFrameBuffer.textureId = out.textureId
glFrameBuffer.setSize(out.width, out.height)
glFrameBuffer.resetTransform()
glFrameBuffer.setRotation(iN.rotation)
if (iN.transform != null) {
glFrameBuffer.setTexMatrix(iN.transform)
var flipH = iN.isFrontCamera
if(iN.isMirror){
flipH = !flipH
}
glFrameBuffer.setFlipH(flipH)
} else {
var flipH = !iN.isFrontCamera
if(iN.isMirror){
flipH = !flipH
}
glFrameBuffer.setFlipH(flipH)
}
glFrameBuffer.setFlipV(iN.flipV)
glFrameBuffer.process(iN.textureId, iN.textureType)
out.index = cacheIndex
out.tag = iN.tag
textureIdQueue.offer(out)
if(loggable){
Log.d(TAG, "TextureIdQueue enqueue index=$cacheIndex, size=$size")
}
cacheIndex = (cacheIndex + 1) % cacheCount
size++
} else {
if(loggable){
Log.e(TAG, "TextureIdQueue is full!!")
}
}
return size
}
fun dequeue(remove: Boolean = true): TextureOut? {
val size = textureIdQueue.size
val poll = if(remove){
textureIdQueue.poll()
}else{
textureIdQueue.peek()
}
if(loggable){
Log.d(TAG, "TextureIdQueue dequeue index=${poll?.index}, size=$size")
}
return poll
}
fun reset() {
cacheIndex = 0
textureIdQueue.clear()
}
fun release() {
cacheIndex = 0
cacheTextureOuts.forEachIndexed { index, textureOut ->
if (textureOut != null) {
GLES20.glDeleteTextures(1, intArrayOf(textureOut.textureId), 0)
cacheTextureOuts[index] = null
}
}
textureIdQueue.clear()
glFrameBuffer.release()
}
data class TextureIn(
val textureId: Int,
val textureType: Int,
val width: Int,
val height: Int,
val rotation: Int,
val flipV: Boolean,
val isFrontCamera: Boolean,
val isMirror: Boolean,
val transform: FloatArray?,
val tag: Any? = null
)
data class TextureOut(
var index: Int = 0,
val textureId: Int,
val textureType: Int,
val width: Int,
val height: Int,
val isFrontCamera: Boolean,
var tag: Any? = null
)
}
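
A sketch of the producer/consumer flow for the queue above: enqueue() copies the incoming frame into one of cacheCount internal textures (rotation and mirroring resolved), and dequeue() hands back the oldest copy. Sizes and ids are placeholders.

import android.opengl.GLES11Ext

val bufferQueue = GLTextureBufferQueue(cacheCount = 2)

fun onCaptureFrame(oesTex: Int, w: Int, h: Int, rotation: Int, transform: FloatArray) {
    bufferQueue.enqueue(
        GLTextureBufferQueue.TextureIn(
            oesTex, GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            w, h, rotation,
            flipV = false, isFrontCamera = true, isMirror = false,
            transform = transform
        )
    )
}

fun drainOne(): Int? = bufferQueue.dequeue()?.textureId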


@@ -0,0 +1,279 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils.egl;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.Objects;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLContext;
import io.agora.beautyapi.faceunity.utils.LogUtils;
public class GLUtils {
private static final String TAG = "GLUtils";
public static final float[] IDENTITY_MATRIX = new float[16];
static {
Matrix.setIdentityM(IDENTITY_MATRIX, 0);
}
private GLUtils() {
}
public static Bitmap getTexture2DImage(int textureID, int width, int height) {
try {
int[] oldFboId = new int[1];
GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId));
int[] framebuffers = new int[1];
GLES20.glGenFramebuffers(1, framebuffers, 0);
int framebufferId = framebuffers[0];
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId);
int[] renderbuffers = new int[1];
GLES20.glGenRenderbuffers(1, renderbuffers, 0);
int renderId = renderbuffers[0];
GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId);
GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureID, 0);
GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId);
if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
LogUtils.e(TAG, "Framebuffer error");
}
ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
rgbaBuf.position(0);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(rgbaBuf);
            GLES20.glDeleteRenderbuffers(1, IntBuffer.wrap(renderbuffers));
            GLES20.glDeleteFramebuffers(1, IntBuffer.wrap(framebuffers));
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]);
return bitmap;
} catch (Exception e) {
LogUtils.e(TAG, e.toString());
}
return null;
}
public static Bitmap getTextureOESImage(int textureID, int width, int height) {
try {
int[] oldFboId = new int[1];
GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId));
int[] framebuffers = new int[1];
GLES20.glGenFramebuffers(1, framebuffers, 0);
int framebufferId = framebuffers[0];
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId);
int[] renderbuffers = new int[1];
GLES20.glGenRenderbuffers(1, renderbuffers, 0);
int renderId = renderbuffers[0];
GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId);
GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID, 0);
GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId);
if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
LogUtils.e(TAG, "Framebuffer error");
}
ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
rgbaBuf.position(0);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(rgbaBuf);
            GLES20.glDeleteRenderbuffers(1, IntBuffer.wrap(renderbuffers));
            GLES20.glDeleteFramebuffers(1, IntBuffer.wrap(framebuffers));
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]);
return bitmap;
} catch (Exception e) {
LogUtils.e(TAG, e.toString());
}
return null;
}
public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
Bitmap bitmap = null;
try {
YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
image.compressToJpeg(new Rect(0, 0, width, height), 80, stream);
bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
stream.close();
} catch (IOException e) {
e.printStackTrace();
}
return bitmap;
}
private static Bitmap readBitmap(int width, int height) {
ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
rgbaBuf.position(0);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(rgbaBuf);
return bitmap;
}
public static float[] createTransformMatrix(int rotation, boolean flipH, boolean flipV) {
float[] renderMVPMatrix = new float[16];
float[] tmp = new float[16];
Matrix.setIdentityM(tmp, 0);
boolean _flipH = flipH;
boolean _flipV = flipV;
if (rotation % 180 != 0) {
_flipH = flipV;
_flipV = flipH;
}
if (_flipH) {
Matrix.rotateM(tmp, 0, tmp, 0, 180, 0, 1f, 0);
}
if (_flipV) {
Matrix.rotateM(tmp, 0, tmp, 0, 180, 1f, 0f, 0);
}
float _rotation = rotation;
if (_rotation != 0) {
if (_flipH != _flipV) {
_rotation *= -1;
}
Matrix.rotateM(tmp, 0, tmp, 0, _rotation, 0, 0, 1);
}
Matrix.setIdentityM(renderMVPMatrix, 0);
Matrix.multiplyMM(renderMVPMatrix, 0, tmp, 0, renderMVPMatrix, 0);
return renderMVPMatrix;
}
public static EGLContext getCurrGLContext() {
EGL10 egl = (EGL10) javax.microedition.khronos.egl.EGLContext.getEGL();
if (egl != null && !Objects.equals(egl.eglGetCurrentContext(), EGL10.EGL_NO_CONTEXT)) {
return egl.eglGetCurrentContext();
}
return null;
}
public static void checkGlError(String op) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
String msg = op + ": glError 0x" + Integer.toHexString(error);
LogUtils.e(TAG, msg);
throw new RuntimeException(msg);
}
}
public static int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
checkGlError("glCreateProgram");
        if (program == 0) {
            LogUtils.e(TAG, "Could not create program");
            return 0;
        }
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
LogUtils.e(TAG, "Could not link program: ");
LogUtils.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
return program;
}
public static int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
checkGlError("glCreateShader type=" + shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
LogUtils.e(TAG, "Could not compile shader " + shaderType + ":");
LogUtils.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
return shader;
}
public static int createTexture(int textureTarget, Bitmap bitmap, int minFilter,
int magFilter, int wrapS, int wrapT) {
int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
checkGlError("glGenTextures");
GLES20.glBindTexture(textureTarget, textureHandle[0]);
checkGlError("glBindTexture " + textureHandle[0]);
GLES20.glTexParameterf(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, minFilter);
        GLES20.glTexParameterf(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, magFilter); //e.g. GL_LINEAR
GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, wrapS);
GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_T, wrapT);
if (bitmap != null) {
android.opengl.GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
}
checkGlError("glTexParameter");
return textureHandle[0];
}
}
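
A sketch using the shader helpers above to build a trivial program; the GLSL is a placeholder, not anything shipped in this commit.

private const val VERTEX_SRC = """
    attribute vec4 aPosition;
    void main() { gl_Position = aPosition; }
"""

private const val FRAGMENT_SRC = """
    precision mediump float;
    void main() { gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); }
"""

fun buildDebugProgram(): Int {
    // createProgram() returns 0 and logs if compiling or linking fails.
    return GLUtils.createProgram(VERTEX_SRC, FRAGMENT_SRC)
}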


@@ -0,0 +1,214 @@
/*
* MIT License
*
* Copyright (c) 2023 Agora Community
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.agora.beautyapi.faceunity.utils.egl
import android.opengl.GLES20
import io.agora.beautyapi.faceunity.utils.LogUtils
import java.util.concurrent.Callable
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.CountDownLatch
import java.util.concurrent.Executors
import java.util.concurrent.Future
import javax.microedition.khronos.egl.EGLContext
class TextureProcessHelper(
private val cacheCount: Int = 2
) {
private val TAG = "TextureProcessHelper"
private val glTextureBufferQueueIn = GLTextureBufferQueue(cacheCount = cacheCount, loggable = true)
private val glTextureBufferQueueOut = GLTextureBufferQueue(cacheCount = cacheCount, loggable = false)
private val glFrameBuffer = GLFrameBuffer()
private val futureQueue = ConcurrentLinkedQueue<Future<Int>>()
private val workerThread = Executors.newSingleThreadExecutor()
private val eglContextHelper =
EGLContextHelper()
private var eglContextBase: EGLContext? = null
private var isReleased = false
private var filter: ((GLTextureBufferQueue.TextureOut) -> Int)? = null
private var isBegin = false
private var frameIndex = 0
fun setFilter(filter: (GLTextureBufferQueue.TextureOut) -> Int) {
this.filter = filter
}
fun process(
texId: Int, texType: Int,
width: Int, height: Int, rotation: Int,
transform: FloatArray,
isFrontCamera: Boolean,
isMirror: Boolean
): Int {
if (isReleased) {
return -1
}
val currGLContext = GLUtils.getCurrGLContext() ?: return -1
if (eglContextBase == null) {
eglContextBase = currGLContext
executeSync {
eglContextHelper.initEGL(eglContextBase)
eglContextHelper.eglMakeCurrent()
}
} else if (eglContextBase != currGLContext) {
eglContextBase = currGLContext
executeSync {
eglContextHelper.release()
eglContextHelper.initEGL(eglContextBase)
eglContextHelper.eglMakeCurrent()
}
}
glTextureBufferQueueIn.enqueue(
GLTextureBufferQueue.TextureIn(
texId,
texType,
width,
height,
rotation,
false,
isFrontCamera,
isMirror,
transform,
frameIndex
)
)
        frameIndex++
if (isReleased) {
return -1
}
futureQueue.offer(workerThread.submit(Callable {
if (isReleased) {
return@Callable -2
}
val frame = glTextureBufferQueueIn.dequeue(false) ?: return@Callable -2
val filterTexId = filter?.invoke(frame) ?: -1
if (filterTexId >= 0) {
glTextureBufferQueueOut.enqueue(
GLTextureBufferQueue.TextureIn(
filterTexId,
GLES20.GL_TEXTURE_2D,
frame.width,
frame.height,
0,
false,
false,
true,
null,
frame.tag
)
)
} else {
glTextureBufferQueueOut.enqueue(
GLTextureBufferQueue.TextureIn(
frame.textureId,
frame.textureType,
frame.width,
frame.height,
0,
false,
false,
true,
null,
frame.tag
)
)
}
glTextureBufferQueueIn.dequeue(true)
return@Callable 0
}))
var ret = 0
if (isBegin || futureQueue.size >= cacheCount) {
isBegin = true
try {
val get = futureQueue.poll()?.get() ?: -1
if (get == 0) {
val dequeue = glTextureBufferQueueOut.dequeue() ?: return -1
glFrameBuffer.setSize(dequeue.width, dequeue.height)
ret = glFrameBuffer.process(dequeue.textureId, dequeue.textureType)
}
}catch (e: Exception){
LogUtils.e(TAG, "process end with exception: $e")
}
}
return ret
}
fun reset(){
if(frameIndex == 0){
return
}
isBegin = false
frameIndex = 0
var future = futureQueue.poll()
while (future != null) {
future.cancel(true)
future = futureQueue.poll()
}
glTextureBufferQueueIn.reset()
// glFrameBuffer.release()
executeSync {
glTextureBufferQueueOut.reset()
}
}
fun size() = futureQueue.size
fun release() {
isReleased = true
filter = null
isBegin = false
frameIndex = 0
var future = futureQueue.poll()
while (future != null) {
future.cancel(true)
future = futureQueue.poll()
}
glTextureBufferQueueIn.release()
glFrameBuffer.release()
executeSync {
glTextureBufferQueueOut.release()
if (eglContextBase != null) {
eglContextHelper.release()
eglContextBase = null
}
}
workerThread.shutdown()
}
fun executeSync(run: () -> Unit) {
val latch = CountDownLatch(1)
workerThread.execute {
run.invoke()
latch.countDown()
}
latch.await()
}
}
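
Finally, a sketch of wiring the pipeline above from a capture callback: the filter lambda runs on the helper's worker thread inside a shared EGL context, and process() returns the pipelined output texture (or a value <= 0 while the queue warms up or after release). Parameter values are placeholders.

import android.opengl.GLES11Ext

val processHelper = TextureProcessHelper(cacheCount = 2)

fun init() {
    processHelper.setFilter { frame ->
        // Hand frame.textureId (a 2D texture) to the beauty SDK here and
        // return the processed texture id; returning -1 passes the frame through.
        -1
    }
}

fun onVideoFrame(oesTex: Int, w: Int, h: Int, rotation: Int, transform: FloatArray): Int =
    processHelper.process(
        oesTex, GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
        w, h, rotation, transform,
        isFrontCamera = true, isMirror = false
    )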